diff --git a/.github/workflows/python-ci-docs.yml b/.github/workflows/python-ci-docs.yml new file mode 100644 index 000000000000..ec3d75b3c754 --- /dev/null +++ b/.github/workflows/python-ci-docs.yml @@ -0,0 +1,54 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# + +name: "Python CI Docs" +on: + push: + branches: + - 'master' + - '0.**' + tags: + - 'apache-iceberg-**' + pull_request: + paths: + - '.github/workflows/python-ci-docs.yml' + - 'python/**' + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: ${{ github.event_name == 'pull_request' }} + +jobs: + tox: + runs-on: ubuntu-20.04 + strategy: + matrix: + python: ['3.8'] + + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python }} + - name: Install + working-directory: ./python/docs + run: pip install -r requirements.txt + - name: Deploy + working-directory: ./python/docs + run: mkdocs gh-deploy diff --git a/.readthedocs.yaml b/.readthedocs.yaml new file mode 100644 index 000000000000..acc69b0539d2 --- /dev/null +++ b/.readthedocs.yaml @@ -0,0 +1,13 @@ +# .readthedocs.yaml +# Read the Docs configuration file +# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details + +# Required +version: 2 + +mkdocs: + configuration: python/docs/mkdocs.yml + +python: + install: + - requirements: python/docs/requirements.txt diff --git a/docs/python-api-intro.md b/docs/python-api-intro.md deleted file mode 100644 index 80c1039d0cef..000000000000 --- a/docs/python-api-intro.md +++ /dev/null @@ -1,184 +0,0 @@ ---- -title: "Python API" -url: python-api-intro -aliases: - - "python/api-intro" -menu: - main: - parent: "API" - weight: 500 ---- - - -# Iceberg Python API - -Much of the python api conforms to the java api. You can get more info about the java api [here](../api). - -## Catalog - -The Catalog interface, like java provides search and management operations for tables. 
- -To create a catalog: - -``` python -from iceberg.hive import HiveTables - -# instantiate Hive Tables -conf = {"hive.metastore.uris": 'thrift://{hms_host}:{hms_port}', - "hive.metastore.warehouse.dir": {tmpdir} } -tables = HiveTables(conf) -``` - -and to create a table from a catalog: - -``` python -from iceberg.api.schema import Schema\ -from iceberg.api.types import TimestampType, DoubleType, StringType, NestedField -from iceberg.api.partition_spec import PartitionSpecBuilder - -schema = Schema(NestedField.optional(1, "DateTime", TimestampType.with_timezone()), - NestedField.optional(2, "Bid", DoubleType.get()), - NestedField.optional(3, "Ask", DoubleType.get()), - NestedField.optional(4, "symbol", StringType.get())) -partition_spec = PartitionSpecBuilder(schema).add(1, 1000, "DateTime_day", "day").build() - -tables.create(schema, "test.test_123", partition_spec) -``` - - -## Tables - -The Table interface provides access to table metadata - -+ schema returns the current table `Schema` -+ spec returns the current table `PartitonSpec` -+ properties returns a map of key-value `TableProperties` -+ currentSnapshot returns the current table `Snapshot` -+ snapshots returns all valid snapshots for the table -+ snapshot(id) returns a specific snapshot by ID -+ location returns the table’s base location - -Tables also provide refresh to update the table to the latest version. - -### Scanning -Iceberg table scans start by creating a `TableScan` object with `newScan`. - -``` python -scan = table.new_scan(); -``` - -To configure a scan, call filter and select on the `TableScan` to get a new `TableScan` with those changes. - -``` python -filtered_scan = scan.filter(Expressions.equal("id", 5)) -``` - -String expressions can also be passed to the filter method. - -``` python -filtered_scan = scan.filter("id=5") -``` - -`Schema` projections can be applied against a `TableScan` by passing a list of column names. 
- -``` python -filtered_scan = scan.select(["col_1", "col_2", "col_3"]) -``` - -Because some data types cannot be read using the python library, a convenience method for excluding columns from projection is provided. - -``` python -filtered_scan = scan.select_except(["unsupported_col_1", "unsupported_col_2"]) -``` - - -Calls to configuration methods create a new `TableScan` so that each `TableScan` is immutable. - -When a scan is configured, `planFiles`, `planTasks`, and `Schema` are used to return files, tasks, and the read projection. - -``` python -scan = table.new_scan() \ - .filter("id=5") \ - .select(["id", "data"]) - -projection = scan.schema -for task in scan.plan_tasks(): - print(task) -``` - -## Types - -Iceberg data types are located in `iceberg.api.types.types` - -### Primitives - -Primitive type instances are available from static methods in each type class. Types without parameters use `get`, and types like `DecimalType` use factory methods: - -```python -IntegerType.get() # int -DoubleType.get() # double -DecimalType.of(9, 2) # decimal(9, 2) -``` - -### Nested types -Structs, maps, and lists are created using factory methods in type classes. - -Like struct fields, map keys or values and list elements are tracked as nested fields. Nested fields track [field IDs](https://iceberg.apache.org/evolution/#correctness) and nullability. - -Struct fields are created using `NestedField.optional` or `NestedField.required`. Map value and list element nullability is set in the map and list factory methods. 
- -```python -# struct<1 id: int, 2 data: optional string> -struct = StructType.of([NestedField.required(1, "id", IntegerType.get()), - NestedField.optional(2, "data", StringType.get()]) - ) -``` -```python -# map<1 key: int, 2 value: optional string> -map_var = MapType.of_optional(1, IntegerType.get(), - 2, StringType.get()) -``` -```python -# array<1 element: int> -list_var = ListType.of_required(1, IntegerType.get()); -``` - -## Expressions -Iceberg’s `Expressions` are used to configure table scans. To create `Expressions`, use the factory methods in `Expressions`. - -Supported `Predicate` expressions are: - -+ `is_null` -+ `not_null` -+ `equal` -+ `not_equal` -+ `less_than` -+ `less_than_or_equal` -+ `greater_than` -+ `greater_than_or_equal` - -Supported expression `Operations`are: - -+ `and` -+ `or` -+ `not` - -Constant expressions are: - -+ `always_true` -+ `always_false` diff --git a/docs/python-feature-support.md b/docs/python-feature-support.md deleted file mode 100644 index 4488e4763bac..000000000000 --- a/docs/python-feature-support.md +++ /dev/null @@ -1,79 +0,0 @@ ---- -title: "Python Feature Support" -url: python-feature-support -aliases: - - "python/feature-support" -menu: - main: - parent: "API" - weight: 600 ---- - - -# Feature Support - -The goal is that the python library will provide a functional, performant subset of the java library. The initial focus has been on reading table metadata as well as providing the capability to both plan and execute a scan. 
- -## Feature Comparison - -### Metadata - -| Operation | Java | Python | -|:------------------------|:-----:|:------:| -| Get Schema | X | X | -| Get Snapshots | X | X | -| Plan Scan | X | X | -| Plan Scan for Snapshot | X | X | -| Update Current Snapshot | X | | -| Set Table Properties | X | | -| Create Table | X | X | -| Drop Table | X | X | -| Alter Table | X | | - - -### Read Support - -Pyarrow is used for reading parquet files, so read support is limited to what is currently supported in the pyarrow.parquet package. - -#### Primitive Types - - -| Data Type | Java | Python | -|:------------------------|:----:|:------:| -| BooleanType | X | X | -| DateType | X | X | -| DecimalType | X | X | -| FloatType | X | X | -| IntegerType | X | X | -| LongType | X | X | -| TimeType | X | X | -| TimestampType | X | X | - -#### Nested Types - -| Data Type | Java | Python | -|:------------------------|:----:|:------:| -| ListType of primitives | X | X | -| MapType of primitives | X | X | -| StructType of primitives| X | X | -| ListType of Nested Types| X | | -| MapType of Nested Types | X | | - -### Write Support - -The python client does not currently support write capability diff --git a/docs/python-quickstart.md b/docs/python-quickstart.md deleted file mode 100644 index 03c3acbe81a5..000000000000 --- a/docs/python-quickstart.md +++ /dev/null @@ -1,70 +0,0 @@ ---- -title: "Python Quickstart" -url: python-quickstart -aliases: - - "python/quickstart" -menu: - main: - parent: "API" - weight: 400 ---- - - - -# Python API Quickstart - -## Installation - -Iceberg python is currently in development, for development and testing purposes the best way to install the library is to perform the following steps: -``` -git clone https://github.com/apache/iceberg.git -cd iceberg/python -pip install -e . -``` - -## Testing -Testing is done using tox. The config can be found in `tox.ini` within the python directory of the iceberg project. 
- -``` -# simply run tox from within the python dir -tox -``` - -# Examples - -## Inspect Table Metadata -``` python - -from iceberg.hive import HiveTables - -# instantiate Hive Tables -conf = {"hive.metastore.uris": 'thrift://{hms_host}:{hms_port}'} -tables = HiveTables(conf) - -# load table -tbl = tables.load("iceberg_db.iceberg_test_table") - -# inspect metadata -print(tbl.schema()) -print(tbl.spec()) -print(tbl.location()) - -# get table level record count -from pprint import pprint -pprint(int(tbl.current_snapshot().summary.get("total-records"))) -``` diff --git a/python/docs/docs/index.md b/python/docs/docs/index.md new file mode 100644 index 000000000000..c31dd3c8d6f4 --- /dev/null +++ b/python/docs/docs/index.md @@ -0,0 +1,418 @@ + + +# PyIceberg + +Much of the python api conforms to the Java API. You can get more info about the java api [here](../api). + +## Installing + +You can install the latest release version from pypi: + +```sh +pip3 install "pyiceberg[s3fs,hive]" +``` + +Or install the latest development version locally: + +``` +pip3 install poetry --upgrade +pip3 install -e ".[s3fs,hive]" +``` + +With optional dependencies: + +| Key | Description: | +|-----------|----------------------------------------------------------------------| +| hive | Support for the Hive metastore | +| pyarrow | PyArrow as a FileIO implementation to interact with the object store | +| s3fs | S3FS as a FileIO implementation to interact with the object store | +| snappy | Support for snappy Avro compression | + +# Python CLI Quickstart + +Pyiceberg ships with a CLI that's available after installing the `pyiceberg` package. + +```sh +➜ pyiceberg --help +Usage: pyiceberg [OPTIONS] COMMAND [ARGS]... + +Options: + --catalog TEXT + --verbose BOOLEAN + --output [text|json] + --uri TEXT + --credential TEXT + --help Show this message and exit. 
+ +Commands: + describe Describes a namespace xor table + drop Operations to drop a namespace or table + list Lists tables or namespaces + location Returns the location of the table + properties Properties on tables/namespaces + rename Renames a table + schema Gets the schema of the table + spec Returns the partition spec of the table + uuid Returns the UUID of the table +``` + + +# Configuration + +There are three ways of setting the configuration. + +For the CLI you can pass it in using `--uri` and `--credential` and it will automatically detect the type based on the scheme (`http(s)` for rest, `thrift` for Hive). + +Secondly, YAML based configuration is supported `cat ~/.pyiceberg.yaml`: + +```yaml +catalog: + default: + uri: thrift://localhost:9083 + + rest: + uri: http://rest-catalog/ws/ + credential: t-1234:secret +``` + +Lastly, you can also set it using environment variables: + +```sh +export PYICEBERG_CATALOG__DEFAULT__URI=thrift://localhost:9083 + +export PYICEBERG_CATALOG__REST__URI=http://rest-catalog/ws/ +export PYICEBERG_CATALOG__REST__CREDENTIAL=t-1234:secret +``` + +Where the structure is equivalent to the YAML. The levels are separated using a double underscore (`__`). + +# CLI Quickstart + +This example assumes that you have a default catalog set. If you want to load another catalog, for example, the rest example above. Then you need to set `--catalog rest`. 
+ +```sh +➜ pyiceberg list +default +nyc +``` + +```sh +➜ pyiceberg list nyc +nyc.taxis +``` + +```sh +pyiceberg describe nyc.taxis +Table format version 1 +Metadata location file:/.../nyc.db/taxis/metadata/00000-aa3a3eac-ea08-4255-b890-383a64a94e42.metadata.json +Table UUID 6cdfda33-bfa3-48a7-a09e-7abb462e3460 +Last Updated 1661783158061 +Partition spec [] +Sort order [] +Current schema Schema, id=0 + ├── 1: VendorID: optional long + ├── 2: tpep_pickup_datetime: optional timestamptz + ├── 3: tpep_dropoff_datetime: optional timestamptz + ├── 4: passenger_count: optional double + ├── 5: trip_distance: optional double + ├── 6: RatecodeID: optional double + ├── 7: store_and_fwd_flag: optional string + ├── 8: PULocationID: optional long + ├── 9: DOLocationID: optional long + ├── 10: payment_type: optional long + ├── 11: fare_amount: optional double + ├── 12: extra: optional double + ├── 13: mta_tax: optional double + ├── 14: tip_amount: optional double + ├── 15: tolls_amount: optional double + ├── 16: improvement_surcharge: optional double + ├── 17: total_amount: optional double + ├── 18: congestion_surcharge: optional double + └── 19: airport_fee: optional double +Current snapshot Operation.APPEND: id=5937117119577207079, schema_id=0 +Snapshots Snapshots + └── Snapshot 5937117119577207079, schema 0: file:/.../nyc.db/taxis/metadata/snap-5937117119577207079-1-94656c4f-4c66-4600-a4ca-f30377300527.avro +Properties owner root + write.format.default parquet +``` + +Or output in JSON for automation: + +```sh +pyiceberg --output json describe nyc.taxis | jq +{ + "identifier": [ + "nyc", + "taxis" + ], + "metadata_location": "file:/.../nyc.db/taxis/metadata/00000-aa3a3eac-ea08-4255-b890-383a64a94e42.metadata.json", + "metadata": { + "location": "file:/.../nyc.db/taxis", + "table-uuid": "6cdfda33-bfa3-48a7-a09e-7abb462e3460", + "last-updated-ms": 1661783158061, + "last-column-id": 19, + "schemas": [ + { + "type": "struct", + "fields": [ + { + "id": 1, + "name": "VendorID", + 
"type": "long", + "required": false + }, +... + { + "id": 19, + "name": "airport_fee", + "type": "double", + "required": false + } + ], + "schema-id": 0, + "identifier-field-ids": [] + } + ], + "current-schema-id": 0, + "partition-specs": [ + { + "spec-id": 0, + "fields": [] + } + ], + "default-spec-id": 0, + "last-partition-id": 999, + "properties": { + "owner": "root", + "write.format.default": "parquet" + }, + "current-snapshot-id": 5937117119577207000, + "snapshots": [ + { + "snapshot-id": 5937117119577207000, + "timestamp-ms": 1661783158061, + "manifest-list": "file:/.../nyc.db/taxis/metadata/snap-5937117119577207079-1-94656c4f-4c66-4600-a4ca-f30377300527.avro", + "summary": { + "operation": "append", + "spark.app.id": "local-1661783139151", + "added-data-files": "1", + "added-records": "2979431", + "added-files-size": "46600777", + "changed-partition-count": "1", + "total-records": "2979431", + "total-files-size": "46600777", + "total-data-files": "1", + "total-delete-files": "0", + "total-position-deletes": "0", + "total-equality-deletes": "0" + }, + "schema-id": 0 + } + ], + "snapshot-log": [ + { + "snapshot-id": "5937117119577207079", + "timestamp-ms": 1661783158061 + } + ], + "metadata-log": [], + "sort-orders": [ + { + "order-id": 0, + "fields": [] + } + ], + "default-sort-order-id": 0, + "refs": { + "main": { + "snapshot-id": 5937117119577207000, + "type": "branch" + } + }, + "format-version": 1, + "schema": { + "type": "struct", + "fields": [ + { + "id": 1, + "name": "VendorID", + "type": "long", + "required": false + }, +... 
+ { + "id": 19, + "name": "airport_fee", + "type": "double", + "required": false + } + ], + "schema-id": 0, + "identifier-field-ids": [] + }, + "partition-spec": [] + } +} +``` + +# Python API + +To instantiate a catalog: + +``` python +>>> from pyiceberg.catalog.hive import HiveCatalog +>>> catalog = HiveCatalog(name='prod', uri='thrift://localhost:9083/') + +>>> catalog.list_namespaces() +[('default',), ('nyc',)] + +>>> catalog.list_tables('nyc') +[('nyc', 'taxis')] + +>>> catalog.load_table(('nyc', 'taxis')) +Table(identifier=('nyc', 'taxis'), ...) +``` + +And to create a table from a catalog: + +``` python +from pyiceberg.schema import Schema +from pyiceberg.types import TimestampType, DoubleType, StringType, NestedField + +schema = Schema( + NestedField(field_id=1, name="datetime", field_type=TimestampType(), required=False), + NestedField(field_id=2, name="bid", field_type=DoubleType(), required=False), + NestedField(field_id=3, name="ask", field_type=DoubleType(), required=False), + NestedField(field_id=4, name="symbol", field_type=StringType(), required=False), +) + +from pyiceberg.table.partitioning import PartitionSpec, PartitionField +from pyiceberg.transforms import DayTransform + +partition_spec = PartitionSpec( + PartitionField(source_id=1, field_id=1000, transform=DayTransform(), name="datetime_day") +) + +from pyiceberg.table.sorting import SortOrder, SortField +from pyiceberg.transforms import IdentityTransform + +sort_order = SortOrder( + SortField(source_id=4, transform=IdentityTransform()) +) + +from pyiceberg.catalog.hive import HiveCatalog +catalog = HiveCatalog(name='prod', uri='thrift://localhost:9083/') + +catalog.create_table( + identifier='default.bids', + location='/Users/fokkodriesprong/Desktop/docker-spark-iceberg/wh/bids/', + schema=schema, + partition_spec=partition_spec, + sort_order=sort_order +) +``` + +Which returns a newly created table: + +``` +Table( + identifier=('default', 'bids'), + 
metadata_location='/Users/fokkodriesprong/Desktop/docker-spark-iceberg/wh/bids//metadata/00000-c8cd93ab-f784-474d-a167-b1a86b05195f.metadata.json', + metadata=TableMetadataV2( + location='/Users/fokkodriesprong/Desktop/docker-spark-iceberg/wh/bids/', + table_uuid=UUID('38d4cb39-4945-4bf2-b374-984b5c4984d2'), + last_updated_ms=1661847562069, + last_column_id=4, + schemas=[ + Schema( + NestedField(field_id=1, name='datetime', field_type=TimestampType(), required=False), + NestedField(field_id=2, name='bid', field_type=DoubleType(), required=False), + NestedField(field_id=3, name='ask', field_type=DoubleType(), required=False), + NestedField(field_id=4, name='symbol', field_type=StringType(), required=False)), + schema_id=1, + identifier_field_ids=[]) + ], + current_schema_id=1, + partition_specs=[ + PartitionSpec( + PartitionField(source_id=1, field_id=1000, transform=DayTransform(), name='datetime_day'),)) + ], + default_spec_id=0, + last_partition_id=1000, + properties={}, + current_snapshot_id=None, + snapshots=[], + snapshot_log=[], + metadata_log=[], + sort_orders=[ + SortOrder(order_id=1, fields=[SortField(source_id=4, transform=IdentityTransform(), direction=SortDirection.ASC, null_order=NullOrder.NULLS_FIRST)]) + ], + default_sort_order_id=1, + refs={}, + format_version=2, + last_sequence_number=0 + ) +) +``` + +# Feature Support + +The goal is that the python library will provide a functional, performant subset of the Java library. The initial focus has been on reading table metadata and provide a convenient CLI to go through the catalog. + +## Metadata + +| Operation | Java | Python | +|:------------------------|:-----:|:------:| +| Get Schema | X | X | +| Get Snapshots | X | X | +| Plan Scan | X | | +| Plan Scan for Snapshot | X | | +| Update Current Snapshot | X | | +| Set Table Properties | X | X | +| Create Table | X | X | +| Drop Table | X | X | +| Alter Table | X | | + + +## Types + +The types are located in `pyiceberg.types`. 
+ +Primitive types: + +- BooleanType +- StringType +- IntegerType +- LongType +- FloatType +- DoubleType +- DateType +- TimeType +- TimestampType +- TimestamptzType +- BinaryType +- UUIDType + +Complex types: + +- StructType +- ListType +- MapType +- FixedType(16) +- DecimalType(8, 3) + diff --git a/python/docs/mkdocs.yml b/python/docs/mkdocs.yml new file mode 100644 index 000000000000..4b7f59bf0236 --- /dev/null +++ b/python/docs/mkdocs.yml @@ -0,0 +1 @@ +site_name: PyIceberg diff --git a/python/docs/requirements.txt b/python/docs/requirements.txt new file mode 100644 index 000000000000..d8d81c0d6e0e --- /dev/null +++ b/python/docs/requirements.txt @@ -0,0 +1,2 @@ +mkdocs==1.3.1 +jinja2==3.0.3 \ No newline at end of file diff --git a/python/poetry.lock b/python/poetry.lock index f57261c4410a..2584e92dc33a 100644 --- a/python/poetry.lock +++ b/python/poetry.lock @@ -436,7 +436,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "pydantic" -version = "1.10.1" +version = "1.10.2" description = "Data validation and settings management using python type hints" category = "main" optional = false @@ -749,7 +749,7 @@ snappy = ["python-snappy"] [metadata] lock-version = "1.1" python-versions = "^3.8" -content-hash = "8aadbca4c1e2def4875c5acc9fba8e9bc049e86d734d6301d2a9451e537eb21d" +content-hash = "85e7cae53b49cf6e3ee94c39ddd7af763224c3c7002736e7fc90c0667521efe8" [metadata.files] aiobotocore = [ @@ -1288,42 +1288,42 @@ pycparser = [ {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, ] pydantic = [ - {file = "pydantic-1.10.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:221166d99726238f71adc4fa9f3e94063a10787574b966f86a774559e709ac5a"}, - {file = "pydantic-1.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a90e85d95fd968cd7cae122e0d3e0e1f6613bc88c1ff3fe838ac9785ea4b1c4c"}, - {file = "pydantic-1.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash 
= "sha256:f2157aaf5718c648eaec9e654a34179ae42ffc363dc3ad058538a4f3ecbd9341"}, - {file = "pydantic-1.10.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6142246fc9adb51cadaeb84fb52a86f3adad4c6a7b0938a5dd0b1356b0088217"}, - {file = "pydantic-1.10.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:60dad97a09b6f44690c05467a4f397b62bfc2c839ac39102819d6979abc2be0d"}, - {file = "pydantic-1.10.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d6f5bcb59d33ec46621dae76e714c53035087666cac80c81c9047a84f3ff93d0"}, - {file = "pydantic-1.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:522906820cd60e63c7960ba83078bf2d2ad2dd0870bf68248039bcb1ec3eb0a4"}, - {file = "pydantic-1.10.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d545c89d88bdd5559db17aeb5a61a26799903e4bd76114779b3bf1456690f6ce"}, - {file = "pydantic-1.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ad2374b5b3b771dcc6e2f6e0d56632ab63b90e9808b7a73ad865397fcdb4b2cd"}, - {file = "pydantic-1.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90e02f61b7354ed330f294a437d0bffac9e21a5d46cb4cc3c89d220e497db7ac"}, - {file = "pydantic-1.10.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc5ffe7bd0b4778fa5b7a5f825c52d6cfea3ae2d9b52b05b9b1d97e36dee23a8"}, - {file = "pydantic-1.10.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7acb7b66ffd2bc046eaff0063df84c83fc3826722d5272adaeadf6252e17f691"}, - {file = "pydantic-1.10.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7e6786ed5faa559dea5a77f6d2de9a08d18130de9344533535d945f34bdcd42e"}, - {file = "pydantic-1.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:c7bf8ff1d18186eb0cbe42bd9bfb4cbf7fde1fd01b8608925458990c21f202f0"}, - {file = "pydantic-1.10.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:14a5babda137a294df7ad5f220986d79bbb87fdeb332c6ded61ce19da7f5f3bf"}, - {file = 
"pydantic-1.10.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5659cb9c6b3d27fc0067025c4f5a205f5e838232a4a929b412781117c2343d44"}, - {file = "pydantic-1.10.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c8d70fb91b03c32d2e857b071a22a5225e6b625ca82bd2cc8dd729d88e0bd200"}, - {file = "pydantic-1.10.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9a93be313e40f12c6f2cb84533b226bbe23d0774872e38d83415e6890215e3a6"}, - {file = "pydantic-1.10.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d55aeb01bb7bd7c7e1bd904668a4a2ffcbb1c248e7ae9eb40a272fd7e67dd98b"}, - {file = "pydantic-1.10.1-cp37-cp37m-win_amd64.whl", hash = "sha256:43d41b6f13706488e854729955ba8f740e6ec375cd16b72b81dc24b9d84f0d15"}, - {file = "pydantic-1.10.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f31ffe0e38805a0e6410330f78147bb89193b136d7a5f79cae60d3e849b520a6"}, - {file = "pydantic-1.10.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8eee69eda7674977b079a21e7bf825b59d8bf15145300e8034ed3eb239ac444f"}, - {file = "pydantic-1.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f927bff6c319fc92e0a2cbeb2609b5c1cd562862f4b54ec905e353282b7c8b1"}, - {file = "pydantic-1.10.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb1bc3f8fef6ba36977108505e90558911e7fbccb4e930805d5dd90891b56ff4"}, - {file = "pydantic-1.10.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:96ab6ce1346d14c6e581a69c333bdd1b492df9cf85ad31ad77a8aa42180b7e09"}, - {file = "pydantic-1.10.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:444cf220a12134da1cd42fe4f45edff622139e10177ce3d8ef2b4f41db1291b2"}, - {file = "pydantic-1.10.1-cp38-cp38-win_amd64.whl", hash = "sha256:dbfbff83565b4514dd8cebc8b8c81a12247e89427ff997ad0a9da7b2b1065c12"}, - {file = "pydantic-1.10.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:5327406f4bfd5aee784e7ad2a6a5fdd7171c19905bf34cb1994a1ba73a87c468"}, - {file = "pydantic-1.10.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1072eae28bf034a311764c130784e8065201a90edbca10f495c906737b3bd642"}, - {file = "pydantic-1.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce901335667a68dfbc10dd2ee6c0d676b89210d754441c2469fbc37baf7ee2ed"}, - {file = "pydantic-1.10.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54d6465cd2112441305faf5143a491b40de07a203116b5755a2108e36b25308d"}, - {file = "pydantic-1.10.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2b5e5e7a0ec96704099e271911a1049321ba1afda92920df0769898a7e9a1298"}, - {file = "pydantic-1.10.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ae43704358304da45c1c3dd7056f173c618b252f91594bcb6d6f6b4c6c284dee"}, - {file = "pydantic-1.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:2d7da49229ffb1049779a5a6c1c50a26da164bd053cf8ee9042197dc08a98259"}, - {file = "pydantic-1.10.1-py3-none-any.whl", hash = "sha256:f8b10e59c035ff3dcc9791619d6e6c5141e0fa5cbe264e19e267b8d523b210bf"}, - {file = "pydantic-1.10.1.tar.gz", hash = "sha256:d41bb80347a8a2d51fbd6f1748b42aca14541315878447ba159617544712f770"}, + {file = "pydantic-1.10.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bb6ad4489af1bac6955d38ebcb95079a836af31e4c4f74aba1ca05bb9f6027bd"}, + {file = "pydantic-1.10.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a1f5a63a6dfe19d719b1b6e6106561869d2efaca6167f84f5ab9347887d78b98"}, + {file = "pydantic-1.10.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:352aedb1d71b8b0736c6d56ad2bd34c6982720644b0624462059ab29bd6e5912"}, + {file = "pydantic-1.10.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19b3b9ccf97af2b7519c42032441a891a5e05c68368f40865a90eb88833c2559"}, + {file = "pydantic-1.10.2-cp310-cp310-musllinux_1_1_i686.whl", hash 
= "sha256:e9069e1b01525a96e6ff49e25876d90d5a563bc31c658289a8772ae186552236"}, + {file = "pydantic-1.10.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:355639d9afc76bcb9b0c3000ddcd08472ae75318a6eb67a15866b87e2efa168c"}, + {file = "pydantic-1.10.2-cp310-cp310-win_amd64.whl", hash = "sha256:ae544c47bec47a86bc7d350f965d8b15540e27e5aa4f55170ac6a75e5f73b644"}, + {file = "pydantic-1.10.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a4c805731c33a8db4b6ace45ce440c4ef5336e712508b4d9e1aafa617dc9907f"}, + {file = "pydantic-1.10.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d49f3db871575e0426b12e2f32fdb25e579dea16486a26e5a0474af87cb1ab0a"}, + {file = "pydantic-1.10.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37c90345ec7dd2f1bcef82ce49b6235b40f282b94d3eec47e801baf864d15525"}, + {file = "pydantic-1.10.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b5ba54d026c2bd2cb769d3468885f23f43710f651688e91f5fb1edcf0ee9283"}, + {file = "pydantic-1.10.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:05e00dbebbe810b33c7a7362f231893183bcc4251f3f2ff991c31d5c08240c42"}, + {file = "pydantic-1.10.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2d0567e60eb01bccda3a4df01df677adf6b437958d35c12a3ac3e0f078b0ee52"}, + {file = "pydantic-1.10.2-cp311-cp311-win_amd64.whl", hash = "sha256:c6f981882aea41e021f72779ce2a4e87267458cc4d39ea990729e21ef18f0f8c"}, + {file = "pydantic-1.10.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c4aac8e7103bf598373208f6299fa9a5cfd1fc571f2d40bf1dd1955a63d6eeb5"}, + {file = "pydantic-1.10.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a7b66c3f499108b448f3f004801fcd7d7165fb4200acb03f1c2402da73ce4c"}, + {file = "pydantic-1.10.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bedf309630209e78582ffacda64a21f96f3ed2e51fbf3962d4d488e503420254"}, + {file = 
"pydantic-1.10.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9300fcbebf85f6339a02c6994b2eb3ff1b9c8c14f502058b5bf349d42447dcf5"}, + {file = "pydantic-1.10.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:216f3bcbf19c726b1cc22b099dd409aa371f55c08800bcea4c44c8f74b73478d"}, + {file = "pydantic-1.10.2-cp37-cp37m-win_amd64.whl", hash = "sha256:dd3f9a40c16daf323cf913593083698caee97df2804aa36c4b3175d5ac1b92a2"}, + {file = "pydantic-1.10.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b97890e56a694486f772d36efd2ba31612739bc6f3caeee50e9e7e3ebd2fdd13"}, + {file = "pydantic-1.10.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9cabf4a7f05a776e7793e72793cd92cc865ea0e83a819f9ae4ecccb1b8aa6116"}, + {file = "pydantic-1.10.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06094d18dd5e6f2bbf93efa54991c3240964bb663b87729ac340eb5014310624"}, + {file = "pydantic-1.10.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc78cc83110d2f275ec1970e7a831f4e371ee92405332ebfe9860a715f8336e1"}, + {file = "pydantic-1.10.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ee433e274268a4b0c8fde7ad9d58ecba12b069a033ecc4645bb6303c062d2e9"}, + {file = "pydantic-1.10.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7c2abc4393dea97a4ccbb4ec7d8658d4e22c4765b7b9b9445588f16c71ad9965"}, + {file = "pydantic-1.10.2-cp38-cp38-win_amd64.whl", hash = "sha256:0b959f4d8211fc964772b595ebb25f7652da3f22322c007b6fed26846a40685e"}, + {file = "pydantic-1.10.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c33602f93bfb67779f9c507e4d69451664524389546bacfe1bee13cae6dc7488"}, + {file = "pydantic-1.10.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5760e164b807a48a8f25f8aa1a6d857e6ce62e7ec83ea5d5c5a802eac81bad41"}, + {file = "pydantic-1.10.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6eb843dcc411b6a2237a694f5e1d649fc66c6064d02b204a7e9d194dff81eb4b"}, + {file = 
"pydantic-1.10.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b8795290deaae348c4eba0cebb196e1c6b98bdbe7f50b2d0d9a4a99716342fe"}, + {file = "pydantic-1.10.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e0bedafe4bc165ad0a56ac0bd7695df25c50f76961da29c050712596cf092d6d"}, + {file = "pydantic-1.10.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2e05aed07fa02231dbf03d0adb1be1d79cabb09025dd45aa094aa8b4e7b9dcda"}, + {file = "pydantic-1.10.2-cp39-cp39-win_amd64.whl", hash = "sha256:c1ba1afb396148bbc70e9eaa8c06c1716fdddabaf86e7027c5988bae2a829ab6"}, + {file = "pydantic-1.10.2-py3-none-any.whl", hash = "sha256:1b6ee725bd6e83ec78b1aa32c5b1fa67a3a65badddde3976bca5fe4568f27709"}, + {file = "pydantic-1.10.2.tar.gz", hash = "sha256:91b8e218852ef6007c2b98cd861601c6a09f1aa32bbbb74fab5b1c33d4a1e410"}, ] pygments = [ {file = "Pygments-2.13.0-py3-none-any.whl", hash = "sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42"}, diff --git a/python/pyproject.toml b/python/pyproject.toml index 7be9f64afb0e..b9f1e612edb1 100644 --- a/python/pyproject.toml +++ b/python/pyproject.toml @@ -48,7 +48,7 @@ click = "^8.1.3" rich = "^12.5.1" pyyaml = "^6.0.0" -pydantic = "^1.9.2" +pydantic = "^1.10.2" fsspec = "2022.5.0" pyarrow = { version = "^9.0.0", optional = true }