diff --git a/.asf.yaml b/.asf.yaml index 2cc41779e3..b1f557e903 100644 --- a/.asf.yaml +++ b/.asf.yaml @@ -44,6 +44,7 @@ github: projects: true collaborators: # Note: the number of collaborators is limited to 10 - ajantha-bhat + - syun64 ghp_branch: gh-pages ghp_path: / diff --git a/.github/ISSUE_TEMPLATE/iceberg_bug_report.yml b/.github/ISSUE_TEMPLATE/iceberg_bug_report.yml index 41110cf397..f907f681b1 100644 --- a/.github/ISSUE_TEMPLATE/iceberg_bug_report.yml +++ b/.github/ISSUE_TEMPLATE/iceberg_bug_report.yml @@ -9,7 +9,8 @@ body: description: What Apache Iceberg version are you using? multiple: false options: - - "0.5.0 (latest release)" + - "0.6.0 (latest release)" + - "0.5.0" - "0.4.0" - "0.3.0" - "0.2.0" diff --git a/.github/workflows/check-md-link.yml b/.github/workflows/check-md-link.yml new file mode 100644 index 0000000000..eec019a19c --- /dev/null +++ b/.github/workflows/check-md-link.yml @@ -0,0 +1,16 @@ +name: Check Markdown links + +on: + push: + paths: + - mkdocs/** + branches: + - 'main' + pull_request: + +jobs: + markdown-link-check: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@master + - uses: gaurav-nelson/github-action-markdown-link-check@v1 diff --git a/.github/workflows/python-release.yml b/.github/workflows/python-release.yml index 9c4d1c848b..54446049a4 100644 --- a/.github/workflows/python-release.yml +++ b/.github/workflows/python-release.yml @@ -34,7 +34,7 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - os: [ ubuntu-22.04, windows-2022, macos-11 ] + os: [ ubuntu-22.04, windows-2022, macos-11, macos-12, macos-13, macos-14 ] steps: - uses: actions/checkout@v4 @@ -43,7 +43,7 @@ jobs: - uses: actions/setup-python@v5 with: - python-version: '3.8' + python-version: '3.11' - name: Install poetry run: pip install poetry @@ -59,7 +59,7 @@ jobs: if: startsWith(matrix.os, 'ubuntu') - name: Build wheels - uses: pypa/cibuildwheel@v2.16.2 + uses: pypa/cibuildwheel@v2.17.0 with: output-dir: wheelhouse config-file: "pyproject.toml" @@ -67,7 +67,7 @@ jobs: # Ignore 32 bit architectures CIBW_ARCHS: "auto64" CIBW_PROJECT_REQUIRES_PYTHON: ">=3.8,<3.12" - CIBW_TEST_REQUIRES: "pytest==7.4.2 moto==4.2.2" + CIBW_TEST_REQUIRES: "pytest==7.4.2 moto==5.0.1" CIBW_TEST_EXTRAS: "s3fs,glue" CIBW_TEST_COMMAND: "pytest {project}/tests/avro/test_decoder.py" # There is an upstream issue with installing on MacOSX @@ -80,7 +80,7 @@ jobs: if: startsWith(matrix.os, 'ubuntu') run: ls -lah dist/* && cp dist/* wheelhouse/ - - uses: actions/upload-artifact@v4 + - uses: actions/upload-artifact@v3 with: name: "release-${{ github.event.inputs.version }}" path: ./wheelhouse/* diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index c4424513d2..afdabf6fce 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -29,23 +29,20 @@ repos: - id: check-ast - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version (Used for linting) - rev: v0.1.0 + rev: v0.1.8 hooks: - id: ruff - args: [ --fix, --exit-non-zero-on-fix ] - - repo: https://github.com/ambv/black - rev: 23.10.0 - hooks: - - id: black - args: [--skip-string-normalization] + args: [ --fix, --exit-non-zero-on-fix, --preview ] + - id: ruff-format + args: [ --preview ] - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.6.1 + rev: v1.8.0 hooks: - id: mypy args: [--install-types, --non-interactive, --config=pyproject.toml] - repo: https://github.com/hadialqattan/pycln - rev: v2.3.0 + rev: v2.4.0 hooks: - id: pycln args: [--config=pyproject.toml] diff --git a/Makefile b/Makefile index 
04c57b30e3..c3e816ebd5 100644 --- a/Makefile +++ b/Makefile @@ -19,7 +19,7 @@ install-poetry: pip install poetry==1.7.1 install-dependencies: - poetry install -E pyarrow -E hive -E s3fs -E glue -E adlfs -E duckdb -E ray -E sql-postgres -E gcsfs + poetry install -E pyarrow -E hive -E s3fs -E glue -E adlfs -E duckdb -E ray -E sql-postgres -E gcsfs -E sql-sqlite -E daft install: | install-poetry install-dependencies diff --git a/NOTICE b/NOTICE index d7a3c57526..adcae2d516 100644 --- a/NOTICE +++ b/NOTICE @@ -1,6 +1,6 @@ Apache Iceberg -Copyright 2017-2022 The Apache Software Foundation +Copyright 2017-2024 The Apache Software Foundation This product includes software developed at The Apache Software Foundation (http://www.apache.org/). diff --git a/dev/Dockerfile b/dev/Dockerfile index 1f001f5c12..44783d0809 100644 --- a/dev/Dockerfile +++ b/dev/Dockerfile @@ -38,9 +38,8 @@ WORKDIR ${SPARK_HOME} ENV SPARK_VERSION=3.4.2 ENV ICEBERG_SPARK_RUNTIME_VERSION=3.4_2.12 -ENV ICEBERG_VERSION=1.4.0 -ENV AWS_SDK_VERSION=2.20.18 -ENV PYICEBERG_VERSION=0.4.0 +ENV ICEBERG_VERSION=1.4.2 +ENV PYICEBERG_VERSION=0.5.1 RUN curl --retry 3 -s -C - https://dlcdn.apache.org/spark/spark-${SPARK_VERSION}/spark-${SPARK_VERSION}-bin-hadoop3.tgz -o spark-${SPARK_VERSION}-bin-hadoop3.tgz \ && tar xzf spark-${SPARK_VERSION}-bin-hadoop3.tgz --directory /opt/spark --strip-components 1 \ @@ -51,8 +50,7 @@ RUN curl -s https://repo1.maven.org/maven2/org/apache/iceberg/iceberg-spark-runt && mv iceberg-spark-runtime-${ICEBERG_SPARK_RUNTIME_VERSION}-${ICEBERG_VERSION}.jar /opt/spark/jars # Download AWS bundle -RUN curl -s https://repo1.maven.org/maven2/org/apache/iceberg/iceberg-aws-bundle/${ICEBERG_VERSION}/iceberg-aws-bundle-${ICEBERG_VERSION}.jar -Lo iceberg-aws-bundle-${ICEBERG_VERSION}.jar \ - && mv iceberg-aws-bundle-${ICEBERG_VERSION}.jar /opt/spark/jars +RUN curl -s https://repo1.maven.org/maven2/org/apache/iceberg/iceberg-aws-bundle/${ICEBERG_VERSION}/iceberg-aws-bundle-${ICEBERG_VERSION}.jar -Lo /opt/spark/jars/iceberg-aws-bundle-${ICEBERG_VERSION}.jar COPY spark-defaults.conf /opt/spark/conf ENV PATH="/opt/spark/sbin:/opt/spark/bin:${PATH}" @@ -62,7 +60,7 @@ RUN chmod u+x /opt/spark/sbin/* && \ RUN pip3 install -q ipython -RUN pip3 install "pyiceberg[s3fs]==${PYICEBERG_VERSION}" +RUN pip3 install "pyiceberg[s3fs,hive]==${PYICEBERG_VERSION}" COPY entrypoint.sh . COPY provision.py . diff --git a/dev/docker-compose-integration.yml b/dev/docker-compose-integration.yml index 658bd698c9..fccdcdc757 100644 --- a/dev/docker-compose-integration.yml +++ b/dev/docker-compose-integration.yml @@ -25,6 +25,7 @@ services: iceberg_net: depends_on: - rest + - hive - minio volumes: - ./warehouse:/home/iceberg/warehouse @@ -37,6 +38,7 @@ services: - 8080:8080 links: - rest:rest + - hive:hive - minio:minio rest: image: tabulario/iceberg-rest @@ -85,5 +87,17 @@ services: /usr/bin/mc policy set public minio/warehouse; tail -f /dev/null " + hive: + build: hive/ + container_name: hive + hostname: hive + networks: + iceberg_net: + ports: + - 9083:9083 + environment: + SERVICE_NAME: "metastore" + SERVICE_OPTS: "-Dmetastore.warehouse.dir=s3a://warehouse/hive/" + networks: iceberg_net: diff --git a/dev/hive/Dockerfile b/dev/hive/Dockerfile new file mode 100644 index 0000000000..ee633934c3 --- /dev/null +++ b/dev/hive/Dockerfile @@ -0,0 +1,34 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. 
See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +FROM openjdk:8-jre-slim AS build + +RUN apt-get update -qq && apt-get -qq -y install curl + +ENV AWSSDK_VERSION=2.20.18 +ENV HADOOP_VERSION=3.1.0 + +RUN curl https://repo1.maven.org/maven2/com/amazonaws/aws-java-sdk-bundle/1.11.271/aws-java-sdk-bundle-1.11.271.jar -Lo /tmp/aws-java-sdk-bundle-1.11.271.jar +RUN curl https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-aws/${HADOOP_VERSION}/hadoop-aws-${HADOOP_VERSION}.jar -Lo /tmp/hadoop-aws-${HADOOP_VERSION}.jar + + +FROM apache/hive:3.1.3 + +ENV AWSSDK_VERSION=2.20.18 +ENV HADOOP_VERSION=3.1.0 + +COPY --from=build /tmp/hadoop-aws-${HADOOP_VERSION}.jar /opt/hive/lib/hadoop-aws-${HADOOP_VERSION}.jar +COPY --from=build /tmp/aws-java-sdk-bundle-1.11.271.jar /opt/hive/lib/aws-java-sdk-bundle-1.11.271.jar +COPY core-site.xml /opt/hadoop/etc/hadoop/core-site.xml diff --git a/dev/hive/core-site.xml b/dev/hive/core-site.xml new file mode 100644 index 0000000000..b77332b83b --- /dev/null +++ b/dev/hive/core-site.xml @@ -0,0 +1,53 @@ +<?xml version="1.0"?> +<configuration> + <property> + <name>fs.defaultFS</name> + <value>s3a://warehouse/hive</value> + </property> + <property> + <name>fs.s3a.impl</name> + <value>org.apache.hadoop.fs.s3a.S3AFileSystem</value> + </property> + <property> + <name>fs.s3a.fast.upload</name> + <value>true</value> + </property> + <property> + <name>fs.s3a.endpoint</name> + <value>http://minio:9000</value> + </property> + <property> + <name>fs.s3a.access.key</name> + <value>admin</value> + </property> + <property> + <name>fs.s3a.secret.key</name> + <value>password</value> + </property> + <property> + <name>fs.s3a.connection.ssl.enabled</name> + <value>false</value> + </property> + <property> + <name>fs.s3a.path.style.access</name> + <value>true</value> + </property> +</configuration> diff --git a/dev/provision.py b/dev/provision.py index ca6e5aa6aa..44086caf20 100644 --- a/dev/provision.py +++ b/dev/provision.py @@ -24,300 +24,321 @@ spark = SparkSession.builder.getOrCreate() -spark.sql( - """ - CREATE DATABASE IF NOT EXISTS default; -""" -) - -schema = Schema( - NestedField(field_id=1, name="uuid_col", field_type=UUIDType(), required=False), - NestedField(field_id=2, name="fixed_col", field_type=FixedType(25), required=False), -) - -catalog = load_catalog( - "local", - **{ - "type": "rest", - "uri": "http://rest:8181", - "s3.endpoint": "http://minio:9000", - "s3.access-key-id": "admin", - "s3.secret-access-key": "password", - }, -) - -catalog.create_table(identifier="default.test_uuid_and_fixed_unpartitioned", schema=schema) - -spark.sql( - """ - INSERT INTO default.test_uuid_and_fixed_unpartitioned VALUES - ('102cb62f-e6f8-4eb0-9973-d9b012ff0967', CAST('1234567890123456789012345' AS BINARY)), - ('ec33e4b2-a834-4cc3-8c4a-a1d3bfc2f226', CAST('1231231231231231231231231' AS BINARY)), - ('639cccce-c9d2-494a-a78c-278ab234f024', CAST('12345678901234567ass12345' AS BINARY)), - ('c1b0d8e0-0b0e-4b1e-9b0a-0e0b0d0c0a0b', CAST('asdasasdads12312312312111' AS BINARY)), - ('923dae77-83d6-47cd-b4b0-d383e64ee57e', CAST('qweeqwwqq1231231231231111' AS BINARY)); +catalogs = { + 'rest': load_catalog( + "rest", + **{ + "type": "rest", + "uri": "http://rest:8181", + "s3.endpoint": "http://minio:9000", + "s3.access-key-id": "admin", + "s3.secret-access-key": "password", + }, + ), + 'hive': load_catalog( + "hive",
**{ + "type": "hive", + "uri": "http://hive:9083", + "s3.endpoint": "http://minio:9000", + "s3.access-key-id": "admin", + "s3.secret-access-key": "password", + }, + ), +} + +for catalog_name, catalog in catalogs.items(): + spark.sql( + f""" + CREATE DATABASE IF NOT EXISTS {catalog_name}.default; """ -) + ) -spark.sql( - """ - CREATE OR REPLACE TABLE default.test_null_nan - USING iceberg - AS SELECT - 1 AS idx, - float('NaN') AS col_numeric -UNION ALL SELECT - 2 AS idx, - null AS col_numeric -UNION ALL SELECT - 3 AS idx, - 1 AS col_numeric -""" -) - -spark.sql( - """ - CREATE OR REPLACE TABLE default.test_null_nan_rewritten - USING iceberg - AS SELECT * FROM default.test_null_nan -""" -) + schema = Schema( + NestedField(field_id=1, name="uuid_col", field_type=UUIDType(), required=False), + NestedField(field_id=2, name="fixed_col", field_type=FixedType(25), required=False), + ) -spark.sql( - """ -CREATE OR REPLACE TABLE default.test_limit as - SELECT * LATERAL VIEW explode(ARRAY(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)) AS idx; -""" -) + catalog.create_table(identifier="default.test_uuid_and_fixed_unpartitioned", schema=schema) -spark.sql( - """ -CREATE OR REPLACE TABLE default.test_positional_mor_deletes ( - dt date, - number integer, - letter string -) -USING iceberg -TBLPROPERTIES ( - 'write.delete.mode'='merge-on-read', - 'write.update.mode'='merge-on-read', - 'write.merge.mode'='merge-on-read', - 'format-version'='2' -); -""" -) - -# Partitioning is not really needed, but there is a bug: -# https://github.com/apache/iceberg/pull/7685 -spark.sql( - """ - ALTER TABLE default.test_positional_mor_deletes ADD PARTITION FIELD years(dt) AS dt_years -""" -) + spark.sql( + f""" + INSERT INTO {catalog_name}.default.test_uuid_and_fixed_unpartitioned VALUES + ('102cb62f-e6f8-4eb0-9973-d9b012ff0967', CAST('1234567890123456789012345' AS BINARY)), + ('ec33e4b2-a834-4cc3-8c4a-a1d3bfc2f226', CAST('1231231231231231231231231' AS BINARY)), + ('639cccce-c9d2-494a-a78c-278ab234f024', CAST('12345678901234567ass12345' AS BINARY)), + ('c1b0d8e0-0b0e-4b1e-9b0a-0e0b0d0c0a0b', CAST('asdasasdads12312312312111' AS BINARY)), + ('923dae77-83d6-47cd-b4b0-d383e64ee57e', CAST('qweeqwwqq1231231231231111' AS BINARY)); + """ + ) -spark.sql( - """ -INSERT INTO default.test_positional_mor_deletes -VALUES - (CAST('2023-03-01' AS date), 1, 'a'), - (CAST('2023-03-02' AS date), 2, 'b'), - (CAST('2023-03-03' AS date), 3, 'c'), - (CAST('2023-03-04' AS date), 4, 'd'), - (CAST('2023-03-05' AS date), 5, 'e'), - (CAST('2023-03-06' AS date), 6, 'f'), - (CAST('2023-03-07' AS date), 7, 'g'), - (CAST('2023-03-08' AS date), 8, 'h'), - (CAST('2023-03-09' AS date), 9, 'i'), - (CAST('2023-03-10' AS date), 10, 'j'), - (CAST('2023-03-11' AS date), 11, 'k'), - (CAST('2023-03-12' AS date), 12, 'l'); -""" -) - -spark.sql( - """ -ALTER TABLE default.test_positional_mor_deletes CREATE TAG tag_12 + spark.sql( + f""" + CREATE OR REPLACE TABLE {catalog_name}.default.test_null_nan + USING iceberg + AS SELECT + 1 AS idx, + float('NaN') AS col_numeric + UNION ALL SELECT + 2 AS idx, + null AS col_numeric + UNION ALL SELECT + 3 AS idx, + 1 AS col_numeric """ -) + ) -spark.sql( - """ -ALTER TABLE default.test_positional_mor_deletes CREATE BRANCH without_5 + spark.sql( + f""" + CREATE OR REPLACE TABLE {catalog_name}.default.test_null_nan_rewritten + USING iceberg + AS SELECT * FROM default.test_null_nan """ -) + ) -spark.sql( + spark.sql( + f""" + CREATE OR REPLACE TABLE {catalog_name}.default.test_limit as + SELECT * LATERAL VIEW explode(ARRAY(1, 2, 3, 4, 5, 6, 7, 8, 
9, 10)) AS idx; """ -DELETE FROM default.test_positional_mor_deletes.branch_without_5 WHERE number = 5 + ) + + spark.sql( + f""" + CREATE OR REPLACE TABLE {catalog_name}.default.test_positional_mor_deletes ( + dt date, + number integer, + letter string + ) + USING iceberg + TBLPROPERTIES ( + 'write.delete.mode'='merge-on-read', + 'write.update.mode'='merge-on-read', + 'write.merge.mode'='merge-on-read', + 'format-version'='2' + ); """ -) + ) + # Partitioning is not really needed, but there is a bug: + # https://github.com/apache/iceberg/pull/7685 + spark.sql(f"ALTER TABLE {catalog_name}.default.test_positional_mor_deletes ADD PARTITION FIELD years(dt) AS dt_years") -spark.sql( + spark.sql( + f""" + INSERT INTO {catalog_name}.default.test_positional_mor_deletes + VALUES + (CAST('2023-03-01' AS date), 1, 'a'), + (CAST('2023-03-02' AS date), 2, 'b'), + (CAST('2023-03-03' AS date), 3, 'c'), + (CAST('2023-03-04' AS date), 4, 'd'), + (CAST('2023-03-05' AS date), 5, 'e'), + (CAST('2023-03-06' AS date), 6, 'f'), + (CAST('2023-03-07' AS date), 7, 'g'), + (CAST('2023-03-08' AS date), 8, 'h'), + (CAST('2023-03-09' AS date), 9, 'i'), + (CAST('2023-03-10' AS date), 10, 'j'), + (CAST('2023-03-11' AS date), 11, 'k'), + (CAST('2023-03-12' AS date), 12, 'l'); """ -DELETE FROM default.test_positional_mor_deletes WHERE number = 9 -""" -) + ) -spark.sql( - """ - CREATE OR REPLACE TABLE default.test_positional_mor_double_deletes ( - dt date, - number integer, - letter string - ) - USING iceberg - TBLPROPERTIES ( - 'write.delete.mode'='merge-on-read', - 'write.update.mode'='merge-on-read', - 'write.merge.mode'='merge-on-read', - 'format-version'='2' - ); -""" -) - -# Partitioning is not really needed, but there is a bug: -# https://github.com/apache/iceberg/pull/7685 -spark.sql( - """ - ALTER TABLE default.test_positional_mor_double_deletes ADD PARTITION FIELD years(dt) AS dt_years -""" -) + spark.sql(f"ALTER TABLE {catalog_name}.default.test_positional_mor_deletes CREATE TAG tag_12") -spark.sql( - """ -INSERT INTO default.test_positional_mor_double_deletes -VALUES - (CAST('2023-03-01' AS date), 1, 'a'), - (CAST('2023-03-02' AS date), 2, 'b'), - (CAST('2023-03-03' AS date), 3, 'c'), - (CAST('2023-03-04' AS date), 4, 'd'), - (CAST('2023-03-05' AS date), 5, 'e'), - (CAST('2023-03-06' AS date), 6, 'f'), - (CAST('2023-03-07' AS date), 7, 'g'), - (CAST('2023-03-08' AS date), 8, 'h'), - (CAST('2023-03-09' AS date), 9, 'i'), - (CAST('2023-03-10' AS date), 10, 'j'), - (CAST('2023-03-11' AS date), 11, 'k'), - (CAST('2023-03-12' AS date), 12, 'l'); -""" -) - -spark.sql( + spark.sql(f"ALTER TABLE {catalog_name}.default.test_positional_mor_deletes CREATE BRANCH without_5") + + spark.sql(f"DELETE FROM {catalog_name}.default.test_positional_mor_deletes.branch_without_5 WHERE number = 5") + + spark.sql(f"DELETE FROM {catalog_name}.default.test_positional_mor_deletes WHERE number = 9") + + spark.sql( + f""" + CREATE OR REPLACE TABLE {catalog_name}.default.test_positional_mor_double_deletes ( + dt date, + number integer, + letter string + ) + USING iceberg + TBLPROPERTIES ( + 'write.delete.mode'='merge-on-read', + 'write.update.mode'='merge-on-read', + 'write.merge.mode'='merge-on-read', + 'format-version'='2' + ); """ - DELETE FROM default.test_positional_mor_double_deletes WHERE number = 9 -""" -) + ) + + # Partitioning is not really needed, but there is a bug: + # https://github.com/apache/iceberg/pull/7685 + spark.sql(f"ALTER TABLE {catalog_name}.default.test_positional_mor_double_deletes ADD PARTITION FIELD years(dt) AS 
dt_years") -spark.sql( + spark.sql( + f""" + INSERT INTO {catalog_name}.default.test_positional_mor_double_deletes + VALUES + (CAST('2023-03-01' AS date), 1, 'a'), + (CAST('2023-03-02' AS date), 2, 'b'), + (CAST('2023-03-03' AS date), 3, 'c'), + (CAST('2023-03-04' AS date), 4, 'd'), + (CAST('2023-03-05' AS date), 5, 'e'), + (CAST('2023-03-06' AS date), 6, 'f'), + (CAST('2023-03-07' AS date), 7, 'g'), + (CAST('2023-03-08' AS date), 8, 'h'), + (CAST('2023-03-09' AS date), 9, 'i'), + (CAST('2023-03-10' AS date), 10, 'j'), + (CAST('2023-03-11' AS date), 11, 'k'), + (CAST('2023-03-12' AS date), 12, 'l'); """ - DELETE FROM default.test_positional_mor_double_deletes WHERE letter == 'f' -""" -) - -all_types_dataframe = ( - spark.range(0, 5, 1, 5) - .withColumnRenamed("id", "longCol") - .withColumn("intCol", expr("CAST(longCol AS INT)")) - .withColumn("floatCol", expr("CAST(longCol AS FLOAT)")) - .withColumn("doubleCol", expr("CAST(longCol AS DOUBLE)")) - .withColumn("dateCol", date_add(current_date(), 1)) - .withColumn("timestampCol", expr("TO_TIMESTAMP(dateCol)")) - .withColumn("stringCol", expr("CAST(dateCol AS STRING)")) - .withColumn("booleanCol", expr("longCol > 5")) - .withColumn("binaryCol", expr("CAST(longCol AS BINARY)")) - .withColumn("byteCol", expr("CAST(longCol AS BYTE)")) - .withColumn("decimalCol", expr("CAST(longCol AS DECIMAL(10, 2))")) - .withColumn("shortCol", expr("CAST(longCol AS SHORT)")) - .withColumn("mapCol", expr("MAP(longCol, decimalCol)")) - .withColumn("arrayCol", expr("ARRAY(longCol)")) - .withColumn("structCol", expr("STRUCT(mapCol, arrayCol)")) -) - -all_types_dataframe.writeTo("default.test_all_types").tableProperty("format-version", "2").partitionedBy( - "intCol" -).createOrReplace() - -for table_name, partition in [ - ("test_partitioned_by_identity", "ts"), - ("test_partitioned_by_years", "years(dt)"), - ("test_partitioned_by_months", "months(dt)"), - ("test_partitioned_by_days", "days(ts)"), - ("test_partitioned_by_hours", "hours(ts)"), - ("test_partitioned_by_truncate", "truncate(1, letter)"), - ("test_partitioned_by_bucket", "bucket(16, number)"), -]: + ) + + spark.sql(f"DELETE FROM {catalog_name}.default.test_positional_mor_double_deletes WHERE number = 9") + + spark.sql(f"DELETE FROM {catalog_name}.default.test_positional_mor_double_deletes WHERE letter == 'f'") + + all_types_dataframe = ( + spark.range(0, 5, 1, 5) + .withColumnRenamed("id", "longCol") + .withColumn("intCol", expr("CAST(longCol AS INT)")) + .withColumn("floatCol", expr("CAST(longCol AS FLOAT)")) + .withColumn("doubleCol", expr("CAST(longCol AS DOUBLE)")) + .withColumn("dateCol", date_add(current_date(), 1)) + .withColumn("timestampCol", expr("TO_TIMESTAMP(dateCol)")) + .withColumn("stringCol", expr("CAST(dateCol AS STRING)")) + .withColumn("booleanCol", expr("longCol > 5")) + .withColumn("binaryCol", expr("CAST(longCol AS BINARY)")) + .withColumn("byteCol", expr("CAST(longCol AS BYTE)")) + .withColumn("decimalCol", expr("CAST(longCol AS DECIMAL(10, 2))")) + .withColumn("shortCol", expr("CAST(longCol AS SHORT)")) + .withColumn("mapCol", expr("MAP(longCol, decimalCol)")) + .withColumn("arrayCol", expr("ARRAY(longCol)")) + .withColumn("structCol", expr("STRUCT(mapCol, arrayCol)")) + ) + + all_types_dataframe.writeTo(f"{catalog_name}.default.test_all_types").tableProperty("format-version", "2").partitionedBy( + "intCol" + ).createOrReplace() + + for table_name, partition in [ + ("test_partitioned_by_identity", "ts"), + ("test_partitioned_by_years", "years(dt)"), + ("test_partitioned_by_months", 
"months(dt)"), + ("test_partitioned_by_days", "days(ts)"), + ("test_partitioned_by_hours", "hours(ts)"), + ("test_partitioned_by_truncate", "truncate(1, letter)"), + ("test_partitioned_by_bucket", "bucket(16, number)"), + ]: + spark.sql( + f""" + CREATE OR REPLACE TABLE {catalog_name}.default.{table_name} ( + dt date, + ts timestamp, + number integer, + letter string + ) + USING iceberg; + """ + ) + + spark.sql(f"ALTER TABLE {catalog_name}.default.{table_name} ADD PARTITION FIELD {partition}") + + spark.sql( + f""" + INSERT INTO {catalog_name}.default.{table_name} + VALUES + (CAST('2022-03-01' AS date), CAST('2022-03-01 01:22:00' AS timestamp), 1, 'a'), + (CAST('2022-03-02' AS date), CAST('2022-03-02 02:22:00' AS timestamp), 2, 'b'), + (CAST('2022-03-03' AS date), CAST('2022-03-03 03:22:00' AS timestamp), 3, 'c'), + (CAST('2022-03-04' AS date), CAST('2022-03-04 04:22:00' AS timestamp), 4, 'd'), + (CAST('2023-03-05' AS date), CAST('2023-03-05 05:22:00' AS timestamp), 5, 'e'), + (CAST('2023-03-06' AS date), CAST('2023-03-06 06:22:00' AS timestamp), 6, 'f'), + (CAST('2023-03-07' AS date), CAST('2023-03-07 07:22:00' AS timestamp), 7, 'g'), + (CAST('2023-03-08' AS date), CAST('2023-03-08 08:22:00' AS timestamp), 8, 'h'), + (CAST('2023-03-09' AS date), CAST('2023-03-09 09:22:00' AS timestamp), 9, 'i'), + (CAST('2023-03-10' AS date), CAST('2023-03-10 10:22:00' AS timestamp), 10, 'j'), + (CAST('2023-03-11' AS date), CAST('2023-03-11 11:22:00' AS timestamp), 11, 'k'), + (CAST('2023-03-12' AS date), CAST('2023-03-12 12:22:00' AS timestamp), 12, 'l'); + """ + ) + + # There is an issue with CREATE OR REPLACE + # https://github.com/apache/iceberg/issues/8756 + spark.sql(f"DROP TABLE IF EXISTS {catalog_name}.default.test_table_version") + spark.sql( f""" - CREATE OR REPLACE TABLE default.{table_name} ( + CREATE TABLE {catalog_name}.default.test_table_version ( dt date, - ts timestamp, number integer, letter string - ) - USING iceberg; + ) + USING iceberg + TBLPROPERTIES ( + 'format-version'='1' + ); + """ + ) + + spark.sql( + f""" + CREATE TABLE {catalog_name}.default.test_table_sanitized_character ( + `letter/abc` string + ) + USING iceberg + TBLPROPERTIES ( + 'format-version'='1' + ); """ ) - spark.sql(f"ALTER TABLE default.{table_name} ADD PARTITION FIELD {partition}") + spark.sql( + f""" + INSERT INTO {catalog_name}.default.test_table_sanitized_character + VALUES + ('123') + """ + ) spark.sql( f""" - INSERT INTO default.{table_name} + INSERT INTO {catalog_name}.default.test_table_sanitized_character VALUES - (CAST('2022-03-01' AS date), CAST('2022-03-01 01:22:00' AS timestamp), 1, 'a'), - (CAST('2022-03-02' AS date), CAST('2022-03-02 02:22:00' AS timestamp), 2, 'b'), - (CAST('2022-03-03' AS date), CAST('2022-03-03 03:22:00' AS timestamp), 3, 'c'), - (CAST('2022-03-04' AS date), CAST('2022-03-04 04:22:00' AS timestamp), 4, 'd'), - (CAST('2023-03-05' AS date), CAST('2023-03-05 05:22:00' AS timestamp), 5, 'e'), - (CAST('2023-03-06' AS date), CAST('2023-03-06 06:22:00' AS timestamp), 6, 'f'), - (CAST('2023-03-07' AS date), CAST('2023-03-07 07:22:00' AS timestamp), 7, 'g'), - (CAST('2023-03-08' AS date), CAST('2023-03-08 08:22:00' AS timestamp), 8, 'h'), - (CAST('2023-03-09' AS date), CAST('2023-03-09 09:22:00' AS timestamp), 9, 'i'), - (CAST('2023-03-10' AS date), CAST('2023-03-10 10:22:00' AS timestamp), 10, 'j'), - (CAST('2023-03-11' AS date), CAST('2023-03-11 11:22:00' AS timestamp), 11, 'k'), - (CAST('2023-03-12' AS date), CAST('2023-03-12 12:22:00' AS timestamp), 12, 'l'); + ('123') """ ) -# There 
is an issue with CREATE OR REPLACE -# https://github.com/apache/iceberg/issues/8756 -spark.sql( + spark.sql( + f""" + CREATE TABLE {catalog_name}.default.test_table_add_column ( + a string + ) + USING iceberg """ -DROP TABLE IF EXISTS default.test_table_version -""" -) + ) -spark.sql( + spark.sql(f"INSERT INTO {catalog_name}.default.test_table_add_column VALUES ('1')") + + spark.sql(f"ALTER TABLE {catalog_name}.default.test_table_add_column ADD COLUMN b string") + + spark.sql(f"INSERT INTO {catalog_name}.default.test_table_add_column VALUES ('2', '2')") + + spark.sql( + f""" + CREATE TABLE {catalog_name}.default.test_table_empty_list_and_map ( + col_list array, + col_map map, + col_list_with_struct array> + ) + USING iceberg + TBLPROPERTIES ( + 'format-version'='1' + ); """ -CREATE TABLE default.test_table_version ( - dt date, - number integer, - letter string -) -USING iceberg -TBLPROPERTIES ( - 'format-version'='1' -); -""" -) - -spark.sql( + ) + + spark.sql( + f""" + INSERT INTO {catalog_name}.default.test_table_empty_list_and_map + VALUES (null, null, null), + (array(), map(), array(struct(1))) """ -CREATE TABLE default.test_table_sanitized_character ( - `letter/abc` string -) -USING iceberg -TBLPROPERTIES ( - 'format-version'='1' -); -""" -) - -spark.sql( - f""" -INSERT INTO default.test_table_sanitized_character -VALUES - ('123') -""" -) + ) diff --git a/dev/spark-defaults.conf b/dev/spark-defaults.conf index 56c345432a..2316336fea 100644 --- a/dev/spark-defaults.conf +++ b/dev/spark-defaults.conf @@ -16,13 +16,19 @@ # spark.sql.extensions org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions -spark.sql.catalog.demo org.apache.iceberg.spark.SparkCatalog -spark.sql.catalog.demo.type rest -spark.sql.catalog.demo.uri http://rest:8181 -spark.sql.catalog.demo.io-impl org.apache.iceberg.aws.s3.S3FileIO -spark.sql.catalog.demo.warehouse s3://warehouse/wh/ -spark.sql.catalog.demo.s3.endpoint http://minio:9000 -spark.sql.defaultCatalog demo +spark.sql.catalog.rest org.apache.iceberg.spark.SparkCatalog +spark.sql.catalog.rest.type rest +spark.sql.catalog.rest.uri http://rest:8181 +spark.sql.catalog.rest.io-impl org.apache.iceberg.aws.s3.S3FileIO +spark.sql.catalog.rest.warehouse s3://warehouse/rest/ +spark.sql.catalog.rest.s3.endpoint http://minio:9000 +spark.sql.catalog.hive org.apache.iceberg.spark.SparkCatalog +spark.sql.catalog.hive.type hive +spark.sql.catalog.hive.uri http://hive:9083 +spark.sql.catalog.hive.io-impl org.apache.iceberg.aws.s3.S3FileIO +spark.sql.catalog.hive.warehouse s3://warehouse/hive/ +spark.sql.catalog.hive.s3.endpoint http://minio:9000 +spark.sql.defaultCatalog rest spark.eventLog.enabled true spark.eventLog.dir /home/iceberg/spark-events spark.history.fs.logDirectory /home/iceberg/spark-events diff --git a/mkdocs/docs/SUMMARY.md b/mkdocs/docs/SUMMARY.md index 46383823ec..5cf753d4c3 100644 --- a/mkdocs/docs/SUMMARY.md +++ b/mkdocs/docs/SUMMARY.md @@ -17,15 +17,19 @@ -- [Home](index.md) + + +- [Getting started](index.md) - [Configuration](configuration.md) - [CLI](cli.md) - [API](api.md) - [Contributing](contributing.md) -- [Feature support](feature-support.md) +- [Community](community.md) - Releases - [Verify a release](verify-release.md) - [How to release](how-to-release.md) - [Code Reference](reference/) + + diff --git a/mkdocs/docs/api.md b/mkdocs/docs/api.md index 517e52f185..724a45c52f 100644 --- a/mkdocs/docs/api.md +++ b/mkdocs/docs/api.md @@ -146,6 +146,25 @@ catalog.create_table( ) ``` +To create a table using a pyarrow schema: + +```python 
+import pyarrow as pa + +schema = pa.schema( + [ + pa.field("foo", pa.string(), nullable=True), + pa.field("bar", pa.int32(), nullable=False), + pa.field("baz", pa.bool_(), nullable=True), + ] +) + +catalog.create_table( + identifier="docs_example.bids", + schema=schema, +) +``` + ## Load a table ### Catalog table @@ -175,6 +194,104 @@ static_table = StaticTable.from_metadata( The static-table is considered read-only. +## Write support + +With PyIceberg 0.6.0, write support is added through Arrow. Let's consider an Arrow Table: + +```python +import pyarrow as pa + +df = pa.Table.from_pylist( + [ + {"city": "Amsterdam", "lat": 52.371807, "long": 4.896029}, + {"city": "San Francisco", "lat": 37.773972, "long": -122.431297}, + {"city": "Drachten", "lat": 53.11254, "long": 6.0989}, + {"city": "Paris", "lat": 48.864716, "long": 2.349014}, + ], +) +``` + +Next, create a table based on the schema: + +```python +from pyiceberg.catalog import load_catalog + +catalog = load_catalog("default") + +from pyiceberg.schema import Schema +from pyiceberg.types import NestedField, StringType, DoubleType + +schema = Schema( + NestedField(1, "city", StringType(), required=False), + NestedField(2, "lat", DoubleType(), required=False), + NestedField(3, "long", DoubleType(), required=False), +) + +tbl = catalog.create_table("default.cities", schema=schema) +``` + +Now write the data to the table: + + + +!!! note inline end "Fast append" + PyIceberg defaults to the [fast append](https://iceberg.apache.org/spec/#snapshots) to minimize the amount of data written. This enables quick writes, reducing the possibility of conflicts. The downside of the fast append is that it creates more metadata than a normal commit. [Compaction is planned](https://github.com/apache/iceberg-python/issues/270) and will automatically rewrite all the metadata when a threshold is hit, to maintain performant reads. + + + +```python +tbl.append(df) + +# or + +tbl.overwrite(df) +``` + +The data is written to the table, and when the table is read using `tbl.scan().to_arrow()`: + +``` +pyarrow.Table +city: string +lat: double +long: double +---- +city: [["Amsterdam","San Francisco","Drachten","Paris"]] +lat: [[52.371807,37.773972,53.11254,48.864716]] +long: [[4.896029,-122.431297,6.0989,2.349014]] +``` + +You can use either `append(df)` or `overwrite(df)` since there is no data yet. If we want to add more data, we can use `.append()` again: + +```python +df = pa.Table.from_pylist( + [{"city": "Groningen", "lat": 53.21917, "long": 6.56667}], +) + +tbl.append(df) +``` + +When reading the table with `tbl.scan().to_arrow()`, you can see that `Groningen` is now also part of the table: + +``` +pyarrow.Table +city: string +lat: double +long: double +---- +city: [["Amsterdam","San Francisco","Drachten","Paris"],["Groningen"]] +lat: [[52.371807,37.773972,53.11254,48.864716],[53.21917]] +long: [[4.896029,-122.431297,6.0989,2.349014],[6.56667]] +``` + +The nested lists indicate the different Arrow buffers, where the first write results in one buffer, and the second append in a separate buffer. This is expected since it reads two Parquet files. + + + +!!! example "Under development" + Writing using PyIceberg is still under development. Support for [partial overwrites](https://github.com/apache/iceberg-python/issues/268) and writing to [partitioned tables](https://github.com/apache/iceberg-python/issues/208) is planned and being worked on. + + +
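Since each fast append commits a new snapshot, one way to see the behaviour described above is to list the snapshots the two writes produced. A small sketch (reusing `tbl` from the examples above; the IDs and summary values will differ per run):

```python
# Each append/overwrite above committed one snapshot; list them via the table metadata.
for snapshot in tbl.metadata.snapshots:
    print(snapshot.snapshot_id, snapshot.summary)

# The snapshot the table currently points at:
print(tbl.current_snapshot())
```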
 ## Schema evolution PyIceberg supports full schema evolution through the Python API. It takes care of setting the field-IDs and makes sure that only non-breaking changes are done (can be overridden). @@ -195,6 +312,47 @@ with table.transaction() as transaction: # ... Update properties etc ``` +### Union by Name + +Using `.union_by_name()` you can merge another schema into an existing schema without having to worry about field-IDs: + +```python +from pyiceberg.catalog import load_catalog +from pyiceberg.schema import Schema +from pyiceberg.types import NestedField, StringType, DoubleType, LongType + +catalog = load_catalog() + +schema = Schema( + NestedField(1, "city", StringType(), required=False), + NestedField(2, "lat", DoubleType(), required=False), + NestedField(3, "long", DoubleType(), required=False), +) + +table = catalog.create_table("default.locations", schema) + +new_schema = Schema( + NestedField(1, "city", StringType(), required=False), + NestedField(2, "lat", DoubleType(), required=False), + NestedField(3, "long", DoubleType(), required=False), + NestedField(10, "population", LongType(), required=False), +) + +with table.update_schema() as update: + update.union_by_name(new_schema) +``` + +Now the table holds the union of the two schemas, as `print(table.schema())` shows: + +``` +table { + 1: city: optional string + 2: lat: optional double + 3: long: optional double + 4: population: optional long +} +``` + ### Add column Using `add_column` you can add a column, without having to worry about the field-id: @@ -260,6 +418,63 @@ with table.update_schema(allow_incompatible_changes=True) as update: update.delete_column("some_field") ``` +## Partition evolution + +PyIceberg supports partition evolution. See the [partition evolution](https://iceberg.apache.org/spec/#partition-evolution) section of the spec +for more details. + +The API to use when evolving partitions is the `update_spec` API on the table. + +```python +with table.update_spec() as update: + update.add_field("id", BucketTransform(16), "bucketed_id") + update.add_field("event_ts", DayTransform(), "day_ts") +``` + +Updating the partition spec can also be done as part of a transaction with other operations. + +```python +with table.transaction() as transaction: + with transaction.update_spec() as update_spec: + update_spec.add_field("id", BucketTransform(16), "bucketed_id") + update_spec.add_field("event_ts", DayTransform(), "day_ts") + # ... Update properties etc +``` + +### Add fields + +New partition fields can be added via the `add_field` API which takes in the field name to partition on, +the partition transform, and an optional partition name. If the partition name is not specified, +one will be created. + +```python +with table.update_spec() as update: + update.add_field("id", BucketTransform(16), "bucketed_id") + update.add_field("event_ts", DayTransform(), "day_ts") + # identity is a shortcut API for adding an IdentityTransform + update.identity("some_field") +``` + +### Remove fields + +Partition fields can also be removed via the `remove_field` API if it no longer makes sense to partition on those fields. + +```python +with table.update_spec() as update: + # Remove the partition field with the name some_partition_name + update.remove_field("some_partition_name") +``` + +### Rename fields + +Partition fields can also be renamed via the `rename_field` API. 
+ +```python +with table.update_spec() as update: + # Rename the partition field with the name bucketed_id to sharded_id + update.rename_field("bucketed_id", "sharded_id") +``` + ## Table properties Set and remove properties through the `Transaction` API: @@ -478,3 +693,56 @@ print(ray_dataset.take(2)) }, ] ``` + +### Daft + +PyIceberg interfaces closely with Daft Dataframes (see also: [Daft integration with Iceberg](https://www.getdaft.io/projects/docs/en/latest/user_guide/integrations/iceberg.html)) which provides a full lazily optimized query engine interface on top of PyIceberg tables. + + + +!!! note "Requirements" + This requires [Daft to be installed](index.md). + + + +A table can be read easily into a Daft Dataframe: + +```python +df = table.to_daft() # equivalent to `daft.read_iceberg(table)` +df = df.where(df["trip_distance"] >= 10.0) +df = df.select("VendorID", "tpep_pickup_datetime", "tpep_dropoff_datetime") +``` + +This returns a Daft Dataframe which is lazily materialized. Printing `df` will display the schema: + +``` +╭──────────┬───────────────────────────────┬───────────────────────────────╮ +│ VendorID ┆ tpep_pickup_datetime ┆ tpep_dropoff_datetime │ +│ --- ┆ --- ┆ --- │ +│ Int64 ┆ Timestamp(Microseconds, None) ┆ Timestamp(Microseconds, None) │ +╰──────────┴───────────────────────────────┴───────────────────────────────╯ + +(No data to display: Dataframe not materialized) +``` + +We can execute the Dataframe to preview the first few rows of the query with `df.show()`. + +This is correctly optimized to take advantage of Iceberg features such as hidden partitioning and file-level statistics for efficient reads. + +```python +df.show(2) +``` + +``` +╭──────────┬───────────────────────────────┬───────────────────────────────╮ +│ VendorID ┆ tpep_pickup_datetime ┆ tpep_dropoff_datetime │ +│ --- ┆ --- ┆ --- │ +│ Int64 ┆ Timestamp(Microseconds, None) ┆ Timestamp(Microseconds, None) │ +╞══════════╪═══════════════════════════════╪═══════════════════════════════╡ +│ 2 ┆ 2008-12-31T23:23:50.000000 ┆ 2009-01-01T00:34:31.000000 │ +├╌╌╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌┤ +│ 2 ┆ 2008-12-31T23:05:03.000000 ┆ 2009-01-01T16:10:18.000000 │ +╰──────────┴───────────────────────────────┴───────────────────────────────╯ + +(Showing first 2 rows) +``` diff --git a/mkdocs/docs/cli.md b/mkdocs/docs/cli.md index 695011a6ef..28e44955d7 100644 --- a/mkdocs/docs/cli.md +++ b/mkdocs/docs/cli.md @@ -36,6 +36,7 @@ Options: --catalog TEXT --verbose BOOLEAN --output [text|json] +--ugi TEXT --uri TEXT --credential TEXT --help Show this message and exit. diff --git a/mkdocs/docs/community.md b/mkdocs/docs/community.md new file mode 100644 index 0000000000..4c542bff8e --- /dev/null +++ b/mkdocs/docs/community.md @@ -0,0 +1,64 @@ +--- +hide: + - navigation +--- + + + +# Join the community + +Apache Iceberg tracks issues in GitHub and prefers to receive contributions as pull requests. + +Community discussions happen primarily on the [dev mailing list](https://lists.apache.org/list.html?dev@iceberg.apache.org), on [Apache Iceberg Slack workspace](https://join.slack.com/t/apache-iceberg/shared_invite/zt-287g3akar-K9Oe_En5j1UL7Y_Ikpai3A) in the #python channel, and on specific [GitHub issues](https://github.com/apache/iceberg-python/issues). + +## Iceberg Community Events + +The PyIceberg community sync is on the last Tuesday of every month. To join, make sure to subscribe to the [iceberg-python-sync Google group](https://groups.google.com/g/iceberg-python-sync). 
+ +## Community Guidelines + +### Apache Iceberg Community Guidelines + +The Apache Iceberg community is built on the principles described in the [Apache Way](https://www.apache.org/theapacheway/index.html) +and all who engage with the community are expected to be respectful, open, come with the best interests of the community in mind, +and abide by the Apache Foundation [Code of Conduct](https://www.apache.org/foundation/policies/conduct.html). + +### Participants with Corporate Interests + +A wide range of corporate entities have interests that overlap in both features and frameworks related to Iceberg and while we +encourage engagement and contributions, the community is not a venue for marketing, solicitation, or recruitment. + +Any vendor who wants to participate in the Apache Iceberg community Slack workspace should create a dedicated vendor channel +for their organization prefixed by `vendor-`. + +This space can be used to discuss features and integration with Iceberg related to the vendor offering. This space should not +be used to promote competing vendor products/services or disparage other vendor offerings. Discussion should be focused on +questions asked by the community and not to expand/introduce/redirect users to alternate offerings. + +### Marketing / Solicitation / Recruiting + +The Apache Iceberg community is a space for everyone to operate free of influence. The development lists, Slack workspace, +and GitHub should not be used to market products or services. Solicitation or overt promotion should not be performed in common +channels or through direct messages. + +Recruitment of community members should not be conducted through direct messages or community channels, but opportunities +related to contributing to or using Iceberg can be posted to the `#jobs` channel. + +For questions regarding any of the guidelines above, please contact a PMC member. diff --git a/mkdocs/docs/configuration.md b/mkdocs/docs/configuration.md index 801bb8fc87..93b198c328 100644 --- a/mkdocs/docs/configuration.md +++ b/mkdocs/docs/configuration.md @@ -46,7 +46,22 @@ The environment variable picked up by Iceberg starts with `PYICEBERG_` and then For example, `PYICEBERG_CATALOG__DEFAULT__S3__ACCESS_KEY_ID`, sets `s3.access-key-id` on the `default` catalog. -## FileIO +# Tables + +Iceberg tables support table properties to configure table behavior. + +## Write options + +| Key | Options | Default | Description | +| --------------------------------- | --------------------------------- | ------- | ------------------------------------------------------------------------------------------- | +| `write.parquet.compression-codec` | `{uncompressed,zstd,gzip,snappy}` | zstd | Sets the Parquet compression codec. | +| `write.parquet.compression-level` | Integer | null | Parquet compression level for the codec. If not set, it is up to PyIceberg | +| `write.parquet.page-size-bytes` | Size in bytes | 1MB | Set a target threshold for the approximate encoded size of data pages within a column chunk | +| `write.parquet.page-row-limit` | Number of rows | 20000 | Set a target threshold for the maximum number of rows within a data page | +| `write.parquet.dict-size-bytes` | Size in bytes | 2MB | Set the dictionary page size limit per row group | +| `write.parquet.row-group-limit` | Number of rows | 122880 | The Parquet row group limit | + +# FileIO Iceberg works with the concept of a FileIO which is a pluggable module for reading, writing, and deleting files. 
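As an illustration, the FileIO properties documented below are set per catalog in `.pyiceberg.yaml`. A minimal sketch that reuses the MinIO endpoint and credentials from the Docker setup earlier in this diff:

```yaml
catalog:
  default:
    uri: http://rest:8181
    s3.endpoint: http://minio:9000
    s3.access-key-id: admin
    s3.secret-access-key: password
```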
By default, PyIceberg will try to initialize the FileIO that's suitable for the scheme (`s3://`, `gs://`, etc.) and will use the first one that's installed. @@ -66,6 +81,8 @@ For the FileIO there are several configuration options available: ### S3 + + | Key | Example | Description | | -------------------- | ------------------------ | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | | s3.endpoint | https://10.0.19.25/ | Configure an alternative endpoint of the S3 service for the FileIO to access. This could be used to use S3FileIO with any s3-compatible object storage service that has a different endpoint, or access a private S3 endpoint in a virtual private cloud. | @@ -76,8 +93,12 @@ For the FileIO there are several configuration options available: | s3.proxy-uri | http://my.proxy.com:8080 | Configure the proxy server to be used by the FileIO. | | s3.connect-timeout | 60.0 | Configure socket connection timeout, in seconds. | + + ### HDFS + + | Key | Example | Description | | -------------------- | ------------------- | ------------------------------------------------ | | hdfs.host | https://10.0.19.25/ | Configure the HDFS host to connect to | @@ -85,8 +106,12 @@ For the FileIO there are several configuration options available: | hdfs.user | user | Configure the HDFS username used for connection. | | hdfs.kerberos_ticket | kerberos_ticket | Configure the path to the Kerberos ticket cache. | + + ### Azure Data lake + + | Key | Example | Description | | ----------------------- | ----------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | | adlfs.connection-string | AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqF...;BlobEndpoint=http://localhost/ | A [connection string](https://learn.microsoft.com/en-us/azure/storage/common/storage-configure-connection-string). This could be used to use FileIO with any adlfs-compatible object storage service that has a different endpoint (like [azurite](https://github.com/azure/azurite)). | @@ -97,8 +122,12 @@ For the FileIO there are several configuration options available: | adlfs.client-id | ad667be4-b811-11ed-afa1-0242ac120002 | The client-id | | adlfs.client-secret | oCA3R6P\*ka#oa1Sms2J74z... | The client-secret | + + ### Google Cloud Storage + + | Key | Example | Description | | -------------------------- | ------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | | gcs.project-id | my-gcp-project | Configure Google Cloud Project for GCS FileIO. | @@ -113,6 +142,8 @@ For the FileIO there are several configuration options available: | gcs.default-location | US | Configure the default location where buckets are created, like 'US' or 'EUROPE-WEST3'. | | gcs.version-aware | False | Configure whether to support object versioning on the GCS bucket. 
| + + ## REST Catalog ```yaml @@ -130,14 +161,36 @@ catalog: cabundle: /absolute/path/to/cabundle.pem ``` -| Key | Example | Description | -| ------------------- | ----------------------- | -------------------------------------------------------------------------- | -| uri | https://rest-catalog/ws | URI identifying the REST Server | -| credential | t-1234:secret | Credential to use for OAuth2 credential flow when initializing the catalog | -| token | FEW23.DFSDF.FSDF | Bearer token value to use for `Authorization` header | -| rest.sigv4-enabled | true | Sign requests to the REST Server using AWS SigV4 protocol | -| rest.signing-region | us-east-1 | The region to use when SigV4 signing a request | -| rest.signing-name | execute-api | The service signing name to use when SigV4 signing a request | + + +| Key | Example | Description | +| ---------------------- | -------------------------------- | -------------------------------------------------------------------------------------------------- | +| uri | https://rest-catalog/ws | URI identifying the REST Server | +| ugi | t-1234:secret | Hadoop UGI for Hive client. | +| credential | t-1234:secret | Credential to use for OAuth2 credential flow when initializing the catalog | +| token | FEW23.DFSDF.FSDF | Bearer token value to use for `Authorization` header | +| scope | openid offline corpds:ds:profile | Desired scope of the requested security token (default : catalog) | +| resource | rest_catalog.iceberg.com | URI for the target resource or service | +| audience | rest_catalog | Logical name of target resource or service | +| rest.sigv4-enabled | true | Sign requests to the REST Server using AWS SigV4 protocol | +| rest.signing-region | us-east-1 | The region to use when SigV4 signing a request | +| rest.signing-name | execute-api | The service signing name to use when SigV4 signing a request | +| rest.authorization-url | https://auth-service/cc | Authentication URL to use for client credentials authentication (default: uri + 'v1/oauth/tokens') | + + + +### Headers in RESTCatalog + +To configure custom headers in RESTCatalog, include them in the catalog properties with the prefix `header.`. This +ensures that all HTTP requests to the REST service include the specified headers. + +```yaml +catalog: + default: + uri: http://rest-catalog/ws/ + credential: t-1234:secret + header.content-type: application/vnd.api+json +``` ## SQL Catalog @@ -217,6 +270,23 @@ catalog: table-name: iceberg ``` +If you prefer to pass the credentials explicitly to the client instead of relying on environment variables, + +```yaml +catalog: + default: + type: dynamodb + table-name: iceberg + aws_access_key_id: + aws_secret_access_key: + aws_session_token: + region_name: +``` + # Concurrency PyIceberg uses multiple threads to parallelize operations. The number of workers can be configured by supplying a `max-workers` entry in the configuration file, or by setting the `PYICEBERG_MAX_WORKERS` environment variable. The default value depends on the system hardware and Python version. See [the Python documentation](https://docs.python.org/3/library/concurrent.futures.html#threadpoolexecutor) for more details. + +# Backward Compatibility + +Previous versions of Java (`<1.4.0`) implementations incorrectly assume the optional attribute `current-snapshot-id` to be a required attribute in TableMetadata. This means that if `current-snapshot-id` is missing in the metadata file (e.g. on table creation), the application will throw an exception without being able to load the table. 
This assumption has been corrected in more recent Iceberg versions. However, it is possible to force PyIceberg to create a table with a metadata file that will be compatible with previous versions. This can be configured by setting the `legacy-current-snapshot-id` entry as "True" in the configuration file, or by setting the `LEGACY_CURRENT_SNAPSHOT_ID` environment variable. Refer to the [PR discussion](https://github.com/apache/iceberg-python/pull/473) for more details on the issue. diff --git a/mkdocs/docs/contributing.md b/mkdocs/docs/contributing.md index 3973b763a0..7411382d2c 100644 --- a/mkdocs/docs/contributing.md +++ b/mkdocs/docs/contributing.md @@ -30,10 +30,12 @@ For the development, Poetry is used for packing and dependency management. You c pip install poetry ``` -If you have an older version of pip and virtualenv you need to update these: +Make sure you're working in an up-to-date virtual environment: ```bash pip install --upgrade virtualenv pip +python -m venv ./venv +source ./venv/bin/activate ``` To get started, you can run `make install`, which installs Poetry and all the dependencies of the Iceberg library. This also installs the development dependencies. If you don't want to install the development dependencies, you need to install using `poetry install --no-dev`. @@ -56,6 +58,22 @@ For IDEA ≤2021 you need to install the [Poetry integration as a plugin](https: Now you're set using Poetry, and all the tests will run in Poetry, and you'll have syntax highlighting in the pyproject.toml to indicate stale dependencies. +## Installation from source + +Clone the repository for local development: + +```sh +git clone https://github.com/apache/iceberg-python.git +cd iceberg-python +pip3 install -e ".[s3fs,hive]" +``` + +Install it directly from GitHub (not recommended, but sometimes handy): + +``` +pip install "git+https://github.com/apache/iceberg-python.git#egg=pyiceberg[s3fs]" +``` + ## Linting `pre-commit` is used for autoformatting and linting: diff --git a/mkdocs/docs/feature-support.md b/mkdocs/docs/feature-support.md deleted file mode 100644 index 9c466c9337..0000000000 --- a/mkdocs/docs/feature-support.md +++ /dev/null @@ -1,90 +0,0 @@ ---- -hide: - - navigation ---- - - - -# Feature Support - -The goal is that the python library will provide a functional, performant subset of the Java library. The initial focus has been on reading table metadata and provide a convenient CLI to go through the catalog. - -## Metadata - -| Operation | Java | Python | -| :----------------------- | :--: | :----: | -| Get Schema | X | X | -| Get Snapshots | X | X | -| Plan Scan | X | X | -| Plan Scan for Snapshot | X | X | -| Update Current Snapshot | X | | -| Create Table | X | X | -| Rename Table | X | X | -| Drop Table | X | X | -| Alter Table | X | | -| Set Table Properties | X | | -| Create Namespace | X | X | -| Drop Namespace | X | X | -| Set Namespace Properties | X | X | - -## Types - -The types are kept in `pyiceberg.types`. - -Primitive types: - -- `BooleanType` -- `StringType` -- `IntegerType` -- `LongType` -- `FloatType` -- `DoubleType` -- `DateType` -- `TimeType` -- `TimestampType` -- `TimestamptzType` -- `BinaryType` -- `UUIDType` - -Complex types: - -- `StructType` -- `ListType` -- `MapType` -- `FixedType(16)` -- `DecimalType(8, 3)` - -## Expressions - -The expressions are kept in `pyiceberg.expressions`. 
- -- `IsNull` -- `NotNull` -- `IsNaN` -- `NotNaN` -- `In` -- `NotIn` -- `EqualTo` -- `NotEqualTo` -- `GreaterThanOrEqual` -- `GreaterThan` -- `LessThanOrEqual` -- `LessThan` -- `And` -- `Or` -- `Not` diff --git a/mkdocs/docs/how-to-release.md b/mkdocs/docs/how-to-release.md index 9cecad2d5d..e41250a1f9 100644 --- a/mkdocs/docs/how-to-release.md +++ b/mkdocs/docs/how-to-release.md @@ -29,7 +29,17 @@ Make sure that the version is correct in `pyproject.toml` and `pyiceberg/__init_ ### Setting the tag -First set the tag on the commit: +Make sure that you're on the right branch, and the latest branch: + +For a Major/Minor release, make sure that you're on `main`, for patch versions the branch corresponding to the version that you want to patch, i.e. `pyiceberg-0.6.x`. + +```bash +git checkout +git fetch --all +git reset --hard apache/ +``` + +Set the tag on the last commit: ```bash export RC=rc1 @@ -136,11 +146,37 @@ cat release-announcement-email.txt ## Vote has passed -Once the vote has been passed, the latest version can be pushed to PyPi. Check out the Apache SVN and make sure to publish the right version with `twine`: +Once the vote has been passed, you can close the vote thread by concluding it: + +``` +Thanks everyone for voting! The 72 hours have passed, and a minimum of 3 binding votes have been cast: + ++1 Foo Bar (non-binding) +... ++1 Fokko Driesprong (binding) + +The release candidate has been accepted as PyIceberg . Thanks everyone, when all artifacts are published the announcement will be sent out. + +Kind regards, +``` + +### Copy the artifacts to the release dist + +``` +svn checkout https://dist.apache.org/repos/dist/dev/iceberg /tmp/iceberg-dist-dev +svn checkout https://dist.apache.org/repos/dist/release/iceberg/ /tmp/iceberg-dist-release + +mkdir -p /tmp/iceberg-dist-release/pyiceberg- +cp -r /tmp/iceberg-dist-dev/pyiceberg-rcN/* /tmp/iceberg-dist-release/pyiceberg- + +svn add /tmp/iceberg-dist-release/ +svn ci -m "PyIceberg " /tmp/iceberg-dist-release/ +``` + +The latest version can be pushed to PyPi. Check out the Apache SVN and make sure to publish the right version with `twine`: ```bash -svn checkout https://dist.apache.org/repos/dist/dev/iceberg /tmp/ -twine upload -s /tmp/iceberg/pyiceberg-0.1.0rc1 +twine upload -s /tmp/iceberg-dist-release/pyiceberg-/* ``` Send out an announcement on the dev mail list: diff --git a/mkdocs/docs/index.md b/mkdocs/docs/index.md index 628f4f7dd4..1fee9cc69b 100644 --- a/mkdocs/docs/index.md +++ b/mkdocs/docs/index.md @@ -20,11 +20,11 @@ hide: - limitations under the License. --> -# PyIceberg +# Getting started with PyIceberg PyIceberg is a Python implementation for accessing Iceberg tables, without the need of a JVM. 
-## Install +## Installation Before installing PyIceberg, make sure that you're on an up-to-date version of `pip`: @@ -38,36 +38,156 @@ You can install the latest release version from pypi: pip install "pyiceberg[s3fs,hive]" ``` -Install it directly for GitHub (not recommended), but sometimes handy: +You can mix and match optional dependencies depending on your needs: + +| Key | Description | +| ------------ | -------------------------------------------------------------------- | +| hive | Support for the Hive metastore | +| glue | Support for AWS Glue | +| dynamodb | Support for AWS DynamoDB | +| sql-postgres | Support for SQL Catalog backed by PostgreSQL | +| sql-sqlite | Support for SQL Catalog backed by SQLite | +| pyarrow | PyArrow as a FileIO implementation to interact with the object store | +| pandas | Installs both PyArrow and Pandas | +| duckdb | Installs both PyArrow and DuckDB | +| ray | Installs PyArrow, Pandas, and Ray | +| daft | Installs Daft | +| s3fs | S3FS as a FileIO implementation to interact with the object store | +| adlfs | ADLFS as a FileIO implementation to interact with the object store | +| snappy | Support for snappy Avro compression | +| gcsfs | GCSFS as a FileIO implementation to interact with the object store | + +You either need to install `s3fs`, `adlfs`, `gcsfs`, or `pyarrow` to be able to fetch files from an object store. + +## Connecting to a catalog + +Iceberg leverages the [catalog to have one centralized place to organize the tables](https://iceberg.apache.org/concepts/catalog/). This can be a traditional Hive catalog to store your Iceberg tables next to the rest, a vendor solution like the AWS Glue catalog, or an implementation of Iceberg's own [REST protocol](https://github.com/apache/iceberg/tree/main/open-api). Check out the [configuration](configuration.md) page to find all the configuration details. + +For the sake of demonstration, we'll configure the catalog to use the `SqlCatalog` implementation, which will store information in a local `sqlite` database. We'll also configure the catalog to store data files in the local filesystem instead of an object store. This should not be used in production due to the limited scalability. +Create a temporary location for Iceberg: + +```shell +mkdir /tmp/warehouse ``` -pip install "git+https://github.com/apache/iceberg-python.git#egg=pyiceberg[s3fs]" + +Open a Python 3 REPL to set up the catalog: + +```python +from pyiceberg.catalog.sql import SqlCatalog + +warehouse_path = "/tmp/warehouse" +catalog = SqlCatalog( + "default", + **{ + "uri": f"sqlite:///{warehouse_path}/pyiceberg_catalog.db", + "warehouse": f"file://{warehouse_path}", + }, +) ``` -Or clone the repository for local development: +## Write a PyArrow dataframe -```sh -git clone https://github.com/apache/iceberg-python.git -cd iceberg-python -pip3 install -e ".[s3fs,hive]" +Let's take the Taxi dataset and write it to an Iceberg table. 
+
+First download one month of data:
+
+```shell
+curl https://d37ci6vzurychx.cloudfront.net/trip-data/yellow_tripdata_2023-01.parquet -o /tmp/yellow_tripdata_2023-01.parquet
 ```
-You can mix and match optional dependencies depending on your needs:
+Load it into a PyArrow dataframe:
+
+```python
+import pyarrow.parquet as pq
+
+df = pq.read_table("/tmp/yellow_tripdata_2023-01.parquet")
+```
+
+Create a new Iceberg table:
+
+```python
+catalog.create_namespace("default")
+
+table = catalog.create_table(
+    "default.taxi_dataset",
+    schema=df.schema,
+)
+```
+
+Append the dataframe to the table:
+
+```python
+table.append(df)
+len(table.scan().to_arrow())
+```
+
+3066766 rows have been written to the table.
+
+Now generate a tip-per-mile feature to train a model on:
+
+```python
+import pyarrow.compute as pc
+
+df = df.append_column("tip_per_mile", pc.divide(df["tip_amount"], df["trip_distance"]))
+```
+
+Evolve the schema of the table with the new column:
+
+```python
+with table.update_schema() as update_schema:
+    update_schema.union_by_name(df.schema)
+```
+
+And now we can write the new dataframe to the Iceberg table:
+
+```python
+table.overwrite(df)
+print(table.scan().to_arrow())
+```
+
+And the new column is there:
+
+```
+taxi_dataset(
+  1: VendorID: optional long,
+  2: tpep_pickup_datetime: optional timestamp,
+  3: tpep_dropoff_datetime: optional timestamp,
+  4: passenger_count: optional double,
+  5: trip_distance: optional double,
+  6: RatecodeID: optional double,
+  7: store_and_fwd_flag: optional string,
+  8: PULocationID: optional long,
+  9: DOLocationID: optional long,
+  10: payment_type: optional long,
+  11: fare_amount: optional double,
+  12: extra: optional double,
+  13: mta_tax: optional double,
+  14: tip_amount: optional double,
+  15: tolls_amount: optional double,
+  16: improvement_surcharge: optional double,
+  17: total_amount: optional double,
+  18: congestion_surcharge: optional double,
+  19: airport_fee: optional double,
+  20: tip_per_mile: optional double
+),
+```
+
+And we can see that 2371784 rows have a tip-per-mile:
+
+```python
+df = table.scan(row_filter="tip_per_mile > 0").to_arrow()
+len(df)
+```
+
+### Explore Iceberg data and metadata files
+
+Since the catalog was configured to use the local filesystem, we can explore how Iceberg saved the data and metadata files for the operations above.
+
+```shell
+find /tmp/warehouse/
+```
+
+## More details
 
-| Key      | Description:                                                          |
-| -------- | -------------------------------------------------------------------- |
-| hive     | Support for the Hive metastore                                        |
-| glue     | Support for AWS Glue                                                  |
-| dynamodb | Support for AWS DynamoDB                                              |
-| pyarrow  | PyArrow as a FileIO implementation to interact with the object store |
-| pandas   | Installs both PyArrow and Pandas                                      |
-| duckdb   | Installs both PyArrow and DuckDB                                      |
-| ray      | Installs PyArrow, Pandas, and Ray                                     |
-| s3fs     | S3FS as a FileIO implementation to interact with the object store    |
-| adlfs    | ADLFS as a FileIO implementation to interact with the object store   |
-| snappy   | Support for snappy Avro compression                                   |
-| gcs      | GCS as the FileIO implementation to interact with the object store   |
-
-You either need to install `s3fs`, `adlfs`, `gcs`, or `pyarrow` for fetching files.
-
-There is both a [CLI](cli.md) and [Python API](api.md) available.
+For the details, please check the [CLI](cli.md) or [Python API](api.md) page.
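+
+One more scan detail that didn't fit the walkthrough above: `scan` can also project columns via `selected_fields`, which keeps the resulting Arrow table small when you only need a few fields. A minimal sketch, reusing the `taxi_dataset` table and the `tip_per_mile` filter from above:
+
+```python
+# Only materialize the two columns we care about.
+projected = table.scan(
+    row_filter="tip_per_mile > 0",
+    selected_fields=("trip_distance", "tip_per_mile"),
+).to_arrow()
+print(projected.num_rows)
+```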
diff --git a/mkdocs/requirements.txt b/mkdocs/requirements.txt index 3692b7c92d..e2099b7c58 100644 --- a/mkdocs/requirements.txt +++ b/mkdocs/requirements.txt @@ -16,13 +16,13 @@ # under the License. mkdocs==1.5.3 -griffe==0.38.1 -jinja2==3.1.2 +griffe==0.40.1 +jinja2==3.1.3 mkdocstrings==0.24.0 -mkdocstrings-python==1.7.5 +mkdocstrings-python==1.8.0 mkdocs-literate-nav==0.6.1 mkdocs-autorefs==0.5.0 mkdocs-gen-files==0.5.0 -mkdocs-material==9.5.3 +mkdocs-material==9.5.10 mkdocs-material-extensions==1.3.1 mkdocs-section-index==0.3.8 diff --git a/poetry.lock b/poetry.lock index f6aa09cb01..4908aff6b7 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2,13 +2,13 @@ [[package]] name = "adlfs" -version = "2023.10.0" +version = "2024.2.0" description = "Access Azure Datalake Gen1 with fsspec and dask" optional = true python-versions = ">=3.8" files = [ - {file = "adlfs-2023.10.0-py3-none-any.whl", hash = "sha256:dfdc8cc782bd78262435fb1bc2a8cfdbdd80342bb1b1ae9dfff968de912b0b09"}, - {file = "adlfs-2023.10.0.tar.gz", hash = "sha256:f5cf06c5b0074d17d43838d4c434791a98420d9e768b36a1a02c7b3930686543"}, + {file = "adlfs-2024.2.0-py3-none-any.whl", hash = "sha256:6514fb147032eea843e4b773c557df90cf6a929185edc7c97f7a12b91bfc1fa1"}, + {file = "adlfs-2024.2.0.tar.gz", hash = "sha256:860f5ddbd7f3c2553d84a101717dc5736e823305e0d51e8c0058bc85a7fa304d"}, ] [package.dependencies] @@ -17,115 +17,116 @@ azure-core = ">=1.23.1,<2.0.0" azure-datalake-store = ">=0.0.46,<0.1" azure-identity = "*" azure-storage-blob = ">=12.12.0" -fsspec = ">=2023.9.0" +fsspec = ">=2023.12.0" [package.extras] docs = ["furo", "myst-parser", "numpydoc", "sphinx"] +tests = ["arrow", "dask[dataframe]", "docker", "pytest", "pytest-mock"] [[package]] name = "aiobotocore" -version = "2.5.4" +version = "2.11.2" description = "Async client for aws services using botocore and aiohttp" optional = true -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "aiobotocore-2.5.4-py3-none-any.whl", hash = "sha256:4b32218728ca3d0be83835b604603a0cd6c329066e884bb78149334267f92440"}, - {file = "aiobotocore-2.5.4.tar.gz", hash = "sha256:60341f19eda77e41e1ab11eef171b5a98b5dbdb90804f5334b6f90e560e31fae"}, + {file = "aiobotocore-2.11.2-py3-none-any.whl", hash = "sha256:487fede588040bfa3a43df945275c28c1c73ca75bf705295adb9fbadd2e89be7"}, + {file = "aiobotocore-2.11.2.tar.gz", hash = "sha256:6dd7352248e3523019c5a54a395d2b1c31080697fc80a9ad2672de4eec8c7abd"}, ] [package.dependencies] -aiohttp = ">=3.3.1,<4.0.0" +aiohttp = ">=3.7.4.post0,<4.0.0" aioitertools = ">=0.5.1,<1.0.0" -botocore = ">=1.31.17,<1.31.18" +botocore = ">=1.33.2,<1.34.35" wrapt = ">=1.10.10,<2.0.0" [package.extras] -awscli = ["awscli (>=1.29.17,<1.29.18)"] -boto3 = ["boto3 (>=1.28.17,<1.28.18)"] +awscli = ["awscli (>=1.31.2,<1.32.35)"] +boto3 = ["boto3 (>=1.33.2,<1.34.35)"] [[package]] name = "aiohttp" -version = "3.9.0" +version = "3.9.3" description = "Async http client/server framework (asyncio)" optional = true python-versions = ">=3.8" files = [ - {file = "aiohttp-3.9.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6896b8416be9ada4d22cd359d7cb98955576ce863eadad5596b7cdfbf3e17c6c"}, - {file = "aiohttp-3.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1736d87dad8ef46a8ec9cddd349fa9f7bd3a064c47dd6469c0d6763d3d49a4fc"}, - {file = "aiohttp-3.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8c9e5f4d7208cda1a2bb600e29069eecf857e6980d0ccc922ccf9d1372c16f4b"}, - {file = "aiohttp-3.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:8488519aa05e636c5997719fe543c8daf19f538f4fa044f3ce94bee608817cff"}, - {file = "aiohttp-3.9.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ab16c254e2312efeb799bc3c06897f65a133b38b69682bf75d1f1ee1a9c43a9"}, - {file = "aiohttp-3.9.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7a94bde005a8f926d0fa38b88092a03dea4b4875a61fbcd9ac6f4351df1b57cd"}, - {file = "aiohttp-3.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b777c9286b6c6a94f50ddb3a6e730deec327e9e2256cb08b5530db0f7d40fd8"}, - {file = "aiohttp-3.9.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:571760ad7736b34d05597a1fd38cbc7d47f7b65deb722cb8e86fd827404d1f6b"}, - {file = "aiohttp-3.9.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:deac0a32aec29608eb25d730f4bc5a261a65b6c48ded1ed861d2a1852577c932"}, - {file = "aiohttp-3.9.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:4ee1b4152bc3190cc40ddd6a14715e3004944263ea208229ab4c297712aa3075"}, - {file = "aiohttp-3.9.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:3607375053df58ed6f23903aa10cf3112b1240e8c799d243bbad0f7be0666986"}, - {file = "aiohttp-3.9.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:65b0a70a25456d329a5e1426702dde67be0fb7a4ead718005ba2ca582d023a94"}, - {file = "aiohttp-3.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5a2eb5311a37fe105aa35f62f75a078537e1a9e4e1d78c86ec9893a3c97d7a30"}, - {file = "aiohttp-3.9.0-cp310-cp310-win32.whl", hash = "sha256:2cbc14a13fb6b42d344e4f27746a4b03a2cb0c1c3c5b932b0d6ad8881aa390e3"}, - {file = "aiohttp-3.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:ac9669990e2016d644ba8ae4758688534aabde8dbbc81f9af129c3f5f01ca9cd"}, - {file = "aiohttp-3.9.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f8e05f5163528962ce1d1806fce763ab893b1c5b7ace0a3538cd81a90622f844"}, - {file = "aiohttp-3.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4afa8f71dba3a5a2e1e1282a51cba7341ae76585345c43d8f0e624882b622218"}, - {file = "aiohttp-3.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f929f4c9b9a00f3e6cc0587abb95ab9c05681f8b14e0fe1daecfa83ea90f8318"}, - {file = "aiohttp-3.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28185e36a78d247c55e9fbea2332d16aefa14c5276a582ce7a896231c6b1c208"}, - {file = "aiohttp-3.9.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a486ddf57ab98b6d19ad36458b9f09e6022de0381674fe00228ca7b741aacb2f"}, - {file = "aiohttp-3.9.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:70e851f596c00f40a2f00a46126c95c2e04e146015af05a9da3e4867cfc55911"}, - {file = "aiohttp-3.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c5b7bf8fe4d39886adc34311a233a2e01bc10eb4e842220235ed1de57541a896"}, - {file = "aiohttp-3.9.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c67a51ea415192c2e53e4e048c78bab82d21955b4281d297f517707dc836bf3d"}, - {file = "aiohttp-3.9.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:694df243f394629bcae2d8ed94c589a181e8ba8604159e6e45e7b22e58291113"}, - {file = "aiohttp-3.9.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3dd8119752dd30dd7bca7d4bc2a92a59be6a003e4e5c2cf7e248b89751b8f4b7"}, - {file = "aiohttp-3.9.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:eb6dfd52063186ac97b4caa25764cdbcdb4b10d97f5c5f66b0fa95052e744eb7"}, - {file = 
"aiohttp-3.9.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:d97c3e286d0ac9af6223bc132dc4bad6540b37c8d6c0a15fe1e70fb34f9ec411"}, - {file = "aiohttp-3.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:816f4db40555026e4cdda604a1088577c1fb957d02f3f1292e0221353403f192"}, - {file = "aiohttp-3.9.0-cp311-cp311-win32.whl", hash = "sha256:3abf0551874fecf95f93b58f25ef4fc9a250669a2257753f38f8f592db85ddea"}, - {file = "aiohttp-3.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:e18d92c3e9e22553a73e33784fcb0ed484c9874e9a3e96c16a8d6a1e74a0217b"}, - {file = "aiohttp-3.9.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:99ae01fb13a618b9942376df77a1f50c20a281390dad3c56a6ec2942e266220d"}, - {file = "aiohttp-3.9.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:05857848da443c8c12110d99285d499b4e84d59918a21132e45c3f0804876994"}, - {file = "aiohttp-3.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:317719d7f824eba55857fe0729363af58e27c066c731bc62cd97bc9c3d9c7ea4"}, - {file = "aiohttp-3.9.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1e3b3c107ccb0e537f309f719994a55621acd2c8fdf6d5ce5152aed788fb940"}, - {file = "aiohttp-3.9.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:45820ddbb276113ead8d4907a7802adb77548087ff5465d5c554f9aa3928ae7d"}, - {file = "aiohttp-3.9.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:05a183f1978802588711aed0dea31e697d760ce9055292db9dc1604daa9a8ded"}, - {file = "aiohttp-3.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51a4cd44788ea0b5e6bb8fa704597af3a30be75503a7ed1098bc5b8ffdf6c982"}, - {file = "aiohttp-3.9.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:673343fbc0c1ac44d0d2640addc56e97a052504beacd7ade0dc5e76d3a4c16e8"}, - {file = "aiohttp-3.9.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7e8a3b79b6d186a9c99761fd4a5e8dd575a48d96021f220ac5b5fa856e5dd029"}, - {file = "aiohttp-3.9.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6777a390e41e78e7c45dab43a4a0196c55c3b8c30eebe017b152939372a83253"}, - {file = "aiohttp-3.9.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7ae5f99a32c53731c93ac3075abd3e1e5cfbe72fc3eaac4c27c9dd64ba3b19fe"}, - {file = "aiohttp-3.9.0-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:f1e4f254e9c35d8965d377e065c4a8a55d396fe87c8e7e8429bcfdeeb229bfb3"}, - {file = "aiohttp-3.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:11ca808f9a6b63485059f5f6e164ef7ec826483c1212a44f268b3653c91237d8"}, - {file = "aiohttp-3.9.0-cp312-cp312-win32.whl", hash = "sha256:de3cc86f4ea8b4c34a6e43a7306c40c1275e52bfa9748d869c6b7d54aa6dad80"}, - {file = "aiohttp-3.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:ca4fddf84ac7d8a7d0866664936f93318ff01ee33e32381a115b19fb5a4d1202"}, - {file = "aiohttp-3.9.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:f09960b5bb1017d16c0f9e9f7fc42160a5a49fa1e87a175fd4a2b1a1833ea0af"}, - {file = "aiohttp-3.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8303531e2c17b1a494ffaeba48f2da655fe932c4e9a2626c8718403c83e5dd2b"}, - {file = "aiohttp-3.9.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4790e44f46a4aa07b64504089def5744d3b6780468c4ec3a1a36eb7f2cae9814"}, - {file = "aiohttp-3.9.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1d7edf74a36de0e5ca50787e83a77cf352f5504eb0ffa3f07000a911ba353fb"}, - {file = 
"aiohttp-3.9.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:94697c7293199c2a2551e3e3e18438b4cba293e79c6bc2319f5fd652fccb7456"}, - {file = "aiohttp-3.9.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a1b66dbb8a7d5f50e9e2ea3804b01e766308331d0cac76eb30c563ac89c95985"}, - {file = "aiohttp-3.9.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9623cfd9e85b76b83ef88519d98326d4731f8d71869867e47a0b979ffec61c73"}, - {file = "aiohttp-3.9.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f32c86dc967ab8c719fd229ce71917caad13cc1e8356ee997bf02c5b368799bf"}, - {file = "aiohttp-3.9.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f50b4663c3e0262c3a361faf440761fbef60ccdde5fe8545689a4b3a3c149fb4"}, - {file = "aiohttp-3.9.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:dcf71c55ec853826cd70eadb2b6ac62ec577416442ca1e0a97ad875a1b3a0305"}, - {file = "aiohttp-3.9.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:42fe4fd9f0dfcc7be4248c162d8056f1d51a04c60e53366b0098d1267c4c9da8"}, - {file = "aiohttp-3.9.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:76a86a9989ebf82ee61e06e2bab408aec4ea367dc6da35145c3352b60a112d11"}, - {file = "aiohttp-3.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f9e09a1c83521d770d170b3801eea19b89f41ccaa61d53026ed111cb6f088887"}, - {file = "aiohttp-3.9.0-cp38-cp38-win32.whl", hash = "sha256:a00ce44c21612d185c5275c5cba4bab8d7c1590f248638b667ed8a782fa8cd6f"}, - {file = "aiohttp-3.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:d5b9345ab92ebe6003ae11d8092ce822a0242146e6fa270889b9ba965457ca40"}, - {file = "aiohttp-3.9.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:98d21092bf2637c5fa724a428a69e8f5955f2182bff61f8036827cf6ce1157bf"}, - {file = "aiohttp-3.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:35a68cd63ca6aaef5707888f17a70c36efe62b099a4e853d33dc2e9872125be8"}, - {file = "aiohttp-3.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3d7f6235c7475658acfc1769d968e07ab585c79f6ca438ddfecaa9a08006aee2"}, - {file = "aiohttp-3.9.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db04d1de548f7a62d1dd7e7cdf7c22893ee168e22701895067a28a8ed51b3735"}, - {file = "aiohttp-3.9.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:536b01513d67d10baf6f71c72decdf492fb7433c5f2f133e9a9087379d4b6f31"}, - {file = "aiohttp-3.9.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c8b0a6487e8109427ccf638580865b54e2e3db4a6e0e11c02639231b41fc0f"}, - {file = "aiohttp-3.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7276fe0017664414fdc3618fca411630405f1aaf0cc3be69def650eb50441787"}, - {file = "aiohttp-3.9.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:23170247ef89ffa842a02bbfdc425028574d9e010611659abeb24d890bc53bb8"}, - {file = "aiohttp-3.9.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b1a2ea8252cacc7fd51df5a56d7a2bb1986ed39be9397b51a08015727dfb69bd"}, - {file = "aiohttp-3.9.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2d71abc15ff7047412ef26bf812dfc8d0d1020d664617f4913df2df469f26b76"}, - {file = "aiohttp-3.9.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:2d820162c8c2bdbe97d328cd4f417c955ca370027dce593345e437b2e9ffdc4d"}, - {file = "aiohttp-3.9.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = 
"sha256:2779f5e7c70f7b421915fd47db332c81de365678180a9f3ab404088f87ba5ff9"}, - {file = "aiohttp-3.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:366bc870d7ac61726f32a489fbe3d1d8876e87506870be66b01aeb84389e967e"}, - {file = "aiohttp-3.9.0-cp39-cp39-win32.whl", hash = "sha256:1df43596b826022b14998f0460926ce261544fedefe0d2f653e1b20f49e96454"}, - {file = "aiohttp-3.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:9c196b30f1b1aa3363a69dd69079ae9bec96c2965c4707eaa6914ba099fb7d4f"}, - {file = "aiohttp-3.9.0.tar.gz", hash = "sha256:09f23292d29135025e19e8ff4f0a68df078fe4ee013bca0105b2e803989de92d"}, + {file = "aiohttp-3.9.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:939677b61f9d72a4fa2a042a5eee2a99a24001a67c13da113b2e30396567db54"}, + {file = "aiohttp-3.9.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1f5cd333fcf7590a18334c90f8c9147c837a6ec8a178e88d90a9b96ea03194cc"}, + {file = "aiohttp-3.9.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:82e6aa28dd46374f72093eda8bcd142f7771ee1eb9d1e223ff0fa7177a96b4a5"}, + {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f56455b0c2c7cc3b0c584815264461d07b177f903a04481dfc33e08a89f0c26b"}, + {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bca77a198bb6e69795ef2f09a5f4c12758487f83f33d63acde5f0d4919815768"}, + {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e083c285857b78ee21a96ba1eb1b5339733c3563f72980728ca2b08b53826ca5"}, + {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab40e6251c3873d86ea9b30a1ac6d7478c09277b32e14745d0d3c6e76e3c7e29"}, + {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:df822ee7feaaeffb99c1a9e5e608800bd8eda6e5f18f5cfb0dc7eeb2eaa6bbec"}, + {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:acef0899fea7492145d2bbaaaec7b345c87753168589cc7faf0afec9afe9b747"}, + {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cd73265a9e5ea618014802ab01babf1940cecb90c9762d8b9e7d2cc1e1969ec6"}, + {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:a78ed8a53a1221393d9637c01870248a6f4ea5b214a59a92a36f18151739452c"}, + {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:6b0e029353361f1746bac2e4cc19b32f972ec03f0f943b390c4ab3371840aabf"}, + {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7cf5c9458e1e90e3c390c2639f1017a0379a99a94fdfad3a1fd966a2874bba52"}, + {file = "aiohttp-3.9.3-cp310-cp310-win32.whl", hash = "sha256:3e59c23c52765951b69ec45ddbbc9403a8761ee6f57253250c6e1536cacc758b"}, + {file = "aiohttp-3.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:055ce4f74b82551678291473f66dc9fb9048a50d8324278751926ff0ae7715e5"}, + {file = "aiohttp-3.9.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6b88f9386ff1ad91ace19d2a1c0225896e28815ee09fc6a8932fded8cda97c3d"}, + {file = "aiohttp-3.9.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c46956ed82961e31557b6857a5ca153c67e5476972e5f7190015018760938da2"}, + {file = "aiohttp-3.9.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:07b837ef0d2f252f96009e9b8435ec1fef68ef8b1461933253d318748ec1acdc"}, + {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad46e6f620574b3b4801c68255492e0159d1712271cc99d8bdf35f2043ec266"}, + {file = 
"aiohttp-3.9.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ed3e046ea7b14938112ccd53d91c1539af3e6679b222f9469981e3dac7ba1ce"}, + {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:039df344b45ae0b34ac885ab5b53940b174530d4dd8a14ed8b0e2155b9dddccb"}, + {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7943c414d3a8d9235f5f15c22ace69787c140c80b718dcd57caaade95f7cd93b"}, + {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84871a243359bb42c12728f04d181a389718710129b36b6aad0fc4655a7647d4"}, + {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5eafe2c065df5401ba06821b9a054d9cb2848867f3c59801b5d07a0be3a380ae"}, + {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:9d3c9b50f19704552f23b4eaea1fc082fdd82c63429a6506446cbd8737823da3"}, + {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:f033d80bc6283092613882dfe40419c6a6a1527e04fc69350e87a9df02bbc283"}, + {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:2c895a656dd7e061b2fd6bb77d971cc38f2afc277229ce7dd3552de8313a483e"}, + {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1f5a71d25cd8106eab05f8704cd9167b6e5187bcdf8f090a66c6d88b634802b4"}, + {file = "aiohttp-3.9.3-cp311-cp311-win32.whl", hash = "sha256:50fca156d718f8ced687a373f9e140c1bb765ca16e3d6f4fe116e3df7c05b2c5"}, + {file = "aiohttp-3.9.3-cp311-cp311-win_amd64.whl", hash = "sha256:5fe9ce6c09668063b8447f85d43b8d1c4e5d3d7e92c63173e6180b2ac5d46dd8"}, + {file = "aiohttp-3.9.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:38a19bc3b686ad55804ae931012f78f7a534cce165d089a2059f658f6c91fa60"}, + {file = "aiohttp-3.9.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:770d015888c2a598b377bd2f663adfd947d78c0124cfe7b959e1ef39f5b13869"}, + {file = "aiohttp-3.9.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ee43080e75fc92bf36219926c8e6de497f9b247301bbf88c5c7593d931426679"}, + {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52df73f14ed99cee84865b95a3d9e044f226320a87af208f068ecc33e0c35b96"}, + {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc9b311743a78043b26ffaeeb9715dc360335e5517832f5a8e339f8a43581e4d"}, + {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b955ed993491f1a5da7f92e98d5dad3c1e14dc175f74517c4e610b1f2456fb11"}, + {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:504b6981675ace64c28bf4a05a508af5cde526e36492c98916127f5a02354d53"}, + {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a6fe5571784af92b6bc2fda8d1925cccdf24642d49546d3144948a6a1ed58ca5"}, + {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ba39e9c8627edc56544c8628cc180d88605df3892beeb2b94c9bc857774848ca"}, + {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e5e46b578c0e9db71d04c4b506a2121c0cb371dd89af17a0586ff6769d4c58c1"}, + {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:938a9653e1e0c592053f815f7028e41a3062e902095e5a7dc84617c87267ebd5"}, + {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_s390x.whl", hash = 
"sha256:c3452ea726c76e92f3b9fae4b34a151981a9ec0a4847a627c43d71a15ac32aa6"}, + {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ff30218887e62209942f91ac1be902cc80cddb86bf00fbc6783b7a43b2bea26f"}, + {file = "aiohttp-3.9.3-cp312-cp312-win32.whl", hash = "sha256:38f307b41e0bea3294a9a2a87833191e4bcf89bb0365e83a8be3a58b31fb7f38"}, + {file = "aiohttp-3.9.3-cp312-cp312-win_amd64.whl", hash = "sha256:b791a3143681a520c0a17e26ae7465f1b6f99461a28019d1a2f425236e6eedb5"}, + {file = "aiohttp-3.9.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0ed621426d961df79aa3b963ac7af0d40392956ffa9be022024cd16297b30c8c"}, + {file = "aiohttp-3.9.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7f46acd6a194287b7e41e87957bfe2ad1ad88318d447caf5b090012f2c5bb528"}, + {file = "aiohttp-3.9.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:feeb18a801aacb098220e2c3eea59a512362eb408d4afd0c242044c33ad6d542"}, + {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f734e38fd8666f53da904c52a23ce517f1b07722118d750405af7e4123933511"}, + {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b40670ec7e2156d8e57f70aec34a7216407848dfe6c693ef131ddf6e76feb672"}, + {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fdd215b7b7fd4a53994f238d0f46b7ba4ac4c0adb12452beee724ddd0743ae5d"}, + {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:017a21b0df49039c8f46ca0971b3a7fdc1f56741ab1240cb90ca408049766168"}, + {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e99abf0bba688259a496f966211c49a514e65afa9b3073a1fcee08856e04425b"}, + {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:648056db9a9fa565d3fa851880f99f45e3f9a771dd3ff3bb0c048ea83fb28194"}, + {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8aacb477dc26797ee089721536a292a664846489c49d3ef9725f992449eda5a8"}, + {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:522a11c934ea660ff8953eda090dcd2154d367dec1ae3c540aff9f8a5c109ab4"}, + {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:5bce0dc147ca85caa5d33debc4f4d65e8e8b5c97c7f9f660f215fa74fc49a321"}, + {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b4af9f25b49a7be47c0972139e59ec0e8285c371049df1a63b6ca81fdd216a2"}, + {file = "aiohttp-3.9.3-cp38-cp38-win32.whl", hash = "sha256:298abd678033b8571995650ccee753d9458dfa0377be4dba91e4491da3f2be63"}, + {file = "aiohttp-3.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:69361bfdca5468c0488d7017b9b1e5ce769d40b46a9f4a2eed26b78619e9396c"}, + {file = "aiohttp-3.9.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0fa43c32d1643f518491d9d3a730f85f5bbaedcbd7fbcae27435bb8b7a061b29"}, + {file = "aiohttp-3.9.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:835a55b7ca49468aaaac0b217092dfdff370e6c215c9224c52f30daaa735c1c1"}, + {file = "aiohttp-3.9.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:06a9b2c8837d9a94fae16c6223acc14b4dfdff216ab9b7202e07a9a09541168f"}, + {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abf151955990d23f84205286938796c55ff11bbfb4ccfada8c9c83ae6b3c89a3"}, + {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59c26c95975f26e662ca78fdf543d4eeaef70e533a672b4113dd888bd2423caa"}, + 
{file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f95511dd5d0e05fd9728bac4096319f80615aaef4acbecb35a990afebe953b0e"}, + {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:595f105710293e76b9dc09f52e0dd896bd064a79346234b521f6b968ffdd8e58"}, + {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7c8b816c2b5af5c8a436df44ca08258fc1a13b449393a91484225fcb7545533"}, + {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f1088fa100bf46e7b398ffd9904f4808a0612e1d966b4aa43baa535d1b6341eb"}, + {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f59dfe57bb1ec82ac0698ebfcdb7bcd0e99c255bd637ff613760d5f33e7c81b3"}, + {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:361a1026c9dd4aba0109e4040e2aecf9884f5cfe1b1b1bd3d09419c205e2e53d"}, + {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:363afe77cfcbe3a36353d8ea133e904b108feea505aa4792dad6585a8192c55a"}, + {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e2c45c208c62e955e8256949eb225bd8b66a4c9b6865729a786f2aa79b72e9d"}, + {file = "aiohttp-3.9.3-cp39-cp39-win32.whl", hash = "sha256:f7217af2e14da0856e082e96ff637f14ae45c10a5714b63c77f26d8884cf1051"}, + {file = "aiohttp-3.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:27468897f628c627230dba07ec65dc8d0db566923c48f29e084ce382119802bc"}, + {file = "aiohttp-3.9.3.tar.gz", hash = "sha256:90842933e5d1ff760fae6caca4b2b3edba53ba8f4b71e95dacf2818a2aca06f7"}, ] [package.dependencies] @@ -169,13 +170,13 @@ frozenlist = ">=1.1.0" [[package]] name = "annotated-types" -version = "0.5.0" +version = "0.6.0" description = "Reusable constraint types to use with typing.Annotated" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "annotated_types-0.5.0-py3-none-any.whl", hash = "sha256:58da39888f92c276ad970249761ebea80ba544b77acddaa1a4d6cf78287d45fd"}, - {file = "annotated_types-0.5.0.tar.gz", hash = "sha256:47cdc3490d9ac1506ce92c7aaa76c579dc3509ff11e098fc867e5130ab7be802"}, + {file = "annotated_types-0.6.0-py3-none-any.whl", hash = "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43"}, + {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"}, ] [package.dependencies] @@ -194,41 +195,42 @@ files = [ [[package]] name = "attrs" -version = "23.1.0" +version = "23.2.0" description = "Classes Without Boilerplate" optional = false python-versions = ">=3.7" files = [ - {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, - {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, ] [package.extras] cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] -dev = ["attrs[docs,tests]", "pre-commit"] +dev = ["attrs[tests]", "pre-commit"] docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] tests = ["attrs[tests-no-zope]", "zope-interface"] -tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", 
"pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] [[package]] name = "aws-sam-translator" -version = "1.82.0" +version = "1.85.0" description = "AWS SAM Translator is a library that transform SAM templates into AWS CloudFormation templates" optional = false -python-versions = ">=3.7, <=4.0, !=4.0" +python-versions = ">=3.8, <=4.0, !=4.0" files = [ - {file = "aws-sam-translator-1.82.0.tar.gz", hash = "sha256:f78e58194461635aef6255d04e82a9b690e331ca9fd669d1401bf7f9a93ae49f"}, - {file = "aws_sam_translator-1.82.0-py3-none-any.whl", hash = "sha256:29ba61f2a70b2b1cf0c76b92b78a23c7cdd19ea1b0a5992753180b56d040d20b"}, + {file = "aws-sam-translator-1.85.0.tar.gz", hash = "sha256:e41938affa128fb5bde5e1989b260bf539a96369bba3faf316ce66651351df39"}, + {file = "aws_sam_translator-1.85.0-py3-none-any.whl", hash = "sha256:e8c69a4db7279421ff6c3579cd4d43395fe9b6781f50416528e984be68e25481"}, ] [package.dependencies] boto3 = ">=1.19.5,<2.dev0" jsonschema = ">=3.2,<5" pydantic = ">=1.8,<3" -typing-extensions = ">=4.4,<5" +typing-extensions = ">=4.4" [package.extras] -dev = ["black (==23.3.0)", "boto3 (>=1.23,<2)", "boto3-stubs[appconfig,serverlessrepo] (>=1.19.5,<2.dev0)", "coverage (>=5.3,<8)", "dateparser (>=1.1,<2.0)", "importlib-metadata", "mypy (>=1.3.0,<1.4.0)", "parameterized (>=0.7,<1.0)", "pytest (>=6.2,<8)", "pytest-cov (>=2.10,<5)", "pytest-env (>=0.6,<1)", "pytest-rerunfailures (>=9.1,<12)", "pytest-xdist (>=2.5,<4)", "pyyaml (>=6.0,<7.0)", "requests (>=2.28,<3.0)", "ruamel.yaml (==0.17.21)", "ruff (==0.0.284)", "tenacity (>=8.0,<9.0)", "types-PyYAML (>=6.0,<7.0)", "types-jsonschema (>=3.2,<4.0)"] +dev = ["black (==23.10.1)", "boto3 (>=1.23,<2)", "boto3-stubs[appconfig,serverlessrepo] (>=1.19.5,<2.dev0)", "coverage (>=5.3,<8)", "dateparser (>=1.1,<2.0)", "mypy (>=1.3.0,<1.4.0)", "parameterized (>=0.7,<1.0)", "pytest (>=6.2,<8)", "pytest-cov (>=2.10,<5)", "pytest-env (>=0.6,<1)", "pytest-rerunfailures (>=9.1,<12)", "pytest-xdist (>=2.5,<4)", "pyyaml (>=6.0,<7.0)", "requests (>=2.28,<3.0)", "ruamel.yaml (==0.17.21)", "ruff (>=0.1.0,<0.2.0)", "tenacity (>=8.0,<9.0)", "types-PyYAML (>=6.0,<7.0)", "types-jsonschema (>=3.2,<4.0)"] [[package]] name = "aws-xray-sdk" @@ -247,17 +249,17 @@ wrapt = "*" [[package]] name = "azure-core" -version = "1.29.4" +version = "1.30.0" description = "Microsoft Azure Core Library for Python" optional = true python-versions = ">=3.7" files = [ - {file = "azure-core-1.29.4.tar.gz", hash = "sha256:500b3aa9bf2e90c5ccc88bb105d056114ca0ce7d0ce73afb8bc4d714b2fc7568"}, - {file = "azure_core-1.29.4-py3-none-any.whl", hash = "sha256:b03261bcba22c0b9290faf9999cedd23e849ed2577feee90515694cea6bc74bf"}, + {file = "azure-core-1.30.0.tar.gz", hash = "sha256:6f3a7883ef184722f6bd997262eddaf80cfe7e5b3e0caaaf8db1695695893d35"}, + {file = "azure_core-1.30.0-py3-none-any.whl", hash = "sha256:3dae7962aad109610e68c9a7abb31d79720e1d982ddf61363038d175a5025e89"}, ] [package.dependencies] -requests = ">=2.18.4" +requests = ">=2.21.0" six = ">=1.11.0" typing-extensions = ">=4.6.0" @@ -282,30 +284,30 @@ requests = ">=2.20.0" [[package]] name = "azure-identity" -version = "1.14.0" +version = "1.15.0" description = "Microsoft Azure Identity Library for Python" optional = true python-versions = ">=3.7" files = [ - {file = "azure-identity-1.14.0.zip", hash = 
"sha256:72441799f8c5c89bfe21026965e266672a7c5d050c2c65119ef899dd5362e2b1"}, - {file = "azure_identity-1.14.0-py3-none-any.whl", hash = "sha256:edabf0e010eb85760e1dd19424d5e8f97ba2c9caff73a16e7b30ccbdbcce369b"}, + {file = "azure-identity-1.15.0.tar.gz", hash = "sha256:4c28fc246b7f9265610eb5261d65931183d019a23d4b0e99357facb2e6c227c8"}, + {file = "azure_identity-1.15.0-py3-none-any.whl", hash = "sha256:a14b1f01c7036f11f148f22cd8c16e05035293d714458d6b44ddf534d93eb912"}, ] [package.dependencies] -azure-core = ">=1.11.0,<2.0.0" +azure-core = ">=1.23.0,<2.0.0" cryptography = ">=2.5" -msal = ">=1.20.0,<2.0.0" +msal = ">=1.24.0,<2.0.0" msal-extensions = ">=0.3.0,<2.0.0" [[package]] name = "azure-storage-blob" -version = "12.18.1" +version = "12.19.0" description = "Microsoft Azure Blob Storage Client Library for Python" optional = true python-versions = ">=3.7" files = [ - {file = "azure-storage-blob-12.18.1.tar.gz", hash = "sha256:d3265c2403c28d8881326c365e9cf7ed2ad55fdac98404eae753548702b31ba2"}, - {file = "azure_storage_blob-12.18.1-py3-none-any.whl", hash = "sha256:00b92568e91d608c04dfd4814c3b180818e690023493bb984c22dfc1a8a96e55"}, + {file = "azure-storage-blob-12.19.0.tar.gz", hash = "sha256:26c0a4320a34a3c2a1b74528ba6812ebcb632a04cd67b1c7377232c4b01a5897"}, + {file = "azure_storage_blob-12.19.0-py3-none-any.whl", hash = "sha256:7bbc2c9c16678f7a420367fef6b172ba8730a7e66df7f4d7a55d5b3c8216615b"}, ] [package.dependencies] @@ -330,41 +332,44 @@ files = [ [[package]] name = "boto3" -version = "1.28.17" +version = "1.34.34" description = "The AWS SDK for Python" optional = false -python-versions = ">= 3.7" +python-versions = ">= 3.8" files = [ - {file = "boto3-1.28.17-py3-none-any.whl", hash = "sha256:bca0526f819e0f19c0f1e6eba3e2d1d6b6a92a45129f98c0d716e5aab6d9444b"}, - {file = "boto3-1.28.17.tar.gz", hash = "sha256:90f7cfb5e1821af95b1fc084bc50e6c47fa3edc99f32de1a2591faa0c546bea7"}, + {file = "boto3-1.34.34-py3-none-any.whl", hash = "sha256:33a8b6d9136fa7427160edb92d2e50f2035f04e9d63a2d1027349053e12626aa"}, + {file = "boto3-1.34.34.tar.gz", hash = "sha256:b2f321e20966f021ec800b7f2c01287a3dd04fc5965acdfbaa9c505a24ca45d1"}, ] [package.dependencies] -botocore = ">=1.31.17,<1.32.0" +botocore = ">=1.34.34,<1.35.0" jmespath = ">=0.7.1,<2.0.0" -s3transfer = ">=0.6.0,<0.7.0" +s3transfer = ">=0.10.0,<0.11.0" [package.extras] crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.31.17" +version = "1.34.34" description = "Low-level, data-driven core of boto 3." 
optional = false -python-versions = ">= 3.7" +python-versions = ">= 3.8" files = [ - {file = "botocore-1.31.17-py3-none-any.whl", hash = "sha256:6ac34a1d34aa3750e78b77b8596617e2bab938964694d651939dba2cbde2c12b"}, - {file = "botocore-1.31.17.tar.gz", hash = "sha256:396459065dba4339eb4da4ec8b4e6599728eb89b7caaceea199e26f7d824a41c"}, + {file = "botocore-1.34.34-py3-none-any.whl", hash = "sha256:cd060b0d88ebb2b893f1411c1db7f2ba66cc18e52dcc57ad029564ef5fec437b"}, + {file = "botocore-1.34.34.tar.gz", hash = "sha256:54093dc97372bb7683f5c61a279aa8240408abf3b2cc494ae82a9a90c1b784b5"}, ] [package.dependencies] jmespath = ">=0.7.1,<2.0.0" python-dateutil = ">=2.1,<3.0.0" -urllib3 = ">=1.25.4,<1.27" +urllib3 = [ + {version = ">=1.25.4,<1.27", markers = "python_version < \"3.10\""}, + {version = ">=1.25.4,<2.1", markers = "python_version >= \"3.10\""}, +] [package.extras] -crt = ["awscrt (==0.16.26)"] +crt = ["awscrt (==0.19.19)"] [[package]] name = "build" @@ -392,97 +397,85 @@ virtualenv = ["virtualenv (>=20.0.35)"] [[package]] name = "cachetools" -version = "5.3.1" +version = "5.3.2" description = "Extensible memoizing collections and decorators" optional = true python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.1-py3-none-any.whl", hash = "sha256:95ef631eeaea14ba2e36f06437f36463aac3a096799e876ee55e5cdccb102590"}, - {file = "cachetools-5.3.1.tar.gz", hash = "sha256:dce83f2d9b4e1f732a8cd44af8e8fab2dbe46201467fc98b3ef8f269092bf62b"}, + {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, + {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, ] [[package]] name = "certifi" -version = "2023.7.22" +version = "2024.2.2" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"}, - {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"}, + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, ] [[package]] name = "cffi" -version = "1.15.1" +version = "1.16.0" description = "Foreign Function Interface for Python calling C code." 
optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, - {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, - {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, - {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"}, - {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"}, - {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"}, - {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"}, - {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"}, - {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"}, - {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"}, - {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"}, - {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"}, - {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"}, - {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"}, - {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"}, - {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"}, - {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"}, - {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"}, - {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"}, - {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"}, - {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"}, - {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"}, - {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"}, - {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"}, - {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"}, - {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"}, - {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"}, - {file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"}, - {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"}, - {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"}, - {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"}, - {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"}, - {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"}, - {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, - {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, + {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, + {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, + {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, + {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, + {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, + {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, + {file = 
"cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, + {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, + {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, + {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, + {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, + {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, + {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, + {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, + 
{file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, + {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, ] [package.dependencies] @@ -501,17 +494,17 @@ files = [ [[package]] name = "cfn-lint" -version = "0.83.6" +version = "0.85.2" description = "Checks CloudFormation templates for practices and behaviour that could potentially be improved" optional = false -python-versions = ">=3.7, <=4.0, !=4.0" +python-versions = ">=3.8, <=4.0, !=4.0" files = [ - {file = "cfn-lint-0.83.6.tar.gz", hash = "sha256:4666d3b7ceccab1e6b28cc75731a0a199185060f38a3f97f0207945ccf358d24"}, - {file = "cfn_lint-0.83.6-py3-none-any.whl", hash = "sha256:97f05a3649a9480f88737aefadc926742081692cd31c774be59cdd1f88440795"}, + {file = "cfn-lint-0.85.2.tar.gz", hash = "sha256:f8a5cc55daeaaa747b8d776dcf62fe1b6bfb8cb46ae60950cbe627601facccd7"}, + {file = "cfn_lint-0.85.2-py3-none-any.whl", hash = "sha256:e7a0aafb9ad93dbe5db54cbefca92a94f2d173309218273ef997ecb048125d89"}, ] [package.dependencies] -aws-sam-translator = ">=1.82.0" +aws-sam-translator = ">=1.84.0" jschema-to-python = ">=1.2.3,<1.3.0" jsonpatch = "*" jsonschema = ">=3.0,<5" @@ -524,86 +517,101 @@ sympy = ">=1.0.0" [[package]] name = "charset-normalizer" -version = "3.2.0" +version = "3.3.2" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false python-versions = ">=3.7.0" files = [ - {file = "charset-normalizer-3.2.0.tar.gz", hash = "sha256:3bb3d25a8e6c0aedd251753a79ae98a093c7e7b471faa3aa9a93a81431987ace"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b87549028f680ca955556e3bd57013ab47474c3124dc069faa0b6545b6c9710"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7c70087bfee18a42b4040bb9ec1ca15a08242cf5867c58726530bdf3945672ed"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a103b3a7069b62f5d4890ae1b8f0597618f628b286b03d4bc9195230b154bfa9"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94aea8eff76ee6d1cdacb07dd2123a68283cb5569e0250feab1240058f53b623"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db901e2ac34c931d73054d9797383d0f8009991e723dab15109740a63e7f902a"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0dac0ff919ba34d4df1b6131f59ce95b08b9065233446be7e459f95554c0dc8"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:193cbc708ea3aca45e7221ae58f0fd63f933753a9bfb498a3b474878f12caaad"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09393e1b2a9461950b1c9a45d5fd251dc7c6f228acab64da1c9c0165d9c7765c"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:baacc6aee0b2ef6f3d308e197b5d7a81c0e70b06beae1f1fcacffdbd124fe0e3"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bf420121d4c8dce6b889f0e8e4ec0ca34b7f40186203f06a946fa0276ba54029"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = 
"sha256:c04a46716adde8d927adb9457bbe39cf473e1e2c2f5d0a16ceb837e5d841ad4f"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:aaf63899c94de41fe3cf934601b0f7ccb6b428c6e4eeb80da72c58eab077b19a"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62e51710986674142526ab9f78663ca2b0726066ae26b78b22e0f5e571238dd"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-win32.whl", hash = "sha256:04e57ab9fbf9607b77f7d057974694b4f6b142da9ed4a199859d9d4d5c63fe96"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:48021783bdf96e3d6de03a6e39a1171ed5bd7e8bb93fc84cc649d11490f87cea"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4957669ef390f0e6719db3613ab3a7631e68424604a7b448f079bee145da6e09"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:46fb8c61d794b78ec7134a715a3e564aafc8f6b5e338417cb19fe9f57a5a9bf2"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f779d3ad205f108d14e99bb3859aa7dd8e9c68874617c72354d7ecaec2a054ac"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f25c229a6ba38a35ae6e25ca1264621cc25d4d38dca2942a7fce0b67a4efe918"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2efb1bd13885392adfda4614c33d3b68dee4921fd0ac1d3988f8cbb7d589e72a"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f30b48dd7fa1474554b0b0f3fdfdd4c13b5c737a3c6284d3cdc424ec0ffff3a"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:246de67b99b6851627d945db38147d1b209a899311b1305dd84916f2b88526c6"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd9b3b31adcb054116447ea22caa61a285d92e94d710aa5ec97992ff5eb7cf3"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8c2f5e83493748286002f9369f3e6607c565a6a90425a3a1fef5ae32a36d749d"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3170c9399da12c9dc66366e9d14da8bf7147e1e9d9ea566067bbce7bb74bd9c2"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7a4826ad2bd6b07ca615c74ab91f32f6c96d08f6fcc3902ceeedaec8cdc3bcd6"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:3b1613dd5aee995ec6d4c69f00378bbd07614702a315a2cf6c1d21461fe17c23"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e608aafdb55eb9f255034709e20d5a83b6d60c054df0802fa9c9883d0a937aa"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-win32.whl", hash = "sha256:f2a1d0fd4242bd8643ce6f98927cf9c04540af6efa92323e9d3124f57727bfc1"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:681eb3d7e02e3c3655d1b16059fbfb605ac464c834a0c629048a30fad2b27489"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c57921cda3a80d0f2b8aec7e25c8aa14479ea92b5b51b6876d975d925a2ea346"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41b25eaa7d15909cf3ac4c96088c1f266a9a93ec44f87f1d13d4a0e86c81b982"}, - {file = 
"charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f058f6963fd82eb143c692cecdc89e075fa0828db2e5b291070485390b2f1c9c"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7647ebdfb9682b7bb97e2a5e7cb6ae735b1c25008a70b906aecca294ee96cf4"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eef9df1eefada2c09a5e7a40991b9fc6ac6ef20b1372abd48d2794a316dc0449"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e03b8895a6990c9ab2cdcd0f2fe44088ca1c65ae592b8f795c3294af00a461c3"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ee4006268ed33370957f55bf2e6f4d263eaf4dc3cfc473d1d90baff6ed36ce4a"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c4983bf937209c57240cff65906b18bb35e64ae872da6a0db937d7b4af845dd7"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:3bb7fda7260735efe66d5107fb7e6af6a7c04c7fce9b2514e04b7a74b06bf5dd"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:72814c01533f51d68702802d74f77ea026b5ec52793c791e2da806a3844a46c3"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:70c610f6cbe4b9fce272c407dd9d07e33e6bf7b4aa1b7ffb6f6ded8e634e3592"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-win32.whl", hash = "sha256:a401b4598e5d3f4a9a811f3daf42ee2291790c7f9d74b18d75d6e21dda98a1a1"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:c0b21078a4b56965e2b12f247467b234734491897e99c1d51cee628da9786959"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:95eb302ff792e12aba9a8b8f8474ab229a83c103d74a750ec0bd1c1eea32e669"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1a100c6d595a7f316f1b6f01d20815d916e75ff98c27a01ae817439ea7726329"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6339d047dab2780cc6220f46306628e04d9750f02f983ddb37439ca47ced7149"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b749b9cc6ee664a3300bb3a273c1ca8068c46be705b6c31cf5d276f8628a94"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a38856a971c602f98472050165cea2cdc97709240373041b69030be15047691f"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f87f746ee241d30d6ed93969de31e5ffd09a2961a051e60ae6bddde9ec3583aa"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89f1b185a01fe560bc8ae5f619e924407efca2191b56ce749ec84982fc59a32a"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1c8a2f4c69e08e89632defbfabec2feb8a8d99edc9f89ce33c4b9e36ab63037"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2f4ac36d8e2b4cc1aa71df3dd84ff8efbe3bfb97ac41242fbcfc053c67434f46"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a386ebe437176aab38c041de1260cd3ea459c6ce5263594399880bbc398225b2"}, - {file = 
"charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ccd16eb18a849fd8dcb23e23380e2f0a354e8daa0c984b8a732d9cfaba3a776d"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e6a5bf2cba5ae1bb80b154ed68a3cfa2fa00fde979a7f50d6598d3e17d9ac20c"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45de3f87179c1823e6d9e32156fb14c1927fcc9aba21433f088fdfb555b77c10"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-win32.whl", hash = "sha256:1000fba1057b92a65daec275aec30586c3de2401ccdcd41f8a5c1e2c87078706"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b2c760cfc7042b27ebdb4a43a4453bd829a5742503599144d54a032c5dc7e9e"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:855eafa5d5a2034b4621c74925d89c5efef61418570e5ef9b37717d9c796419c"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:203f0c8871d5a7987be20c72442488a0b8cfd0f43b7973771640fc593f56321f"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e857a2232ba53ae940d3456f7533ce6ca98b81917d47adc3c7fd55dad8fab858"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e86d77b090dbddbe78867a0275cb4df08ea195e660f1f7f13435a4649e954e5"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fb39a81950ec280984b3a44f5bd12819953dc5fa3a7e6fa7a80db5ee853952"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dee8e57f052ef5353cf608e0b4c871aee320dd1b87d351c28764fc0ca55f9f4"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8700f06d0ce6f128de3ccdbc1acaea1ee264d2caa9ca05daaf492fde7c2a7200"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1920d4ff15ce893210c1f0c0e9d19bfbecb7983c76b33f046c13a8ffbd570252"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c1c76a1743432b4b60ab3358c937a3fe1341c828ae6194108a94c69028247f22"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f7560358a6811e52e9c4d142d497f1a6e10103d3a6881f18d04dbce3729c0e2c"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c8063cf17b19661471ecbdb3df1c84f24ad2e389e326ccaf89e3fb2484d8dd7e"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:cd6dbe0238f7743d0efe563ab46294f54f9bc8f4b9bcf57c3c666cc5bc9d1299"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1249cbbf3d3b04902ff081ffbb33ce3377fa6e4c7356f759f3cd076cc138d020"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-win32.whl", hash = "sha256:6c409c0deba34f147f77efaa67b8e4bb83d2f11c8806405f76397ae5b8c0d1c9"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:7095f6fbfaa55defb6b733cfeb14efaae7a29f0b59d8cf213be4e7ca0b857b80"}, - {file = "charset_normalizer-3.2.0-py3-none-any.whl", hash = "sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6"}, + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = 
"sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = 
"charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, ] [[package]] @@ -633,63 +641,63 @@ files = [ [[package]] name = "coverage" -version = "7.3.4" +version = "7.4.2" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.3.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:aff2bd3d585969cc4486bfc69655e862028b689404563e6b549e6a8244f226df"}, - {file = "coverage-7.3.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4353923f38d752ecfbd3f1f20bf7a3546993ae5ecd7c07fd2f25d40b4e54571"}, - {file = "coverage-7.3.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea473c37872f0159294f7073f3fa72f68b03a129799f3533b2bb44d5e9fa4f82"}, - {file = "coverage-7.3.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5214362abf26e254d749fc0c18af4c57b532a4bfde1a057565616dd3b8d7cc94"}, - {file = "coverage-7.3.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f99b7d3f7a7adfa3d11e3a48d1a91bb65739555dd6a0d3fa68aa5852d962e5b1"}, - {file = "coverage-7.3.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:74397a1263275bea9d736572d4cf338efaade2de9ff759f9c26bcdceb383bb49"}, - {file = "coverage-7.3.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:f154bd866318185ef5865ace5be3ac047b6d1cc0aeecf53bf83fe846f4384d5d"}, - {file = "coverage-7.3.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e0d84099ea7cba9ff467f9c6f747e3fc3906e2aadac1ce7b41add72e8d0a3712"}, - {file = "coverage-7.3.4-cp310-cp310-win32.whl", hash = "sha256:3f477fb8a56e0c603587b8278d9dbd32e54bcc2922d62405f65574bd76eba78a"}, - {file = "coverage-7.3.4-cp310-cp310-win_amd64.whl", hash = "sha256:c75738ce13d257efbb6633a049fb2ed8e87e2e6c2e906c52d1093a4d08d67c6b"}, - {file = "coverage-7.3.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:997aa14b3e014339d8101b9886063c5d06238848905d9ad6c6eabe533440a9a7"}, - {file = "coverage-7.3.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8a9c5bc5db3eb4cd55ecb8397d8e9b70247904f8eca718cc53c12dcc98e59fc8"}, - {file = "coverage-7.3.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27ee94f088397d1feea3cb524e4313ff0410ead7d968029ecc4bc5a7e1d34fbf"}, - {file = "coverage-7.3.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ce03e25e18dd9bf44723e83bc202114817f3367789052dc9e5b5c79f40cf59d"}, - {file = "coverage-7.3.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85072e99474d894e5df582faec04abe137b28972d5e466999bc64fc37f564a03"}, - {file = "coverage-7.3.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a877810ef918d0d345b783fc569608804f3ed2507bf32f14f652e4eaf5d8f8d0"}, - {file = "coverage-7.3.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:9ac17b94ab4ca66cf803f2b22d47e392f0977f9da838bf71d1f0db6c32893cb9"}, - {file = "coverage-7.3.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:36d75ef2acab74dc948d0b537ef021306796da551e8ac8b467810911000af66a"}, - {file = "coverage-7.3.4-cp311-cp311-win32.whl", hash = "sha256:47ee56c2cd445ea35a8cc3ad5c8134cb9bece3a5cb50bb8265514208d0a65928"}, - {file = "coverage-7.3.4-cp311-cp311-win_amd64.whl", hash = "sha256:11ab62d0ce5d9324915726f611f511a761efcca970bd49d876cf831b4de65be5"}, - {file = "coverage-7.3.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:33e63c578f4acce1b6cd292a66bc30164495010f1091d4b7529d014845cd9bee"}, - {file = "coverage-7.3.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:782693b817218169bfeb9b9ba7f4a9f242764e180ac9589b45112571f32a0ba6"}, - {file = "coverage-7.3.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c4277ddaad9293454da19121c59f2d850f16bcb27f71f89a5c4836906eb35ef"}, - {file = "coverage-7.3.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3d892a19ae24b9801771a5a989fb3e850bd1ad2e2b6e83e949c65e8f37bc67a1"}, - {file = "coverage-7.3.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3024ec1b3a221bd10b5d87337d0373c2bcaf7afd86d42081afe39b3e1820323b"}, - {file = "coverage-7.3.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a1c3e9d2bbd6f3f79cfecd6f20854f4dc0c6e0ec317df2b265266d0dc06535f1"}, - {file = "coverage-7.3.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e91029d7f151d8bf5ab7d8bfe2c3dbefd239759d642b211a677bc0709c9fdb96"}, - {file = "coverage-7.3.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:6879fe41c60080aa4bb59703a526c54e0412b77e649a0d06a61782ecf0853ee1"}, - {file = "coverage-7.3.4-cp312-cp312-win32.whl", hash = "sha256:fd2f8a641f8f193968afdc8fd1697e602e199931012b574194052d132a79be13"}, - {file = "coverage-7.3.4-cp312-cp312-win_amd64.whl", hash = "sha256:d1d0ce6c6947a3a4aa5479bebceff2c807b9f3b529b637e2b33dea4468d75fc7"}, - {file = "coverage-7.3.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:36797b3625d1da885b369bdaaa3b0d9fb8865caed3c2b8230afaa6005434aa2f"}, - {file = "coverage-7.3.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bfed0ec4b419fbc807dec417c401499ea869436910e1ca524cfb4f81cf3f60e7"}, - {file = "coverage-7.3.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f97ff5a9fc2ca47f3383482858dd2cb8ddbf7514427eecf5aa5f7992d0571429"}, - {file = "coverage-7.3.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:607b6c6b35aa49defaebf4526729bd5238bc36fe3ef1a417d9839e1d96ee1e4c"}, - {file = "coverage-7.3.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8e258dcc335055ab59fe79f1dec217d9fb0cdace103d6b5c6df6b75915e7959"}, - {file = "coverage-7.3.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a02ac7c51819702b384fea5ee033a7c202f732a2a2f1fe6c41e3d4019828c8d3"}, - {file = "coverage-7.3.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b710869a15b8caf02e31d16487a931dbe78335462a122c8603bb9bd401ff6fb2"}, - {file = "coverage-7.3.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c6a23ae9348a7a92e7f750f9b7e828448e428e99c24616dec93a0720342f241d"}, - {file = "coverage-7.3.4-cp38-cp38-win32.whl", hash = "sha256:758ebaf74578b73f727acc4e8ab4b16ab6f22a5ffd7dd254e5946aba42a4ce76"}, - {file = "coverage-7.3.4-cp38-cp38-win_amd64.whl", hash = "sha256:309ed6a559bc942b7cc721f2976326efbfe81fc2b8f601c722bff927328507dc"}, - {file = 
"coverage-7.3.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:aefbb29dc56317a4fcb2f3857d5bce9b881038ed7e5aa5d3bcab25bd23f57328"}, - {file = "coverage-7.3.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:183c16173a70caf92e2dfcfe7c7a576de6fa9edc4119b8e13f91db7ca33a7923"}, - {file = "coverage-7.3.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a4184dcbe4f98d86470273e758f1d24191ca095412e4335ff27b417291f5964"}, - {file = "coverage-7.3.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93698ac0995516ccdca55342599a1463ed2e2d8942316da31686d4d614597ef9"}, - {file = "coverage-7.3.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb220b3596358a86361139edce40d97da7458412d412e1e10c8e1970ee8c09ab"}, - {file = "coverage-7.3.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d5b14abde6f8d969e6b9dd8c7a013d9a2b52af1235fe7bebef25ad5c8f47fa18"}, - {file = "coverage-7.3.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:610afaf929dc0e09a5eef6981edb6a57a46b7eceff151947b836d869d6d567c1"}, - {file = "coverage-7.3.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d6ed790728fb71e6b8247bd28e77e99d0c276dff952389b5388169b8ca7b1c28"}, - {file = "coverage-7.3.4-cp39-cp39-win32.whl", hash = "sha256:c15fdfb141fcf6a900e68bfa35689e1256a670db32b96e7a931cab4a0e1600e5"}, - {file = "coverage-7.3.4-cp39-cp39-win_amd64.whl", hash = "sha256:38d0b307c4d99a7aca4e00cad4311b7c51b7ac38fb7dea2abe0d182dd4008e05"}, - {file = "coverage-7.3.4-pp38.pp39.pp310-none-any.whl", hash = "sha256:b1e0f25ae99cf247abfb3f0fac7ae25739e4cd96bf1afa3537827c576b4847e5"}, - {file = "coverage-7.3.4.tar.gz", hash = "sha256:020d56d2da5bc22a0e00a5b0d54597ee91ad72446fa4cf1b97c35022f6b6dbf0"}, + {file = "coverage-7.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bf54c3e089179d9d23900e3efc86d46e4431188d9a657f345410eecdd0151f50"}, + {file = "coverage-7.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fe6e43c8b510719b48af7db9631b5fbac910ade4bd90e6378c85ac5ac706382c"}, + {file = "coverage-7.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b98c89db1b150d851a7840142d60d01d07677a18f0f46836e691c38134ed18b"}, + {file = "coverage-7.4.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c5f9683be6a5b19cd776ee4e2f2ffb411424819c69afab6b2db3a0a364ec6642"}, + {file = "coverage-7.4.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78cdcbf7b9cb83fe047ee09298e25b1cd1636824067166dc97ad0543b079d22f"}, + {file = "coverage-7.4.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:2599972b21911111114100d362aea9e70a88b258400672626efa2b9e2179609c"}, + {file = "coverage-7.4.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ef00d31b7569ed3cb2036f26565f1984b9fc08541731ce01012b02a4c238bf03"}, + {file = "coverage-7.4.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:20a875bfd8c282985c4720c32aa05056f77a68e6d8bbc5fe8632c5860ee0b49b"}, + {file = "coverage-7.4.2-cp310-cp310-win32.whl", hash = "sha256:b3f2b1eb229f23c82898eedfc3296137cf1f16bb145ceab3edfd17cbde273fb7"}, + {file = "coverage-7.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:7df95fdd1432a5d2675ce630fef5f239939e2b3610fe2f2b5bf21fa505256fa3"}, + {file = "coverage-7.4.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a8ddbd158e069dded57738ea69b9744525181e99974c899b39f75b2b29a624e2"}, + {file 
= "coverage-7.4.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:81a5fb41b0d24447a47543b749adc34d45a2cf77b48ca74e5bf3de60a7bd9edc"}, + {file = "coverage-7.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2412e98e70f16243be41d20836abd5f3f32edef07cbf8f407f1b6e1ceae783ac"}, + {file = "coverage-7.4.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ddb79414c15c6f03f56cc68fa06994f047cf20207c31b5dad3f6bab54a0f66ef"}, + {file = "coverage-7.4.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf89ab85027427d351f1de918aff4b43f4eb5f33aff6835ed30322a86ac29c9e"}, + {file = "coverage-7.4.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a178b7b1ac0f1530bb28d2e51f88c0bab3e5949835851a60dda80bff6052510c"}, + {file = "coverage-7.4.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:06fe398145a2e91edaf1ab4eee66149c6776c6b25b136f4a86fcbbb09512fd10"}, + {file = "coverage-7.4.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:18cac867950943fe93d6cd56a67eb7dcd2d4a781a40f4c1e25d6f1ed98721a55"}, + {file = "coverage-7.4.2-cp311-cp311-win32.whl", hash = "sha256:f72cdd2586f9a769570d4b5714a3837b3a59a53b096bb954f1811f6a0afad305"}, + {file = "coverage-7.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:d779a48fac416387dd5673fc5b2d6bd903ed903faaa3247dc1865c65eaa5a93e"}, + {file = "coverage-7.4.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:adbdfcda2469d188d79771d5696dc54fab98a16d2ef7e0875013b5f56a251047"}, + {file = "coverage-7.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ac4bab32f396b03ebecfcf2971668da9275b3bb5f81b3b6ba96622f4ef3f6e17"}, + {file = "coverage-7.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:006d220ba2e1a45f1de083d5022d4955abb0aedd78904cd5a779b955b019ec73"}, + {file = "coverage-7.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3733545eb294e5ad274abe131d1e7e7de4ba17a144505c12feca48803fea5f64"}, + {file = "coverage-7.4.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42a9e754aa250fe61f0f99986399cec086d7e7a01dd82fd863a20af34cbce962"}, + {file = "coverage-7.4.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:2ed37e16cf35c8d6e0b430254574b8edd242a367a1b1531bd1adc99c6a5e00fe"}, + {file = "coverage-7.4.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:b953275d4edfab6cc0ed7139fa773dfb89e81fee1569a932f6020ce7c6da0e8f"}, + {file = "coverage-7.4.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:32b4ab7e6c924f945cbae5392832e93e4ceb81483fd6dc4aa8fb1a97b9d3e0e1"}, + {file = "coverage-7.4.2-cp312-cp312-win32.whl", hash = "sha256:f5df76c58977bc35a49515b2fbba84a1d952ff0ec784a4070334dfbec28a2def"}, + {file = "coverage-7.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:34423abbaad70fea9d0164add189eabaea679068ebdf693baa5c02d03e7db244"}, + {file = "coverage-7.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5b11f9c6587668e495cc7365f85c93bed34c3a81f9f08b0920b87a89acc13469"}, + {file = "coverage-7.4.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:51593a1f05c39332f623d64d910445fdec3d2ac2d96b37ce7f331882d5678ddf"}, + {file = "coverage-7.4.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69f1665165ba2fe7614e2f0c1aed71e14d83510bf67e2ee13df467d1c08bf1e8"}, + {file = 
"coverage-7.4.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3c8bbb95a699c80a167478478efe5e09ad31680931ec280bf2087905e3b95ec"}, + {file = "coverage-7.4.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:175f56572f25e1e1201d2b3e07b71ca4d201bf0b9cb8fad3f1dfae6a4188de86"}, + {file = "coverage-7.4.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8562ca91e8c40864942615b1d0b12289d3e745e6b2da901d133f52f2d510a1e3"}, + {file = "coverage-7.4.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d9a1ef0f173e1a19738f154fb3644f90d0ada56fe6c9b422f992b04266c55d5a"}, + {file = "coverage-7.4.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f40ac873045db4fd98a6f40387d242bde2708a3f8167bd967ccd43ad46394ba2"}, + {file = "coverage-7.4.2-cp38-cp38-win32.whl", hash = "sha256:d1b750a8409bec61caa7824bfd64a8074b6d2d420433f64c161a8335796c7c6b"}, + {file = "coverage-7.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:b4ae777bebaed89e3a7e80c4a03fac434a98a8abb5251b2a957d38fe3fd30088"}, + {file = "coverage-7.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3ff7f92ae5a456101ca8f48387fd3c56eb96353588e686286f50633a611afc95"}, + {file = "coverage-7.4.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:861d75402269ffda0b33af94694b8e0703563116b04c681b1832903fac8fd647"}, + {file = "coverage-7.4.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3507427d83fa961cbd73f11140f4a5ce84208d31756f7238d6257b2d3d868405"}, + {file = "coverage-7.4.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bf711d517e21fb5bc429f5c4308fbc430a8585ff2a43e88540264ae87871e36a"}, + {file = "coverage-7.4.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c00e54f0bd258ab25e7f731ca1d5144b0bf7bec0051abccd2bdcff65fa3262c9"}, + {file = "coverage-7.4.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f8e845d894e39fb53834da826078f6dc1a933b32b1478cf437007367efaf6f6a"}, + {file = "coverage-7.4.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:840456cb1067dc350af9080298c7c2cfdddcedc1cb1e0b30dceecdaf7be1a2d3"}, + {file = "coverage-7.4.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c11ca2df2206a4e3e4c4567f52594637392ed05d7c7fb73b4ea1c658ba560265"}, + {file = "coverage-7.4.2-cp39-cp39-win32.whl", hash = "sha256:3ff5bdb08d8938d336ce4088ca1a1e4b6c8cd3bef8bb3a4c0eb2f37406e49643"}, + {file = "coverage-7.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:ac9e95cefcf044c98d4e2c829cd0669918585755dd9a92e28a1a7012322d0a95"}, + {file = "coverage-7.4.2-pp38.pp39.pp310-none-any.whl", hash = "sha256:f593a4a90118d99014517c2679e04a4ef5aee2d81aa05c26c734d271065efcb6"}, + {file = "coverage-7.4.2.tar.gz", hash = "sha256:1a5ee18e3a8d766075ce9314ed1cb695414bae67df6a4b0805f5137d93d6f1cb"}, ] [package.dependencies] @@ -700,107 +708,123 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "41.0.6" +version = "42.0.4" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-41.0.6-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:0f27acb55a4e77b9be8d550d762b0513ef3fc658cd3eb15110ebbcbd626db12c"}, - {file = "cryptography-41.0.6-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:ae236bb8760c1e55b7a39b6d4d32d2279bc6c7c8500b7d5a13b6fb9fc97be35b"}, - {file = "cryptography-41.0.6-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afda76d84b053923c27ede5edc1ed7d53e3c9f475ebaf63c68e69f1403c405a8"}, - {file = "cryptography-41.0.6-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da46e2b5df770070412c46f87bac0849b8d685c5f2679771de277a422c7d0b86"}, - {file = "cryptography-41.0.6-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ff369dd19e8fe0528b02e8df9f2aeb2479f89b1270d90f96a63500afe9af5cae"}, - {file = "cryptography-41.0.6-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:b648fe2a45e426aaee684ddca2632f62ec4613ef362f4d681a9a6283d10e079d"}, - {file = "cryptography-41.0.6-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:5daeb18e7886a358064a68dbcaf441c036cbdb7da52ae744e7b9207b04d3908c"}, - {file = "cryptography-41.0.6-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:068bc551698c234742c40049e46840843f3d98ad7ce265fd2bd4ec0d11306596"}, - {file = "cryptography-41.0.6-cp37-abi3-win32.whl", hash = "sha256:2132d5865eea673fe6712c2ed5fb4fa49dba10768bb4cc798345748380ee3660"}, - {file = "cryptography-41.0.6-cp37-abi3-win_amd64.whl", hash = "sha256:48783b7e2bef51224020efb61b42704207dde583d7e371ef8fc2a5fb6c0aabc7"}, - {file = "cryptography-41.0.6-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:8efb2af8d4ba9dbc9c9dd8f04d19a7abb5b49eab1f3694e7b5a16a5fc2856f5c"}, - {file = "cryptography-41.0.6-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c5a550dc7a3b50b116323e3d376241829fd326ac47bc195e04eb33a8170902a9"}, - {file = "cryptography-41.0.6-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:85abd057699b98fce40b41737afb234fef05c67e116f6f3650782c10862c43da"}, - {file = "cryptography-41.0.6-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f39812f70fc5c71a15aa3c97b2bbe213c3f2a460b79bd21c40d033bb34a9bf36"}, - {file = "cryptography-41.0.6-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:742ae5e9a2310e9dade7932f9576606836ed174da3c7d26bc3d3ab4bd49b9f65"}, - {file = "cryptography-41.0.6-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:35f3f288e83c3f6f10752467c48919a7a94b7d88cc00b0668372a0d2ad4f8ead"}, - {file = "cryptography-41.0.6-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4d03186af98b1c01a4eda396b137f29e4e3fb0173e30f885e27acec8823c1b09"}, - {file = "cryptography-41.0.6-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:b27a7fd4229abef715e064269d98a7e2909ebf92eb6912a9603c7e14c181928c"}, - {file = "cryptography-41.0.6-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:398ae1fc711b5eb78e977daa3cbf47cec20f2c08c5da129b7a296055fbb22aed"}, - {file = "cryptography-41.0.6-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7e00fb556bda398b99b0da289ce7053639d33b572847181d6483ad89835115f6"}, - {file = "cryptography-41.0.6-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:60e746b11b937911dc70d164060d28d273e31853bb359e2b2033c9e93e6f3c43"}, - {file = "cryptography-41.0.6-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3288acccef021e3c3c10d58933f44e8602cf04dba96d9796d70d537bb2f4bbc4"}, - {file = "cryptography-41.0.6.tar.gz", hash = 
"sha256:422e3e31d63743855e43e5a6fcc8b4acab860f560f9321b0ee6269cc7ed70cc3"}, + {file = "cryptography-42.0.4-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:ffc73996c4fca3d2b6c1c8c12bfd3ad00def8621da24f547626bf06441400449"}, + {file = "cryptography-42.0.4-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:db4b65b02f59035037fde0998974d84244a64c3265bdef32a827ab9b63d61b18"}, + {file = "cryptography-42.0.4-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad9c385ba8ee025bb0d856714f71d7840020fe176ae0229de618f14dae7a6e2"}, + {file = "cryptography-42.0.4-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69b22ab6506a3fe483d67d1ed878e1602bdd5912a134e6202c1ec672233241c1"}, + {file = "cryptography-42.0.4-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:e09469a2cec88fb7b078e16d4adec594414397e8879a4341c6ace96013463d5b"}, + {file = "cryptography-42.0.4-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3e970a2119507d0b104f0a8e281521ad28fc26f2820687b3436b8c9a5fcf20d1"}, + {file = "cryptography-42.0.4-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:e53dc41cda40b248ebc40b83b31516487f7db95ab8ceac1f042626bc43a2f992"}, + {file = "cryptography-42.0.4-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:c3a5cbc620e1e17009f30dd34cb0d85c987afd21c41a74352d1719be33380885"}, + {file = "cryptography-42.0.4-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:6bfadd884e7280df24d26f2186e4e07556a05d37393b0f220a840b083dc6a824"}, + {file = "cryptography-42.0.4-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:01911714117642a3f1792c7f376db572aadadbafcd8d75bb527166009c9f1d1b"}, + {file = "cryptography-42.0.4-cp37-abi3-win32.whl", hash = "sha256:fb0cef872d8193e487fc6bdb08559c3aa41b659a7d9be48b2e10747f47863925"}, + {file = "cryptography-42.0.4-cp37-abi3-win_amd64.whl", hash = "sha256:c1f25b252d2c87088abc8bbc4f1ecbf7c919e05508a7e8628e6875c40bc70923"}, + {file = "cryptography-42.0.4-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:15a1fb843c48b4a604663fa30af60818cd28f895572386e5f9b8a665874c26e7"}, + {file = "cryptography-42.0.4-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1327f280c824ff7885bdeef8578f74690e9079267c1c8bd7dc5cc5aa065ae52"}, + {file = "cryptography-42.0.4-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ffb03d419edcab93b4b19c22ee80c007fb2d708429cecebf1dd3258956a563a"}, + {file = "cryptography-42.0.4-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:1df6fcbf60560d2113b5ed90f072dc0b108d64750d4cbd46a21ec882c7aefce9"}, + {file = "cryptography-42.0.4-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:44a64043f743485925d3bcac548d05df0f9bb445c5fcca6681889c7c3ab12764"}, + {file = "cryptography-42.0.4-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:3c6048f217533d89f2f8f4f0fe3044bf0b2090453b7b73d0b77db47b80af8dff"}, + {file = "cryptography-42.0.4-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:6d0fbe73728c44ca3a241eff9aefe6496ab2656d6e7a4ea2459865f2e8613257"}, + {file = "cryptography-42.0.4-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:887623fe0d70f48ab3f5e4dbf234986b1329a64c066d719432d0698522749929"}, + {file = "cryptography-42.0.4-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:ce8613beaffc7c14f091497346ef117c1798c202b01153a8cc7b8e2ebaaf41c0"}, + {file = "cryptography-42.0.4-cp39-abi3-win32.whl", hash = "sha256:810bcf151caefc03e51a3d61e53335cd5c7316c0a105cc695f0959f2c638b129"}, + {file = "cryptography-42.0.4-cp39-abi3-win_amd64.whl", hash = 
"sha256:a0298bdc6e98ca21382afe914c642620370ce0470a01e1bef6dd9b5354c36854"}, + {file = "cryptography-42.0.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5f8907fcf57392cd917892ae83708761c6ff3c37a8e835d7246ff0ad251d9298"}, + {file = "cryptography-42.0.4-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:12d341bd42cdb7d4937b0cabbdf2a94f949413ac4504904d0cdbdce4a22cbf88"}, + {file = "cryptography-42.0.4-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1cdcdbd117681c88d717437ada72bdd5be9de117f96e3f4d50dab3f59fd9ab20"}, + {file = "cryptography-42.0.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0e89f7b84f421c56e7ff69f11c441ebda73b8a8e6488d322ef71746224c20fce"}, + {file = "cryptography-42.0.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f1e85a178384bf19e36779d91ff35c7617c885da487d689b05c1366f9933ad74"}, + {file = "cryptography-42.0.4-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d2a27aca5597c8a71abbe10209184e1a8e91c1fd470b5070a2ea60cafec35bcd"}, + {file = "cryptography-42.0.4-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4e36685cb634af55e0677d435d425043967ac2f3790ec652b2b88ad03b85c27b"}, + {file = "cryptography-42.0.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:f47be41843200f7faec0683ad751e5ef11b9a56a220d57f300376cd8aba81660"}, + {file = "cryptography-42.0.4.tar.gz", hash = "sha256:831a4b37accef30cccd34fcb916a5d7b5be3cbbe27268a02832c3e450aea39cb"}, ] [package.dependencies] -cffi = ">=1.12" +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} [package.extras] docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] -docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] +docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] nox = ["nox"] -pep8test = ["black", "check-sdist", "mypy", "ruff"] +pep8test = ["check-sdist", "click", "mypy", "ruff"] sdist = ["build"] ssh = ["bcrypt (>=3.1.5)"] -test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] [[package]] name = "cython" -version = "3.0.7" +version = "3.0.8" description = "The Cython compiler for writing C extensions in the Python language." 
 optional = false
 python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
 files = [
-    {file = "Cython-3.0.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3c0e19bb41de6be9d8afc85795159ca16296be81a586cd9588be0400d44a855"},
-    {file = "Cython-3.0.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e8bf00ec1dd1d92e9ae74d2e6891f087a939e1dfb40c9c7fa5d8d6a26c94f5a"},
-    {file = "Cython-3.0.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd6ae43ef2e596c9a88dbf2a8895be2e32cc2f5bc3c8ba2e7753b69068fc0b2d"},
-    {file = "Cython-3.0.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f674be92673e87dd8ee7cfe553d5960ec4effc5ab15063b9a5e265a51585a31a"},
-    {file = "Cython-3.0.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:861cf254bf5836d47c2aee86aa75dd93d3de00ccd1b077c3c7a2bb22cba358e7"},
-    {file = "Cython-3.0.7-cp310-cp310-win32.whl", hash = "sha256:f6d8ff62ad55dc0393686438eac4b457a916e4d1118a0b550746bb52b4c756cc"},
-    {file = "Cython-3.0.7-cp310-cp310-win_amd64.whl", hash = "sha256:e13abb14843397b76d0472c7d33cd260d5f262ab05cc27ed423317e645e29643"},
-    {file = "Cython-3.0.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0c636c9ab92c7838231a1ba769e519d953af8294612f3f772a54d3a5250ff23f"},
-    {file = "Cython-3.0.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22d2a684122dfb531853d57c8c85c1d5d44be709e12466dca99fa6aee7d8054f"},
-    {file = "Cython-3.0.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1bdf8a107fdf9e174991aa87a0be7504f60de1ec6bfb1ccfb30e33acac818a0"},
-    {file = "Cython-3.0.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3a83e04fde663b84905f3a20213a4333d13a07b79434300704b70dc552761f8b"},
-    {file = "Cython-3.0.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e34b4b08d795ccca920fa26b099558f4f1e4e3f794e4ba8d3433c5bc2454d50a"},
-    {file = "Cython-3.0.7-cp311-cp311-win32.whl", hash = "sha256:133057ac45b6fa7fe5d7baada9d3545d09339432f75c0545f556e8c6fecc2932"},
-    {file = "Cython-3.0.7-cp311-cp311-win_amd64.whl", hash = "sha256:b65abca78aa5ebc8675c8480b9a53006f6efea9910ad099cf32c9fb5617ef251"},
-    {file = "Cython-3.0.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23ceac5315fe899c229e874328742154e331fa41337bb03f6f5264636c351c9e"},
-    {file = "Cython-3.0.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ea936cf5931297ba07bce121388c4c6266c1b63a9f4d648ae16c92ff090204b"},
-    {file = "Cython-3.0.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9fcd9a18ee3ac7f460e0841954feb495102ffbdbec0e6c78562f3495cda000dd"},
-    {file = "Cython-3.0.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7c8d579d13cb81abe704c8b0908d122b81d6e2623265a19c4a6a7377f440debb"},
-    {file = "Cython-3.0.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ef5bb0268bfe5992da3ef9292463a5a895ed8700b134ed2c00008d5471b3ba6e"},
-    {file = "Cython-3.0.7-cp312-cp312-win32.whl", hash = "sha256:55f93d3822bc196b37a8bdfa4ec6a35232a399e97f2baa714bd5ed8ea9b0ce68"},
-    {file = "Cython-3.0.7-cp312-cp312-win_amd64.whl", hash = "sha256:f3845c4506e0d207c5e268fb02813928f3a1e135de954a379f165ef0d581da47"},
-    {file = "Cython-3.0.7-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8ad7c2303a338b2c0b6c6c68f101a6768725934538756096cf3388a5c07a7525"},
-    {file = "Cython-3.0.7-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fed25959e4025870fdde5f895fcb126196d22affd4f4fad85a2823e0dddc85b0"},
-    {file = "Cython-3.0.7-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:79868ec74e4907a8a6e63effe13547c6157f196a162920b1de066da5849ffb8e"},
-    {file = "Cython-3.0.7-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:5e3a038332973b12e72236e8884dc99601a840334c2c46cfbbb5851cb94166eb"},
-    {file = "Cython-3.0.7-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:f2602a5c97a3d618b3b847514204ef3349fb414c59e1126c0c2c708d2c5680f8"},
-    {file = "Cython-3.0.7-cp36-cp36m-win32.whl", hash = "sha256:539ad5a21141e6420035cf616bcba48d999bf878839e52692f97fc7e2f16265c"},
-    {file = "Cython-3.0.7-cp36-cp36m-win_amd64.whl", hash = "sha256:848a28ea49166454c3bff927e5a47629eecf1aa755d6fb3290569cba0fc93766"},
-    {file = "Cython-3.0.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82f27a0134fc6bb46032ca5f728d8af984f3be94a3cb01cb70ff1224e551b9cf"},
-    {file = "Cython-3.0.7-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79f20c61114c7948cf1214585066406cef4b54a9b935160980e0b6e70ada3a69"},
-    {file = "Cython-3.0.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:34d51709e10ad6213b4bf094af7be7ff82bab43216b3c92a07d05b451deeca79"},
-    {file = "Cython-3.0.7-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3f02c7240abab48d59f0d5fef7064f18f01a2a204616165fa6367a8abf5a8832"},
-    {file = "Cython-3.0.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:225f8bba6428b8d711ca2d6c738d2e3a4667f6a2ae40f8a7a5256f69f6a3600e"},
-    {file = "Cython-3.0.7-cp37-cp37m-win32.whl", hash = "sha256:30eb2d2938b9195e2c82951713429aff3ad1be9f104437d1536a04eb0cb3dc0e"},
-    {file = "Cython-3.0.7-cp37-cp37m-win_amd64.whl", hash = "sha256:167b3f3894dcc697cefefac1d198304fae8eb4d5860a7b8bc2459d572e838470"},
-    {file = "Cython-3.0.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c67105f2c6ccf5b3adbcfaecf3c5c9fa8940f9f97955c9ad7d2542151d97d93"},
-    {file = "Cython-3.0.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a1859af761977530df2cd5c36e31d54e8d6708ad2c4656e7125c482364dc216"},
-    {file = "Cython-3.0.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01b94304aab87496e81d1f546e71abf57b430b39be4269df1cd7da9928d70b5b"},
-    {file = "Cython-3.0.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:931aade65f77cf59f2a702ac1f549a4836ce221107c740502cbad18d6d8e9511"},
-    {file = "Cython-3.0.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:812b193c26553f1f375d4f1c50f805c227b24ed2d595bc9cdaf78c992ecc64a4"},
-    {file = "Cython-3.0.7-cp38-cp38-win32.whl", hash = "sha256:b227643d8a40b68554dc7d37fcd03fc97b4fb0bd2614aeb5f2e07ab244642d36"},
-    {file = "Cython-3.0.7-cp38-cp38-win_amd64.whl", hash = "sha256:0d8a98c7d86ac4d05b251c39faf49423780381aab55fbf2e147f6e006a34a58a"},
-    {file = "Cython-3.0.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:816f5285d596062c7ef22790de7d75354b58d4417a9fc64cba914aeeb900db0b"},
-    {file = "Cython-3.0.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9d0dae6dccd349b8ccf197c10ef2d05c711ca36a649c7eddbab1de2c90b63a1"},
-    {file = "Cython-3.0.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:13211b67b29f6ed8e87c137496c73d93aff0330d97940b4fbed72eae37a4a2a0"},
-    {file = "Cython-3.0.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b1853bc34ced5ff6473e881fcf6de29da83262552c8f268a0df53b49c2b89e2c"},
-    {file = "Cython-3.0.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:51e8164b1270625ff101e95c3c1c234421520c07a0a3a20ded9e9431d98afce7"},
-    {file = "Cython-3.0.7-cp39-cp39-win32.whl", hash = "sha256:45319d2471f4dbf19893ca53785a421107266e18b8cccd2054fce1e3f72a85f1"},
-    {file = "Cython-3.0.7-cp39-cp39-win_amd64.whl", hash = "sha256:612d83fd1eb5aaa5401a755c1f1aafacd9dab404cd350b90d5f404c98b33e4b3"},
-    {file = "Cython-3.0.7-py2.py3-none-any.whl", hash = "sha256:936ec37b261b226d7404eff23a9aad284098338150d42a53d6a9af12b18d3892"},
-    {file = "Cython-3.0.7.tar.gz", hash = "sha256:fb299acf3a578573c190c858d49e0cf9d75f4bc49c3f24c5a63804997ef09213"},
+    {file = "Cython-3.0.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a846e0a38e2b24e9a5c5dc74b0e54c6e29420d88d1dafabc99e0fc0f3e338636"},
+    {file = "Cython-3.0.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45523fdc2b78d79b32834cc1cc12dc2ca8967af87e22a3ee1bff20e77c7f5520"},
+    {file = "Cython-3.0.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa0b7f3f841fe087410cab66778e2d3fb20ae2d2078a2be3dffe66c6574be39"},
+    {file = "Cython-3.0.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e87294e33e40c289c77a135f491cd721bd089f193f956f7b8ed5aa2d0b8c558f"},
+    {file = "Cython-3.0.8-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a1df7a129344b1215c20096d33c00193437df1a8fcca25b71f17c23b1a44f782"},
+    {file = "Cython-3.0.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:13c2a5e57a0358da467d97667297bf820b62a1a87ae47c5f87938b9bb593acbd"},
+    {file = "Cython-3.0.8-cp310-cp310-win32.whl", hash = "sha256:96b028f044f5880e3cb18ecdcfc6c8d3ce9d0af28418d5ab464509f26d8adf12"},
+    {file = "Cython-3.0.8-cp310-cp310-win_amd64.whl", hash = "sha256:8140597a8b5cc4f119a1190f5a2228a84f5ca6d8d9ec386cfce24663f48b2539"},
+    {file = "Cython-3.0.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:aae26f9663e50caf9657148403d9874eea41770ecdd6caf381d177c2b1bb82ba"},
+    {file = "Cython-3.0.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:547eb3cdb2f8c6f48e6865d5a741d9dd051c25b3ce076fbca571727977b28ac3"},
+    {file = "Cython-3.0.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a567d4b9ba70b26db89d75b243529de9e649a2f56384287533cf91512705bee"},
+    {file = "Cython-3.0.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:51d1426263b0e82fb22bda8ea60dc77a428581cc19e97741011b938445d383f1"},
+    {file = "Cython-3.0.8-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c26daaeccda072459b48d211415fd1e5507c06bcd976fa0d5b8b9f1063467d7b"},
+    {file = "Cython-3.0.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:289ce7838208211cd166e975865fd73b0649bf118170b6cebaedfbdaf4a37795"},
+    {file = "Cython-3.0.8-cp311-cp311-win32.whl", hash = "sha256:c8aa05f5e17f8042a3be052c24f2edc013fb8af874b0bf76907d16c51b4e7871"},
+    {file = "Cython-3.0.8-cp311-cp311-win_amd64.whl", hash = "sha256:000dc9e135d0eec6ecb2b40a5b02d0868a2f8d2e027a41b0fe16a908a9e6de02"},
+    {file = "Cython-3.0.8-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:90d3fe31db55685d8cb97d43b0ec39ef614fcf660f83c77ed06aa670cb0e164f"},
+    {file = "Cython-3.0.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e24791ddae2324e88e3c902a765595c738f19ae34ee66bfb1a6dac54b1833419"},
+    {file = "Cython-3.0.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f020fa1c0552052e0660790b8153b79e3fc9a15dbd8f1d0b841fe5d204a6ae6"},
+    {file = "Cython-3.0.8-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18bfa387d7a7f77d7b2526af69a65dbd0b731b8d941aaff5becff8e21f6d7717"},
+    {file = "Cython-3.0.8-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:fe81b339cffd87c0069c6049b4d33e28bdd1874625ee515785bf42c9fdff3658"},
+    {file = "Cython-3.0.8-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:80fd94c076e1e1b1ee40a309be03080b75f413e8997cddcf401a118879863388"},
+    {file = "Cython-3.0.8-cp312-cp312-win32.whl", hash = "sha256:85077915a93e359a9b920280d214dc0cf8a62773e1f3d7d30fab8ea4daed670c"},
+    {file = "Cython-3.0.8-cp312-cp312-win_amd64.whl", hash = "sha256:0cb2dcc565c7851f75d496f724a384a790fab12d1b82461b663e66605bec429a"},
+    {file = "Cython-3.0.8-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:870d2a0a7e3cbd5efa65aecdb38d715ea337a904ea7bb22324036e78fb7068e7"},
+    {file = "Cython-3.0.8-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7e8f2454128974905258d86534f4fd4f91d2f1343605657ecab779d80c9d6d5e"},
+    {file = "Cython-3.0.8-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1949d6aa7bc792554bee2b67a9fe41008acbfe22f4f8df7b6ec7b799613a4b3"},
+    {file = "Cython-3.0.8-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9f2c6e1b8f3bcd6cb230bac1843f85114780bb8be8614855b1628b36bb510e0"},
+    {file = "Cython-3.0.8-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:05d7eddc668ae7993643f32c7661f25544e791edb745758672ea5b1a82ecffa6"},
+    {file = "Cython-3.0.8-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bfabe115deef4ada5d23c87bddb11289123336dcc14347011832c07db616dd93"},
+    {file = "Cython-3.0.8-cp36-cp36m-win32.whl", hash = "sha256:0c38c9f0bcce2df0c3347285863621be904ac6b64c5792d871130569d893efd7"},
+    {file = "Cython-3.0.8-cp36-cp36m-win_amd64.whl", hash = "sha256:6c46939c3983217d140999de7c238c3141f56b1ea349e47ca49cae899969aa2c"},
+    {file = "Cython-3.0.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:115f0a50f752da6c99941b103b5cb090da63eb206abbc7c2ad33856ffc73f064"},
+    {file = "Cython-3.0.8-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9c0f29246734561c90f36e70ed0506b61aa3d044e4cc4cba559065a2a741fae"},
+    {file = "Cython-3.0.8-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ab75242869ff71e5665fe5c96f3378e79e792fa3c11762641b6c5afbbbbe026"},
+    {file = "Cython-3.0.8-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6717c06e9cfc6c1df18543cd31a21f5d8e378a40f70c851fa2d34f0597037abc"},
+    {file = "Cython-3.0.8-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:9d3f74388db378a3c6fd06e79a809ed98df3f56484d317b81ee762dbf3c263e0"},
+    {file = "Cython-3.0.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ae7ac561fd8253a9ae96311e91d12af5f701383564edc11d6338a7b60b285a6f"},
+    {file = "Cython-3.0.8-cp37-cp37m-win32.whl", hash = "sha256:97b2a45845b993304f1799664fa88da676ee19442b15fdcaa31f9da7e1acc434"},
+    {file = "Cython-3.0.8-cp37-cp37m-win_amd64.whl", hash = "sha256:9e2be2b340fea46fb849d378f9b80d3c08ff2e81e2bfbcdb656e2e3cd8c6b2dc"},
+    {file = "Cython-3.0.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2cde23c555470db3f149ede78b518e8274853745289c956a0e06ad8d982e4db9"},
+    {file = "Cython-3.0.8-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7990ca127e1f1beedaf8fc8bf66541d066ef4723ad7d8d47a7cbf842e0f47580"},
+    {file = "Cython-3.0.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b983c8e6803f016146c26854d9150ddad5662960c804ea7f0c752c9266752f0"},
+    {file = "Cython-3.0.8-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a973268d7ca1a2bdf78575e459a94a78e1a0a9bb62a7db0c50041949a73b02ff"},
+    {file = "Cython-3.0.8-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:61a237bc9dd23c7faef0fcfce88c11c65d0c9bb73c74ccfa408b3a012073c20e"},
+    {file = "Cython-3.0.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3a3d67f079598af49e90ff9655bf85bd358f093d727eb21ca2708f467c489cae"},
+    {file = "Cython-3.0.8-cp38-cp38-win32.whl", hash = "sha256:17a642bb01a693e34c914106566f59844b4461665066613913463a719e0dd15d"},
+    {file = "Cython-3.0.8-cp38-cp38-win_amd64.whl", hash = "sha256:2cdfc32252f3b6dc7c94032ab744dcedb45286733443c294d8f909a4854e7f83"},
+    {file = "Cython-3.0.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fa97893d99385386925d00074654aeae3a98867f298d1e12ceaf38a9054a9bae"},
+    {file = "Cython-3.0.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f05c0bf9d085c031df8f583f0d506aa3be1692023de18c45d0aaf78685bbb944"},
+    {file = "Cython-3.0.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de892422582f5758bd8de187e98ac829330ec1007bc42c661f687792999988a7"},
+    {file = "Cython-3.0.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:314f2355a1f1d06e3c431eaad4708cf10037b5e91e4b231d89c913989d0bdafd"},
+    {file = "Cython-3.0.8-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:78825a3774211e7d5089730f00cdf7f473042acc9ceb8b9eeebe13ed3a5541de"},
+    {file = "Cython-3.0.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:df8093deabc55f37028190cf5e575c26aad23fc673f34b85d5f45076bc37ce39"},
+    {file = "Cython-3.0.8-cp39-cp39-win32.whl", hash = "sha256:1aca1b97e0095b3a9a6c33eada3f661a4ed0d499067d121239b193e5ba3bb4f0"},
+    {file = "Cython-3.0.8-cp39-cp39-win_amd64.whl", hash = "sha256:16873d78be63bd38ffb759da7ab82814b36f56c769ee02b1d5859560e4c3ac3c"},
+    {file = "Cython-3.0.8-py2.py3-none-any.whl", hash = "sha256:171b27051253d3f9108e9759e504ba59ff06e7f7ba944457f94deaf9c21bf0b6"},
+    {file = "Cython-3.0.8.tar.gz", hash = "sha256:8333423d8fd5765e7cceea3a9985dd1e0a5dfeb2734629e1a2ed2d6233d39de6"},
 ]
 
 [[package]]
@@ -816,13 +840,13 @@ files = [
 
 [[package]]
 name = "distlib"
-version = "0.3.7"
+version = "0.3.8"
 description = "Distribution utilities"
 optional = false
 python-versions = "*"
 files = [
-    {file = "distlib-0.3.7-py2.py3-none-any.whl", hash = "sha256:2e24928bc811348f0feb63014e97aaae3037f2cf48712d51ae61df7fd6075057"},
-    {file = "distlib-0.3.7.tar.gz", hash = "sha256:9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8"},
+    {file = "distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"},
+    {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"},
 ]
 
 [[package]]
@@ -859,79 +883,69 @@ files = [
 
 [[package]]
 name = "duckdb"
-version = "0.9.2"
-description = "DuckDB embedded database"
+version = "0.10.0"
+description = "DuckDB in-process database"
 optional = true
 python-versions = ">=3.7.0"
 files = [
-    {file = "duckdb-0.9.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:aadcea5160c586704c03a8a796c06a8afffbefefb1986601104a60cb0bfdb5ab"},
-    {file = "duckdb-0.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:08215f17147ed83cbec972175d9882387366de2ed36c21cbe4add04b39a5bcb4"},
-    {file = "duckdb-0.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ee6c2a8aba6850abef5e1be9dbc04b8e72a5b2c2b67f77892317a21fae868fe7"},
-    {file = "duckdb-0.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ff49f3da9399900fd58b5acd0bb8bfad22c5147584ad2427a78d937e11ec9d0"},
-    {file = "duckdb-0.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd5ac5baf8597efd2bfa75f984654afcabcd698342d59b0e265a0bc6f267b3f0"},
-    {file = "duckdb-0.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:81c6df905589a1023a27e9712edb5b724566587ef280a0c66a7ec07c8083623b"},
-    {file = "duckdb-0.9.2-cp310-cp310-win32.whl", hash = "sha256:a298cd1d821c81d0dec8a60878c4b38c1adea04a9675fb6306c8f9083bbf314d"},
-    {file = "duckdb-0.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:492a69cd60b6cb4f671b51893884cdc5efc4c3b2eb76057a007d2a2295427173"},
-    {file = "duckdb-0.9.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:061a9ea809811d6e3025c5de31bc40e0302cfb08c08feefa574a6491e882e7e8"},
-    {file = "duckdb-0.9.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a43f93be768af39f604b7b9b48891f9177c9282a408051209101ff80f7450d8f"},
-    {file = "duckdb-0.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ac29c8c8f56fff5a681f7bf61711ccb9325c5329e64f23cb7ff31781d7b50773"},
-    {file = "duckdb-0.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b14d98d26bab139114f62ade81350a5342f60a168d94b27ed2c706838f949eda"},
-    {file = "duckdb-0.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:796a995299878913e765b28cc2b14c8e44fae2f54ab41a9ee668c18449f5f833"},
-    {file = "duckdb-0.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6cb64ccfb72c11ec9c41b3cb6181b6fd33deccceda530e94e1c362af5f810ba1"},
-    {file = "duckdb-0.9.2-cp311-cp311-win32.whl", hash = "sha256:930740cb7b2cd9e79946e1d3a8f66e15dc5849d4eaeff75c8788d0983b9256a5"},
-    {file = "duckdb-0.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:c28f13c45006fd525001b2011cdf91fa216530e9751779651e66edc0e446be50"},
-    {file = "duckdb-0.9.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:fbce7bbcb4ba7d99fcec84cec08db40bc0dd9342c6c11930ce708817741faeeb"},
-    {file = "duckdb-0.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15a82109a9e69b1891f0999749f9e3265f550032470f51432f944a37cfdc908b"},
-    {file = "duckdb-0.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9490fb9a35eb74af40db5569d90df8a04a6f09ed9a8c9caa024998c40e2506aa"},
-    {file = "duckdb-0.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:696d5c6dee86c1a491ea15b74aafe34ad2b62dcd46ad7e03b1d00111ca1a8c68"},
-    {file = "duckdb-0.9.2-cp37-cp37m-win32.whl", hash = "sha256:4f0935300bdf8b7631ddfc838f36a858c1323696d8c8a2cecbd416bddf6b0631"},
-    {file = "duckdb-0.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:0aab900f7510e4d2613263865570203ddfa2631858c7eb8cbed091af6ceb597f"},
-    {file = "duckdb-0.9.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:7d8130ed6a0c9421b135d0743705ea95b9a745852977717504e45722c112bf7a"},
-    {file = "duckdb-0.9.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:974e5de0294f88a1a837378f1f83330395801e9246f4e88ed3bfc8ada65dcbee"},
-    {file = "duckdb-0.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4fbc297b602ef17e579bb3190c94d19c5002422b55814421a0fc11299c0c1100"},
-    {file = "duckdb-0.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1dd58a0d84a424924a35b3772419f8cd78a01c626be3147e4934d7a035a8ad68"},
-    {file = "duckdb-0.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11a1194a582c80dfb57565daa06141727e415ff5d17e022dc5f31888a5423d33"},
-    {file = "duckdb-0.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:be45d08541002a9338e568dca67ab4f20c0277f8f58a73dfc1435c5b4297c996"},
-    {file = "duckdb-0.9.2-cp38-cp38-win32.whl", hash = "sha256:dd6f88aeb7fc0bfecaca633629ff5c986ac966fe3b7dcec0b2c48632fd550ba2"},
-    {file = "duckdb-0.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:28100c4a6a04e69aa0f4a6670a6d3d67a65f0337246a0c1a429f3f28f3c40b9a"},
-    {file = "duckdb-0.9.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7ae5bf0b6ad4278e46e933e51473b86b4b932dbc54ff097610e5b482dd125552"},
-    {file = "duckdb-0.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e5d0bb845a80aa48ed1fd1d2d285dd352e96dc97f8efced2a7429437ccd1fe1f"},
-    {file = "duckdb-0.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ce262d74a52500d10888110dfd6715989926ec936918c232dcbaddb78fc55b4"},
-    {file = "duckdb-0.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6935240da090a7f7d2666f6d0a5e45ff85715244171ca4e6576060a7f4a1200e"},
-    {file = "duckdb-0.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a5cfb93e73911696a98b9479299d19cfbc21dd05bb7ab11a923a903f86b4d06e"},
-    {file = "duckdb-0.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:64e3bc01751f31e7572d2716c3e8da8fe785f1cdc5be329100818d223002213f"},
-    {file = "duckdb-0.9.2-cp39-cp39-win32.whl", hash = "sha256:6e5b80f46487636368e31b61461940e3999986359a78660a50dfdd17dd72017c"},
-    {file = "duckdb-0.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:e6142a220180dbeea4f341708bd5f9501c5c962ce7ef47c1cadf5e8810b4cb13"},
-    {file = "duckdb-0.9.2.tar.gz", hash = "sha256:3843afeab7c3fc4a4c0b53686a4cc1d9cdbdadcbb468d60fef910355ecafd447"},
-]
-
-[[package]]
-name = "ecdsa"
-version = "0.18.0"
-description = "ECDSA cryptographic signature library (pure python)"
-optional = false
-python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
-files = [
-    {file = "ecdsa-0.18.0-py2.py3-none-any.whl", hash = "sha256:80600258e7ed2f16b9aa1d7c295bd70194109ad5a30fdee0eaeefef1d4c559dd"},
-    {file = "ecdsa-0.18.0.tar.gz", hash = "sha256:190348041559e21b22a1d65cee485282ca11a6f81d503fddb84d5017e9ed1e49"},
+    {file = "duckdb-0.10.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:bd0ffb3fddef0f72a150e4d76e10942a84a1a0447d10907df1621b90d6668060"},
+    {file = "duckdb-0.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f3d709d5c7c1a12b5e10d0b05fa916c670cd2b50178e3696faa0cc16048a1745"},
+    {file = "duckdb-0.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9114aa22ec5d591a20ce5184be90f49d8e5b5348ceaab21e102c54560d07a5f8"},
+    {file = "duckdb-0.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77a37877efadf39caf7cadde0f430fedf762751b9c54750c821e2f1316705a21"},
+    {file = "duckdb-0.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87cbc9e1d9c3fc9f14307bea757f99f15f46843c0ab13a6061354410824ed41f"},
+    {file = "duckdb-0.10.0-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f0bfec79fed387201550517d325dff4fad2705020bc139d936cab08b9e845662"},
+    {file = "duckdb-0.10.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c5622134d2d9796b15e09de810e450859d4beb46d9b861357ec9ae40a61b775c"},
+    {file = "duckdb-0.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:089ee8e831ccaef1b73fc89c43b661567175eed0115454880bafed5e35cda702"},
+    {file = "duckdb-0.10.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a05af63747f1d7021995f0811c333dee7316cec3b06c0d3e4741b9bdb678dd21"},
+    {file = "duckdb-0.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:072d6eba5d8a59e0069a8b5b4252fed8a21f9fe3f85a9129d186a39b3d0aea03"},
+    {file = "duckdb-0.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a77b85668f59b919042832e4659538337f1c7f197123076c5311f1c9cf077df7"},
+    {file = "duckdb-0.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:96a666f1d2da65d03199a977aec246920920a5ea1da76b70ae02bd4fb1ffc48c"},
+    {file = "duckdb-0.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ec76a4262b783628d26612d184834852d9c92fb203e91af789100c17e3d7173"},
+    {file = "duckdb-0.10.0-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:009dd9d2cdbd3b061a9efbdfc79f2d1a8377bcf49f1e5f430138621f8c083a6c"},
+    {file = "duckdb-0.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:878f06766088090dad4a2e5ee0081555242b2e8dcb29415ecc97e388cf0cf8d8"},
+    {file = "duckdb-0.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:713ff0a1fb63a6d60f454acf67f31656549fb5d63f21ac68314e4f522daa1a89"},
+    {file = "duckdb-0.10.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:9c0ee450dfedfb52dd4957244e31820feef17228da31af6d052979450a80fd19"},
+    {file = "duckdb-0.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ff79b2ea9994398b545c0d10601cd73565fbd09f8951b3d8003c7c5c0cebc7cb"},
+    {file = "duckdb-0.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6bdf1aa71b924ef651062e6b8ff9981ad85bec89598294af8a072062c5717340"},
+    {file = "duckdb-0.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0265bbc8216be3ced7b377ba8847128a3fc0ef99798a3c4557c1b88e3a01c23"},
+    {file = "duckdb-0.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d418a315a07707a693bd985274c0f8c4dd77015d9ef5d8d3da4cc1942fd82e0"},
+    {file = "duckdb-0.10.0-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2828475a292e68c71855190b818aded6bce7328f79e38c04a0c75f8f1c0ceef0"},
+    {file = "duckdb-0.10.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c3aaeaae2eba97035c65f31ffdb18202c951337bf2b3d53d77ce1da8ae2ecf51"},
+    {file = "duckdb-0.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:c51790aaaea97d8e4a58a114c371ed8d2c4e1ca7cbf29e3bdab6d8ccfc5afc1e"},
+    {file = "duckdb-0.10.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8af1ae7cc77a12206b6c47ade191882cc8f49f750bb3e72bb86ac1d4fa89926a"},
+    {file = "duckdb-0.10.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa4f7e8e8dc0e376aeb280b83f2584d0e25ec38985c27d19f3107b2edc4f4a97"},
+    {file = "duckdb-0.10.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28ae942a79fad913defa912b56483cd7827a4e7721f4ce4bc9025b746ecb3c89"},
+    {file = "duckdb-0.10.0-cp37-cp37m-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:01b57802898091455ca2a32c1335aac1e398da77c99e8a96a1e5de09f6a0add9"},
+    {file = "duckdb-0.10.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:52e1ad4a55fa153d320c367046b9500578192e01c6d04308ba8b540441736f2c"},
+    {file = "duckdb-0.10.0-cp37-cp37m-win_amd64.whl", hash = "sha256:904c47d04095af745e989c853f0bfc0776913dfc40dfbd2da7afdbbb5f67fed0"},
+    {file = "duckdb-0.10.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:184ae7ea5874f3b8fa51ab0f1519bdd088a0b78c32080ee272b1d137e2c8fd9c"},
+    {file = "duckdb-0.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bd33982ecc9bac727a032d6cedced9f19033cbad56647147408891eb51a6cb37"},
+    {file = "duckdb-0.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f59bf0949899105dd5f8864cb48139bfb78454a8c017b8258ba2b5e90acf7afc"},
+    {file = "duckdb-0.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:395f3b18948001e35dceb48a4423d574e38656606d033eef375408b539e7b076"},
+    {file = "duckdb-0.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b8eb2b803be7ee1df70435c33b03a4598cdaf676cd67ad782b288dcff65d781"},
+    {file = "duckdb-0.10.0-cp38-cp38-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:31b2ddd331801064326c8e3587a4db8a31d02aef11332c168f45b3bd92effb41"},
+    {file = "duckdb-0.10.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c8b89e76a041424b8c2026c5dc1f74b53fbbc6c6f650d563259885ab2e7d093d"},
+    {file = "duckdb-0.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:79084a82f16c0a54f6bfb7ded5600400c2daa90eb0d83337d81a56924eaee5d4"},
+    {file = "duckdb-0.10.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:79799b3a270dcd9070f677ba510f1e66b112df3068425691bac97c5e278929c7"},
+    {file = "duckdb-0.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e8fc394bfe3434920cdbcfbdd0ac3ba40902faa1dbda088db0ba44003a45318a"},
+    {file = "duckdb-0.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c116605551b4abf5786243a59bcef02bd69cc51837d0c57cafaa68cdc428aa0c"},
+    {file = "duckdb-0.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3191170c3b0a43b0c12644800326f5afdea00d5a4621d59dbbd0c1059139e140"},
+    {file = "duckdb-0.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fee69a50eb93c72dc77e7ab1fabe0c38d21a52c5da44a86aa217081e38f9f1bd"},
+    {file = "duckdb-0.10.0-cp39-cp39-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c5f449e87dacb16b0d145dbe65fa6fdb5a55b2b6911a46d74876e445dd395bac"},
+    {file = "duckdb-0.10.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4487d0df221b17ea4177ad08131bc606b35f25cfadf890987833055b9d10cdf6"},
+    {file = "duckdb-0.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:c099ae2ff8fe939fda62da81704f91e2f92ac45e48dc0e37c679c9d243d01e65"},
+    {file = "duckdb-0.10.0.tar.gz", hash = "sha256:c02bcc128002aa79e3c9d89b9de25e062d1096a8793bc0d7932317b7977f6845"},
 ]
 
-[package.dependencies]
-six = ">=1.9.0"
-
-[package.extras]
-gmpy = ["gmpy"]
-gmpy2 = ["gmpy2"]
-
+-[package.dependencies]
+-six = ">=1.9.0"
+-
+-[package.extras]
+-gmpy = ["gmpy"]
+-gmpy2 = ["gmpy2"]
+-
 [[package]]
 name = "exceptiongroup"
-version = "1.1.3"
+version = "1.2.0"
 description = "Backport of PEP 654 (exception groups)"
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "exceptiongroup-1.1.3-py3-none-any.whl", hash = "sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3"},
-    {file = "exceptiongroup-1.1.3.tar.gz", hash = "sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9"},
+    {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"},
+    {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"},
 ]
 
 [package.extras]
@@ -939,42 +953,42 @@ test = ["pytest (>=6)"]
 
 [[package]]
 name = "fastavro"
-version = "1.9.2"
+version = "1.9.4"
 description = "Fast read/write of AVRO files"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "fastavro-1.9.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:223cecf135fd29b83ca6a30035b15b8db169aeaf8dc4f9a5d34afadc4b31638a"},
-    {file = "fastavro-1.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e08c9be8c6f7eed2cf30f8b64d50094cba38a81b751c7db9f9c4be2656715259"},
-    {file = "fastavro-1.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:394f06cc865c6fbae3bbca323633a28a5d914c55dc2c1cdefb75432456ef8f6f"},
-    {file = "fastavro-1.9.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7a7caadd47bdd04bda534ff70b4b98d2823800c488fd911918115aec4c4dc09b"},
-    {file = "fastavro-1.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:68478a1b8a583d83ad6550e9dceac6cbb148a99a52c3559a0413bf4c0b9c8786"},
-    {file = "fastavro-1.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:b59a1123f1d534743af33fdbda80dd7b9146685bdd7931eae12bee6203065222"},
-    {file = "fastavro-1.9.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:887c20dc527a549764c91f9e48ece071f2f26d217af66ebcaeb87bf29578fee5"},
-    {file = "fastavro-1.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46458f78b481c12db62d3d8a81bae09cb0b5b521c0d066c6856fc2746908d00d"},
-    {file = "fastavro-1.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f4a2a4bed0e829f79fa1e4f172d484b2179426e827bcc80c0069cc81328a5af"},
-    {file = "fastavro-1.9.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6167f9bbe1c5a28fbc2db767f97dbbb4981065e6eeafd4e613f6fe76c576ffd4"},
-    {file = "fastavro-1.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d574bc385f820da0404528157238de4e5fdd775d2cb3d05b3b0f1b475d493837"},
-    {file = "fastavro-1.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:ec600eb15b3ec931904c5bf8da62b3b725cb0f369add83ba47d7b5e9322f92a0"},
-    {file = "fastavro-1.9.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:c82b0761503420cd45f7f50bc31975ac1c75b5118e15434c1d724b751abcc249"},
-    {file = "fastavro-1.9.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db62d9b8c944b8d9c481e5f980d5becfd034bdd58c72e27c9333bd504b06bda0"},
-    {file = "fastavro-1.9.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65e61f040bc9494646f42a466e9cd428783b82d7161173f3296710723ba5a453"},
-    {file = "fastavro-1.9.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:6278b93cdd5bef1778c0232ce1f265137f90bc6be97a5c1dd7e0d99a406c0488"},
-    {file = "fastavro-1.9.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:cd003ddea5d89720194b6e57011c37221d9fc4ddc750e6f4723516eb659be686"},
-    {file = "fastavro-1.9.2-cp312-cp312-win_amd64.whl", hash = "sha256:43f09d100a26e8b59f30dde664d93e423b648e008abfc43132608a18fe8ddcc2"},
-    {file = "fastavro-1.9.2-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:3ddffeff5394f285c69f9cd481f47b6cf62379840cdbe6e0dc74683bd589b56e"},
-    {file = "fastavro-1.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e75a2b2ec697d2058a7d96522e921f03f174cf9049ace007c24be7ab58c5370"},
-    {file = "fastavro-1.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fd2e8fd0567483eb0fdada1b979ad4d493305dfdd3f351c82a87df301f0ae1f"},
-    {file = "fastavro-1.9.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c652dbe3f087c943a5b89f9a50a574e64f23790bfbec335ce2b91a2ae354a443"},
-    {file = "fastavro-1.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bba73e9a1822162f1b3a43de0362f29880014c5c4d49d63ad7fcce339ef73ea2"},
-    {file = "fastavro-1.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:beeef2964bbfd09c539424808539b956d7425afbb7055b89e2aa311374748b56"},
-    {file = "fastavro-1.9.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:d5fa48266d75e057b27d8586b823d6d7d7c94593fd989d75033eb4c8078009fb"},
-    {file = "fastavro-1.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b69aeb0d063f5955a0e412f9779444fc452568a49db75a90a8d372f9cb4a01c8"},
-    {file = "fastavro-1.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ce336c59fb40fdb8751bda8cc6076cfcdf9767c3c107f6049e049166b26c61f"},
-    {file = "fastavro-1.9.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:581036e18661f045415a51ad528865e1d7ba5a9690a3dede9e6ea50f94ed6c4c"},
-    {file = "fastavro-1.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:39b6b5c3cda569c0a130fd2d08d4c53a326ede7e05174a24eda08f7698f70eda"},
-    {file = "fastavro-1.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:d33e40f246bf07f106f9d2da68d0234efcc62276b6e35bde00ff920ea7f871fd"},
-    {file = "fastavro-1.9.2.tar.gz", hash = "sha256:5c1ffad986200496bd69b5c4748ae90b5d934d3b1456f33147bee3a0bb17f89b"},
+    {file = "fastavro-1.9.4-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:60cb38f07462a7fb4e4440ed0de67d3d400ae6b3d780f81327bebde9aa55faef"},
+    {file = "fastavro-1.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:063d01d197fc929c20adc09ca9f0ca86d33ac25ee0963ce0b438244eee8315ae"},
+    {file = "fastavro-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87a9053fcfbc895f2a16a4303af22077e3a8fdcf1cd5d6ed47ff2ef22cbba2f0"},
+    {file = "fastavro-1.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:02bf1276b7326397314adf41b34a4890f6ffa59cf7e0eb20b9e4ab0a143a1598"},
+    {file = "fastavro-1.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:56bed9eca435389a8861e6e2d631ec7f8f5dda5b23f93517ac710665bd34ca29"},
+    {file = "fastavro-1.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:0cd2099c8c672b853e0b20c13e9b62a69d3fbf67ee7c59c7271ba5df1680310d"},
+    {file = "fastavro-1.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:af8c6d8c43a02b5569c093fc5467469541ac408c79c36a5b0900d3dd0b3ba838"},
+    {file = "fastavro-1.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4a138710bd61580324d23bc5e3df01f0b82aee0a76404d5dddae73d9e4c723f"},
+    {file = "fastavro-1.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:903d97418120ca6b6a7f38a731166c1ccc2c4344ee5e0470d09eb1dc3687540a"},
+    {file = "fastavro-1.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c443eeb99899d062dbf78c525e4614dd77e041a7688fa2710c224f4033f193ae"},
+    {file = "fastavro-1.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ac26ab0774d1b2b7af6d8f4300ad20bbc4b5469e658a02931ad13ce23635152f"},
+    {file = "fastavro-1.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:cf7247874c22be856ba7d1f46a0f6e0379a6025f1a48a7da640444cbac6f570b"},
+    {file = "fastavro-1.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:68912f2020e1b3d70557260b27dd85fb49a4fc6bfab18d384926127452c1da4c"},
+    {file = "fastavro-1.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6925ce137cdd78e109abdb0bc33aad55de6c9f2d2d3036b65453128f2f5f5b92"},
+    {file = "fastavro-1.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b928cd294e36e35516d0deb9e104b45be922ba06940794260a4e5dbed6c192a"},
+    {file = "fastavro-1.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:90c9838bc4c991ffff5dd9d88a0cc0030f938b3fdf038cdf6babde144b920246"},
+    {file = "fastavro-1.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:eca6e54da571b06a3c5a72dbb7212073f56c92a6fbfbf847b91c347510f8a426"},
+    {file = "fastavro-1.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:a4b02839ac261100cefca2e2ad04cdfedc556cb66b5ec735e0db428e74b399de"},
+    {file = "fastavro-1.9.4-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:4451ee9a305a73313a1558d471299f3130e4ecc10a88bf5742aa03fb37e042e6"},
+    {file = "fastavro-1.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a8524fccfb379565568c045d29b2ebf71e1f2c0dd484aeda9fe784ef5febe1a8"},
+    {file = "fastavro-1.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33d0a00a6e09baa20f6f038d7a2ddcb7eef0e7a9980e947a018300cb047091b8"},
+    {file = "fastavro-1.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:23d7e5b29c9bf6f26e8be754b2c8b919838e506f78ef724de7d22881696712fc"},
+    {file = "fastavro-1.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2e6ab3ee53944326460edf1125b2ad5be2fadd80f7211b13c45fa0c503b4cf8d"},
+    {file = "fastavro-1.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:64d335ec2004204c501f8697c385d0a8f6b521ac82d5b30696f789ff5bc85f3c"},
+    {file = "fastavro-1.9.4-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:7e05f44c493e89e73833bd3ff3790538726906d2856f59adc8103539f4a1b232"},
+    {file = "fastavro-1.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:253c63993250bff4ee7b11fb46cf3a4622180a783bedc82a24c6fdcd1b10ca2a"},
+    {file = "fastavro-1.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24d6942eb1db14640c2581e0ecd1bbe0afc8a83731fcd3064ae7f429d7880cb7"},
+    {file = "fastavro-1.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d47bb66be6091cd48cfe026adcad11c8b11d7d815a2949a1e4ccf03df981ca65"},
+    {file = "fastavro-1.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c293897f12f910e58a1024f9c77f565aa8e23b36aafda6ad8e7041accc57a57f"},
+    {file = "fastavro-1.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:f05d2afcb10a92e2a9e580a3891f090589b3e567fdc5641f8a46a0b084f120c3"},
+    {file = "fastavro-1.9.4.tar.gz", hash = "sha256:56b8363e360a1256c94562393dc7f8611f3baf2b3159f64fb2b9c6b87b14e876"},
 ]
 
 [package.extras]
@@ -985,29 +999,29 @@ zstandard = ["zstandard"]
 
 [[package]]
 name = "filelock"
-version = "3.12.4"
+version = "3.13.1"
 description = "A platform independent file lock."
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "filelock-3.12.4-py3-none-any.whl", hash = "sha256:08c21d87ded6e2b9da6728c3dff51baf1dcecf973b768ef35bcbc3447edb9ad4"},
-    {file = "filelock-3.12.4.tar.gz", hash = "sha256:2e6f249f1f3654291606e046b09f1fd5eac39b360664c27f5aad072012f8bcbd"},
+    {file = "filelock-3.13.1-py3-none-any.whl", hash = "sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c"},
+    {file = "filelock-3.13.1.tar.gz", hash = "sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e"},
 ]
 
 [package.extras]
-docs = ["furo (>=2023.7.26)", "sphinx (>=7.1.2)", "sphinx-autodoc-typehints (>=1.24)"]
-testing = ["covdefaults (>=2.3)", "coverage (>=7.3)", "diff-cover (>=7.7)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)", "pytest-timeout (>=2.1)"]
-typing = ["typing-extensions (>=4.7.1)"]
+docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.24)"]
+testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"]
+typing = ["typing-extensions (>=4.8)"]
 
 [[package]]
 name = "flask"
-version = "3.0.0"
+version = "3.0.2"
 description = "A simple framework for building complex web applications."
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "flask-3.0.0-py3-none-any.whl", hash = "sha256:21128f47e4e3b9d597a3e8521a329bf56909b690fcc3fa3e477725aa81367638"},
-    {file = "flask-3.0.0.tar.gz", hash = "sha256:cfadcdb638b609361d29ec22360d6070a77d7463dcb3ab08d2c2f2f168845f58"},
+    {file = "flask-3.0.2-py3-none-any.whl", hash = "sha256:3232e0e9c850d781933cf0207523d1ece087eb8d87b23777ae38456e2fbe7c6e"},
+    {file = "flask-3.0.2.tar.gz", hash = "sha256:822c03f4b799204250a7ee84b1eddc40665395333973dfb9deebfe425fefcb7d"},
 ]
 
 [package.dependencies]
@@ -1038,85 +1052,105 @@ Flask = ">=0.9"
 
 [[package]]
 name = "frozenlist"
-version = "1.4.0"
+version = "1.4.1"
 description = "A list-like structure which implements collections.abc.MutableSequence"
 optional = true
 python-versions = ">=3.8"
 files = [
-    {file = "frozenlist-1.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:764226ceef3125e53ea2cb275000e309c0aa5464d43bd72abd661e27fffc26ab"},
-    {file = "frozenlist-1.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d6484756b12f40003c6128bfcc3fa9f0d49a687e171186c2d85ec82e3758c559"},
-    {file = "frozenlist-1.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9ac08e601308e41eb533f232dbf6b7e4cea762f9f84f6357136eed926c15d12c"},
-    {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d081f13b095d74b67d550de04df1c756831f3b83dc9881c38985834387487f1b"},
-    {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:71932b597f9895f011f47f17d6428252fc728ba2ae6024e13c3398a087c2cdea"},
-    {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:981b9ab5a0a3178ff413bca62526bb784249421c24ad7381e39d67981be2c326"},
-    {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e41f3de4df3e80de75845d3e743b3f1c4c8613c3997a912dbf0229fc61a8b963"},
-    {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6918d49b1f90821e93069682c06ffde41829c346c66b721e65a5c62b4bab0300"},
-    {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0e5c8764c7829343d919cc2dfc587a8db01c4f70a4ebbc49abde5d4b158b007b"},
-    {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8d0edd6b1c7fb94922bf569c9b092ee187a83f03fb1a63076e7774b60f9481a8"},
-    {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e29cda763f752553fa14c68fb2195150bfab22b352572cb36c43c47bedba70eb"},
-    {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:0c7c1b47859ee2cac3846fde1c1dc0f15da6cec5a0e5c72d101e0f83dcb67ff9"},
-    {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:901289d524fdd571be1c7be054f48b1f88ce8dddcbdf1ec698b27d4b8b9e5d62"},
-    {file = "frozenlist-1.4.0-cp310-cp310-win32.whl", hash = "sha256:1a0848b52815006ea6596c395f87449f693dc419061cc21e970f139d466dc0a0"},
-    {file = "frozenlist-1.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:b206646d176a007466358aa21d85cd8600a415c67c9bd15403336c331a10d956"},
-    {file = "frozenlist-1.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:de343e75f40e972bae1ef6090267f8260c1446a1695e77096db6cfa25e759a95"},
-    {file = "frozenlist-1.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ad2a9eb6d9839ae241701d0918f54c51365a51407fd80f6b8289e2dfca977cc3"},
-    {file = "frozenlist-1.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bd7bd3b3830247580de99c99ea2a01416dfc3c34471ca1298bccabf86d0ff4dc"},
-    {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bdf1847068c362f16b353163391210269e4f0569a3c166bc6a9f74ccbfc7e839"},
-    {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38461d02d66de17455072c9ba981d35f1d2a73024bee7790ac2f9e361ef1cd0c"},
-    {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5a32087d720c608f42caed0ef36d2b3ea61a9d09ee59a5142d6070da9041b8f"},
-    {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dd65632acaf0d47608190a71bfe46b209719bf2beb59507db08ccdbe712f969b"},
-    {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:261b9f5d17cac914531331ff1b1d452125bf5daa05faf73b71d935485b0c510b"},
-    {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b89ac9768b82205936771f8d2eb3ce88503b1556324c9f903e7156669f521472"},
-    {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:008eb8b31b3ea6896da16c38c1b136cb9fec9e249e77f6211d479db79a4eaf01"},
-    {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e74b0506fa5aa5598ac6a975a12aa8928cbb58e1f5ac8360792ef15de1aa848f"},
-    {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:490132667476f6781b4c9458298b0c1cddf237488abd228b0b3650e5ecba7467"},
-    {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:76d4711f6f6d08551a7e9ef28c722f4a50dd0fc204c56b4bcd95c6cc05ce6fbb"},
-    {file = "frozenlist-1.4.0-cp311-cp311-win32.whl", hash = "sha256:a02eb8ab2b8f200179b5f62b59757685ae9987996ae549ccf30f983f40602431"},
-    {file = "frozenlist-1.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:515e1abc578dd3b275d6a5114030b1330ba044ffba03f94091842852f806f1c1"},
-    {file = "frozenlist-1.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:f0ed05f5079c708fe74bf9027e95125334b6978bf07fd5ab923e9e55e5fbb9d3"},
-    {file = "frozenlist-1.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ca265542ca427bf97aed183c1676e2a9c66942e822b14dc6e5f42e038f92a503"},
-    {file = "frozenlist-1.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:491e014f5c43656da08958808588cc6c016847b4360e327a62cb308c791bd2d9"},
-    {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17ae5cd0f333f94f2e03aaf140bb762c64783935cc764ff9c82dff626089bebf"},
-    {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e78fb68cf9c1a6aa4a9a12e960a5c9dfbdb89b3695197aa7064705662515de2"},
-    {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5655a942f5f5d2c9ed93d72148226d75369b4f6952680211972a33e59b1dfdc"},
-    {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c11b0746f5d946fecf750428a95f3e9ebe792c1ee3b1e96eeba145dc631a9672"},
-    {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e66d2a64d44d50d2543405fb183a21f76b3b5fd16f130f5c99187c3fb4e64919"},
-    {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:88f7bc0fcca81f985f78dd0fa68d2c75abf8272b1f5c323ea4a01a4d7a614efc"},
-    {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5833593c25ac59ede40ed4de6d67eb42928cca97f26feea219f21d0ed0959b79"},
-    {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:fec520865f42e5c7f050c2a79038897b1c7d1595e907a9e08e3353293ffc948e"},
-    {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:b826d97e4276750beca7c8f0f1a4938892697a6bcd8ec8217b3312dad6982781"},
-    {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ceb6ec0a10c65540421e20ebd29083c50e6d1143278746a4ef6bcf6153171eb8"},
-    {file = "frozenlist-1.4.0-cp38-cp38-win32.whl", hash = "sha256:2b8bcf994563466db019fab287ff390fffbfdb4f905fc77bc1c1d604b1c689cc"},
-    {file = "frozenlist-1.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:a6c8097e01886188e5be3e6b14e94ab365f384736aa1fca6a0b9e35bd4a30bc7"},
-    {file = "frozenlist-1.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:6c38721585f285203e4b4132a352eb3daa19121a035f3182e08e437cface44bf"},
-    {file = "frozenlist-1.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a0c6da9aee33ff0b1a451e867da0c1f47408112b3391dd43133838339e410963"},
-    {file = "frozenlist-1.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:93ea75c050c5bb3d98016b4ba2497851eadf0ac154d88a67d7a6816206f6fa7f"},
-    {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f61e2dc5ad442c52b4887f1fdc112f97caeff4d9e6ebe78879364ac59f1663e1"},
-    {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa384489fefeb62321b238e64c07ef48398fe80f9e1e6afeff22e140e0850eef"},
-    {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:10ff5faaa22786315ef57097a279b833ecab1a0bfb07d604c9cbb1c4cdc2ed87"},
-    {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:007df07a6e3eb3e33e9a1fe6a9db7af152bbd8a185f9aaa6ece10a3529e3e1c6"},
-    {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f4f399d28478d1f604c2ff9119907af9726aed73680e5ed1ca634d377abb087"},
-    {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c5374b80521d3d3f2ec5572e05adc94601985cc526fb276d0c8574a6d749f1b3"},
-    {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ce31ae3e19f3c902de379cf1323d90c649425b86de7bbdf82871b8a2a0615f3d"},
-    {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7211ef110a9194b6042449431e08c4d80c0481e5891e58d429df5899690511c2"},
-    {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:556de4430ce324c836789fa4560ca62d1591d2538b8ceb0b4f68fb7b2384a27a"},
-    {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7645a8e814a3ee34a89c4a372011dcd817964ce8cb273c8ed6119d706e9613e3"},
-    {file = "frozenlist-1.4.0-cp39-cp39-win32.whl", hash = "sha256:19488c57c12d4e8095a922f328df3f179c820c212940a498623ed39160bc3c2f"},
-    {file = "frozenlist-1.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:6221d84d463fb110bdd7619b69cb43878a11d51cbb9394ae3105d082d5199167"},
-    {file = "frozenlist-1.4.0.tar.gz", hash = "sha256:09163bdf0b2907454042edb19f887c6d33806adc71fbd54afc14908bfdc22251"},
+    {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac"},
+    {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868"},
+    {file = "frozenlist-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74fb4bee6880b529a0c6560885fce4dc95936920f9f20f53d99a213f7bf66776"},
+    {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:590344787a90ae57d62511dd7c736ed56b428f04cd8c161fcc5e7232c130c69a"},
+    {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:068b63f23b17df8569b7fdca5517edef76171cf3897eb68beb01341131fbd2ad"},
+    {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c849d495bf5154cd8da18a9eb15db127d4dba2968d88831aff6f0331ea9bd4c"},
+    {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9750cc7fe1ae3b1611bb8cfc3f9ec11d532244235d75901fb6b8e42ce9229dfe"},
+    {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9b2de4cf0cdd5bd2dee4c4f63a653c61d2408055ab77b151c1957f221cabf2a"},
+    {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0633c8d5337cb5c77acbccc6357ac49a1770b8c487e5b3505c57b949b4b82e98"},
+    {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:27657df69e8801be6c3638054e202a135c7f299267f1a55ed3a598934f6c0d75"},
+    {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:f9a3ea26252bd92f570600098783d1371354d89d5f6b7dfd87359d669f2109b5"},
+    {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4f57dab5fe3407b6c0c1cc907ac98e8a189f9e418f3b6e54d65a718aaafe3950"},
+    {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e02a0e11cf6597299b9f3bbd3f93d79217cb90cfd1411aec33848b13f5c656cc"},
+    {file = "frozenlist-1.4.1-cp310-cp310-win32.whl", hash = "sha256:a828c57f00f729620a442881cc60e57cfcec6842ba38e1b19fd3e47ac0ff8dc1"},
+    {file = "frozenlist-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:f56e2333dda1fe0f909e7cc59f021eba0d2307bc6f012a1ccf2beca6ba362439"},
+    {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a0cb6f11204443f27a1628b0e460f37fb30f624be6051d490fa7d7e26d4af3d0"},
+    {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b46c8ae3a8f1f41a0d2ef350c0b6e65822d80772fe46b653ab6b6274f61d4a49"},
+    {file = "frozenlist-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fde5bd59ab5357e3853313127f4d3565fc7dad314a74d7b5d43c22c6a5ed2ced"},
+    {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:722e1124aec435320ae01ee3ac7bec11a5d47f25d0ed6328f2273d287bc3abb0"},
+    {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2471c201b70d58a0f0c1f91261542a03d9a5e088ed3dc6c160d614c01649c106"},
+    {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c757a9dd70d72b076d6f68efdbb9bc943665ae954dad2801b874c8c69e185068"},
+    {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f146e0911cb2f1da549fc58fc7bcd2b836a44b79ef871980d605ec392ff6b0d2"},
+    {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9c515e7914626b2a2e1e311794b4c35720a0be87af52b79ff8e1429fc25f19"},
+    {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c302220494f5c1ebeb0912ea782bcd5e2f8308037b3c7553fad0e48ebad6ad82"},
+    {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:442acde1e068288a4ba7acfe05f5f343e19fac87bfc96d89eb886b0363e977ec"},
+    {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:1b280e6507ea8a4fa0c0a7150b4e526a8d113989e28eaaef946cc77ffd7efc0a"},
+    {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:fe1a06da377e3a1062ae5fe0926e12b84eceb8a50b350ddca72dc85015873f74"},
+    {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db9e724bebd621d9beca794f2a4ff1d26eed5965b004a97f1f1685a173b869c2"},
+    {file = "frozenlist-1.4.1-cp311-cp311-win32.whl", hash = "sha256:e774d53b1a477a67838a904131c4b0eef6b3d8a651f8b138b04f748fccfefe17"},
+    {file = "frozenlist-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb3c2db03683b5767dedb5769b8a40ebb47d6f7f45b1b3e3b4b51ec8ad9d9825"},
+    {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1979bc0aeb89b33b588c51c54ab0161791149f2461ea7c7c946d95d5f93b56ae"},
+    {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cc7b01b3754ea68a62bd77ce6020afaffb44a590c2289089289363472d13aedb"},
+    {file = "frozenlist-1.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9c92be9fd329ac801cc420e08452b70e7aeab94ea4233a4804f0915c14eba9b"},
+    {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3894db91f5a489fc8fa6a9991820f368f0b3cbdb9cd8849547ccfab3392d86"},
+    {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba60bb19387e13597fb059f32cd4d59445d7b18b69a745b8f8e5db0346f33480"},
+    {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aefbba5f69d42246543407ed2461db31006b0f76c4e32dfd6f42215a2c41d09"},
+    {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780d3a35680ced9ce682fbcf4cb9c2bad3136eeff760ab33707b71db84664e3a"},
+    {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9acbb16f06fe7f52f441bb6f413ebae6c37baa6ef9edd49cdd567216da8600cd"},
+    {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:23b701e65c7b36e4bf15546a89279bd4d8675faabc287d06bbcfac7d3c33e1e6"},
+    {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3e0153a805a98f5ada7e09826255ba99fb4f7524bb81bf6b47fb702666484ae1"},
+    {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:dd9b1baec094d91bf36ec729445f7769d0d0cf6b64d04d86e45baf89e2b9059b"},
+    {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:1a4471094e146b6790f61b98616ab8e44f72661879cc63fa1049d13ef711e71e"},
+    {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5667ed53d68d91920defdf4035d1cdaa3c3121dc0b113255124bcfada1cfa1b8"},
+    {file = "frozenlist-1.4.1-cp312-cp312-win32.whl", hash = "sha256:beee944ae828747fd7cb216a70f120767fc9f4f00bacae8543c14a6831673f89"},
+    {file = "frozenlist-1.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:64536573d0a2cb6e625cf309984e2d873979709f2cf22839bf2d61790b448ad5"},
+    {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:20b51fa3f588ff2fe658663db52a41a4f7aa6c04f6201449c6c7c476bd255c0d"},
+    {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:410478a0c562d1a5bcc2f7ea448359fcb050ed48b3c6f6f4f18c313a9bdb1826"},
+    {file = "frozenlist-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6321c9efe29975232da3bd0af0ad216800a47e93d763ce64f291917a381b8eb"},
+    {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48f6a4533887e189dae092f1cf981f2e3885175f7a0f33c91fb5b7b682b6bab6"},
+    {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6eb73fa5426ea69ee0e012fb59cdc76a15b1283d6e32e4f8dc4482ec67d1194d"},
+    {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbeb989b5cc29e8daf7f976b421c220f1b8c731cbf22b9130d8815418ea45887"},
+    {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32453c1de775c889eb4e22f1197fe3bdfe457d16476ea407472b9442e6295f7a"},
+    {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693945278a31f2086d9bf3df0fe8254bbeaef1fe71e1351c3bd730aa7d31c41b"},
+    {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1d0ce09d36d53bbbe566fe296965b23b961764c0bcf3ce2fa45f463745c04701"},
+    {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3a670dc61eb0d0eb7080890c13de3066790f9049b47b0de04007090807c776b0"},
+    {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:dca69045298ce5c11fd539682cff879cc1e664c245d1c64da929813e54241d11"},
+    {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a06339f38e9ed3a64e4c4e43aec7f59084033647f908e4259d279a52d3757d09"},
+    {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b7f2f9f912dca3934c1baec2e4585a674ef16fe00218d833856408c48d5beee7"},
+    {file = "frozenlist-1.4.1-cp38-cp38-win32.whl", hash = "sha256:e7004be74cbb7d9f34553a5ce5fb08be14fb33bc86f332fb71cbe5216362a497"},
+    {file = "frozenlist-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:5a7d70357e7cee13f470c7883a063aae5fe209a493c57d86eb7f5a6f910fae09"},
+    {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bfa4a17e17ce9abf47a74ae02f32d014c5e9404b6d9ac7f729e01562bbee601e"},
+    {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b7e3ed87d4138356775346e6845cccbe66cd9e207f3cd11d2f0b9fd13681359d"},
+    {file = "frozenlist-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c99169d4ff810155ca50b4da3b075cbde79752443117d89429595c2e8e37fed8"},
+    {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edb678da49d9f72c9f6c609fbe41a5dfb9a9282f9e6a2253d5a91e0fc382d7c0"},
+    {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6db4667b187a6742b33afbbaf05a7bc551ffcf1ced0000a571aedbb4aa42fc7b"},
+    {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55fdc093b5a3cb41d420884cdaf37a1e74c3c37a31f46e66286d9145d2063bd0"},
+    {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82e8211d69a4f4bc360ea22cd6555f8e61a1bd211d1d5d39d3d228b48c83a897"},
+    {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89aa2c2eeb20957be2d950b85974b30a01a762f3308cd02bb15e1ad632e22dc7"},
+    {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d3e0c25a2350080e9319724dede4f31f43a6c9779be48021a7f4ebde8b2d742"},
+    {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7268252af60904bf52c26173cbadc3a071cece75f873705419c8681f24d3edea"},
+    {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:0c250a29735d4f15321007fb02865f0e6b6a41a6b88f1f523ca1596ab5f50bd5"},
+    {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:96ec70beabbd3b10e8bfe52616a13561e58fe84c0101dd031dc78f250d5128b9"},
+    {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:23b2d7679b73fe0e5a4560b672a39f98dfc6f60df63823b0a9970525325b95f6"},
+    {file = "frozenlist-1.4.1-cp39-cp39-win32.whl", hash = "sha256:a7496bfe1da7fb1a4e1cc23bb67c58fab69311cc7d32b5a99c2007b4b2a0e932"},
+    {file = "frozenlist-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e6a20a581f9ce92d389a8c7d7c3dd47c81fd5d6e655c8dddf341e14aa48659d0"},
+    {file = "frozenlist-1.4.1-py3-none-any.whl", hash = "sha256:04ced3e6a46b4cfffe20f9ae482818e34eba9b5fb0ce4056e4cc9b6e212d09b7"},
+    {file = "frozenlist-1.4.1.tar.gz", hash = "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b"},
 ]
 
 [[package]]
 name = "fsspec"
-version = "2023.9.1"
+version = "2023.12.2"
 description = "File-system specification"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "fsspec-2023.9.1-py3-none-any.whl", hash = "sha256:99a974063b6cced36cfaa61aa8efb05439c6fea2dafe65930e7ab46f9d2f8930"},
-    {file = "fsspec-2023.9.1.tar.gz", hash = "sha256:da8cfe39eeb65aaa69074d5e0e4bbc9b7ef72d69c0587a31cab981eefdb3da13"},
+    {file = "fsspec-2023.12.2-py3-none-any.whl", hash = "sha256:d800d87f72189a745fa3d6b033b9dc4a34ad069f60ca60b943a63599f5501960"},
+    {file = "fsspec-2023.12.2.tar.gz", hash = "sha256:8548d39e8810b59c38014934f6b31e57f40c1b20f911f4cc2b85389c7e9bf0cb"},
 ]
 
+[package.dependencies]
+aiohttp = {version = "<4.0.0a0 || >4.0.0a0,<4.0.0a1 || >4.0.0a1", optional = true, markers = "extra == \"http\""}
+requests = {version = "*", optional = true, markers = "extra == \"http\""}
+
 [package.extras]
 abfs = ["adlfs"]
 adl = ["adlfs"]
@@ -1143,19 +1177,19 @@ tqdm = ["tqdm"]
 
 [[package]]
 name = "gcsfs"
-version = "2023.9.1"
+version = "2023.12.2.post1"
 description = "Convenient Filesystem interface over GCS"
 optional = true
 python-versions = ">=3.8"
 files = [
-    {file = "gcsfs-2023.9.1-py2.py3-none-any.whl", hash = "sha256:c673caf901fc923d121399782394aba9c99dac4332a15ba21bd320d0b7f46521"},
-    {file = "gcsfs-2023.9.1.tar.gz", hash = "sha256:47698bba0468896bfb33749552fe459fa745069119d7c2605a53bf4f3a1f09ac"},
+    {file = "gcsfs-2023.12.2.post1-py2.py3-none-any.whl", hash = "sha256:4123cee2c44118d4c0c0f7405abe7610dd2d87087857520c6a7769765ec51d43"},
+    {file = "gcsfs-2023.12.2.post1.tar.gz", hash = "sha256:e38b7e59580a1e490d62d55a47cba33b49a941b01917c3d6f6cfd2563371ab7b"},
 ]
 
 [package.dependencies]
 aiohttp = "<4.0.0a0 || >4.0.0a0,<4.0.0a1 || >4.0.0a1"
 decorator = ">4.1.2"
-fsspec = "2023.9.1"
+fsspec = "2023.12.2"
 google-auth = ">=1.2"
 google-auth-oauthlib = "*"
 google-cloud-storage = "*"
@@ -1165,15 +1199,47 @@ requests = "*"
 crc = ["crcmod"]
 gcsfuse = ["fusepy"]
 
+[[package]]
+name = "getdaft"
+version = "0.2.16"
+description = "Distributed Dataframes for Multimodal Data"
+optional = true
+python-versions = ">=3.7"
+files = [
+    {file = "getdaft-0.2.16-cp37-abi3-macosx_10_7_x86_64.whl", hash = "sha256:0a355301e79e00ab639150b84d380465f5f69ef9e6f36f1b5cf376e3d24229f6"},
+    {file = "getdaft-0.2.16-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:317a8dff8169638cea40efbc01193d51f31c4ab441fc39f01f163f197fc264a2"},
+    {file = "getdaft-0.2.16-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cb1e5a62fcbb4a909532bb64dc7af56e7ac3fef1b8220448fcae1a8af0c6bc4"},
+    {file = "getdaft-0.2.16-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b932510fd24b4f1f021abd67016bbcdacda3315d0e3ee2a8e339d82719adbd51"},
+    {file = "getdaft-0.2.16-cp37-abi3-win_amd64.whl", hash = "sha256:c3e74f56b211f88e5c335276fe4670a0dfac8dc8b5c684b22fc570b1350cc40d"},
+    {file = "getdaft-0.2.16.tar.gz", hash = "sha256:3fc7b2c3373bc374a90ecc566c6f0d830b9ce751d6c930c96b70b2c4c2afa0c4"},
+]
+
+[package.dependencies]
+fsspec = {version = "*", extras = ["http"]}
+psutil = "*"
+pyarrow = ">=6.0.1"
+tqdm = "*"
+typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.10\""}
+
+[package.extras]
+all = ["getdaft[aws,azure,gcp,iceberg,numpy,pandas,ray]"]
+aws = ["s3fs"]
+azure = ["adlfs"]
+gcp = ["gcsfs"]
+iceberg = ["packaging", "pyiceberg (>=0.4.0)"]
+numpy = ["numpy"]
+pandas = ["pandas"]
+ray = ["packaging", "ray[client,data] (>=2.0.0)"]
+
 [[package]]
 name = "google-api-core"
-version = "2.11.1"
+version = "2.17.1"
 description = "Google API client core library"
 optional = true
 python-versions = ">=3.7"
 files = [
-    {file = "google-api-core-2.11.1.tar.gz", hash = "sha256:25d29e05a0058ed5f19c61c0a78b1b53adea4d9364b464d014fbda941f6d1c9a"},
-    {file = "google_api_core-2.11.1-py3-none-any.whl", hash = "sha256:d92a5a92dc36dd4f4b9ee4e55528a90e432b059f93aee6ad857f9de8cc7ae94a"},
+    {file = "google-api-core-2.17.1.tar.gz", hash = "sha256:9df18a1f87ee0df0bc4eea2770ebc4228392d8cc4066655b320e2cfccb15db95"},
+    {file = "google_api_core-2.17.1-py3-none-any.whl", hash =
"sha256:610c5b90092c360736baccf17bd3efbcb30dd380e7a6dc28a71059edb8bd0d8e"}, ] [package.dependencies] @@ -1189,20 +1255,19 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] [[package]] name = "google-auth" -version = "2.23.0" +version = "2.28.0" description = "Google Authentication Library" optional = true python-versions = ">=3.7" files = [ - {file = "google-auth-2.23.0.tar.gz", hash = "sha256:753a26312e6f1eaeec20bc6f2644a10926697da93446e1f8e24d6d32d45a922a"}, - {file = "google_auth-2.23.0-py2.py3-none-any.whl", hash = "sha256:2cec41407bd1e207f5b802638e32bb837df968bb5c05f413d0fa526fac4cf7a7"}, + {file = "google-auth-2.28.0.tar.gz", hash = "sha256:3cfc1b6e4e64797584fb53fc9bd0b7afa9b7c0dba2004fa7dcc9349e58cc3195"}, + {file = "google_auth-2.28.0-py2.py3-none-any.whl", hash = "sha256:7634d29dcd1e101f5226a23cbc4a0c6cda6394253bf80e281d9c5c6797869c53"}, ] [package.dependencies] cachetools = ">=2.0.0,<6.0" pyasn1-modules = ">=0.2.1" rsa = ">=3.1.4,<5" -urllib3 = "<2.0" [package.extras] aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"] @@ -1213,13 +1278,13 @@ requests = ["requests (>=2.20.0,<3.0.0.dev0)"] [[package]] name = "google-auth-oauthlib" -version = "1.1.0" +version = "1.2.0" description = "Google Authentication Library" optional = true python-versions = ">=3.6" files = [ - {file = "google-auth-oauthlib-1.1.0.tar.gz", hash = "sha256:83ea8c3b0881e453790baff4448e8a6112ac8778d1de9da0b68010b843937afb"}, - {file = "google_auth_oauthlib-1.1.0-py2.py3-none-any.whl", hash = "sha256:089c6e587d36f4803ac7e0720c045c6a8b1fd1790088b8424975b90d0ee61c12"}, + {file = "google-auth-oauthlib-1.2.0.tar.gz", hash = "sha256:292d2d3783349f2b0734a0a0207b1e1e322ac193c2c09d8f7c613fb7cc501ea8"}, + {file = "google_auth_oauthlib-1.2.0-py2.py3-none-any.whl", hash = "sha256:297c1ce4cb13a99b5834c74a1fe03252e1e499716718b190f56bcb9c4abc4faf"}, ] [package.dependencies] @@ -1231,13 +1296,13 @@ tool = ["click (>=6.0.0)"] [[package]] name = "google-cloud-core" -version = "2.3.3" +version = "2.4.1" description = "Google Cloud API client core library" optional = true python-versions = ">=3.7" files = [ - {file = "google-cloud-core-2.3.3.tar.gz", hash = "sha256:37b80273c8d7eee1ae816b3a20ae43585ea50506cb0e60f3cf5be5f87f1373cb"}, - {file = "google_cloud_core-2.3.3-py2.py3-none-any.whl", hash = "sha256:fbd11cad3e98a7e5b0343dc07cb1039a5ffd7a5bb96e1f1e27cee4bda4a90863"}, + {file = "google-cloud-core-2.4.1.tar.gz", hash = "sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073"}, + {file = "google_cloud_core-2.4.1-py2.py3-none-any.whl", hash = "sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61"}, ] [package.dependencies] @@ -1245,23 +1310,24 @@ google-api-core = ">=1.31.6,<2.0.dev0 || >2.3.0,<3.0.0dev" google-auth = ">=1.25.0,<3.0dev" [package.extras] -grpc = ["grpcio (>=1.38.0,<2.0dev)"] +grpc = ["grpcio (>=1.38.0,<2.0dev)", "grpcio-status (>=1.38.0,<2.0.dev0)"] [[package]] name = "google-cloud-storage" -version = "2.11.0" +version = "2.14.0" description = "Google Cloud Storage API client library" optional = true python-versions = ">=3.7" files = [ - {file = "google-cloud-storage-2.11.0.tar.gz", hash = "sha256:6fbf62659b83c8f3a0a743af0d661d2046c97c3a5bfb587c4662c4bc68de3e31"}, - {file = "google_cloud_storage-2.11.0-py2.py3-none-any.whl", hash = "sha256:88cbd7fb3d701c780c4272bc26952db99f25eb283fb4c2208423249f00b5fe53"}, + {file = "google-cloud-storage-2.14.0.tar.gz", hash = "sha256:2d23fcf59b55e7b45336729c148bb1c464468c69d5efbaee30f7201dd90eb97e"}, + {file = 
"google_cloud_storage-2.14.0-py2.py3-none-any.whl", hash = "sha256:8641243bbf2a2042c16a6399551fbb13f062cbc9a2de38d6c0bb5426962e9dbd"}, ] [package.dependencies] google-api-core = ">=1.31.5,<2.0.dev0 || >2.3.0,<3.0.0dev" -google-auth = ">=1.25.0,<3.0dev" +google-auth = ">=2.23.3,<3.0dev" google-cloud-core = ">=2.3.0,<3.0dev" +google-crc32c = ">=1.0,<2.0dev" google-resumable-media = ">=2.6.0" requests = ">=2.18.0,<3.0.0dev" @@ -1350,13 +1416,13 @@ testing = ["pytest"] [[package]] name = "google-resumable-media" -version = "2.6.0" +version = "2.7.0" description = "Utilities for Google Media Downloads and Resumable Uploads" optional = true python-versions = ">= 3.7" files = [ - {file = "google-resumable-media-2.6.0.tar.gz", hash = "sha256:972852f6c65f933e15a4a210c2b96930763b47197cdf4aa5f5bea435efb626e7"}, - {file = "google_resumable_media-2.6.0-py2.py3-none-any.whl", hash = "sha256:fc03d344381970f79eebb632a3c18bb1828593a2dc5572b5f90115ef7d11e81b"}, + {file = "google-resumable-media-2.7.0.tar.gz", hash = "sha256:5f18f5fa9836f4b083162064a1c2c98c17239bfda9ca50ad970ccf905f3e625b"}, + {file = "google_resumable_media-2.7.0-py2.py3-none-any.whl", hash = "sha256:79543cfe433b63fd81c0844b7803aba1bb8950b47bedf7d980c38fa123937e08"}, ] [package.dependencies] @@ -1368,13 +1434,13 @@ requests = ["requests (>=2.18.0,<3.0.0dev)"] [[package]] name = "googleapis-common-protos" -version = "1.60.0" +version = "1.62.0" description = "Common protobufs used in Google APIs" optional = true python-versions = ">=3.7" files = [ - {file = "googleapis-common-protos-1.60.0.tar.gz", hash = "sha256:e73ebb404098db405ba95d1e1ae0aa91c3e15a71da031a2eeb6b2e23e7bc3708"}, - {file = "googleapis_common_protos-1.60.0-py2.py3-none-any.whl", hash = "sha256:69f9bbcc6acde92cab2db95ce30a70bd2b81d20b12eff3f1aabaffcbe8a93918"}, + {file = "googleapis-common-protos-1.62.0.tar.gz", hash = "sha256:83f0ece9f94e5672cced82f592d2a5edf527a96ed1794f0bab36d5735c996277"}, + {file = "googleapis_common_protos-1.62.0-py2.py3-none-any.whl", hash = "sha256:4750113612205514f9f6aa4cb00d523a94f3e8c06c5ad2fee466387dc4875f07"}, ] [package.dependencies] @@ -1396,90 +1462,84 @@ files = [ [[package]] name = "greenlet" -version = "2.0.2" +version = "3.0.3" description = "Lightweight in-process concurrent programming" optional = true -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" -files = [ - {file = "greenlet-2.0.2-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:bdfea8c661e80d3c1c99ad7c3ff74e6e87184895bbaca6ee8cc61209f8b9b85d"}, - {file = "greenlet-2.0.2-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:9d14b83fab60d5e8abe587d51c75b252bcc21683f24699ada8fb275d7712f5a9"}, - {file = "greenlet-2.0.2-cp27-cp27m-win32.whl", hash = "sha256:6c3acb79b0bfd4fe733dff8bc62695283b57949ebcca05ae5c129eb606ff2d74"}, - {file = "greenlet-2.0.2-cp27-cp27m-win_amd64.whl", hash = "sha256:283737e0da3f08bd637b5ad058507e578dd462db259f7f6e4c5c365ba4ee9343"}, - {file = "greenlet-2.0.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d27ec7509b9c18b6d73f2f5ede2622441de812e7b1a80bbd446cb0633bd3d5ae"}, - {file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d967650d3f56af314b72df7089d96cda1083a7fc2da05b375d2bc48c82ab3f3c"}, - {file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:30bcf80dda7f15ac77ba5af2b961bdd9dbc77fd4ac6105cee85b0d0a5fcf74df"}, - {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26fbfce90728d82bc9e6c38ea4d038cba20b7faf8a0ca53a9c07b67318d46088"}, - {file = 
"greenlet-2.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9190f09060ea4debddd24665d6804b995a9c122ef5917ab26e1566dcc712ceeb"}, - {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d75209eed723105f9596807495d58d10b3470fa6732dd6756595e89925ce2470"}, - {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3a51c9751078733d88e013587b108f1b7a1fb106d402fb390740f002b6f6551a"}, - {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:76ae285c8104046b3a7f06b42f29c7b73f77683df18c49ab5af7983994c2dd91"}, - {file = "greenlet-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:2d4686f195e32d36b4d7cf2d166857dbd0ee9f3d20ae349b6bf8afc8485b3645"}, - {file = "greenlet-2.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c4302695ad8027363e96311df24ee28978162cdcdd2006476c43970b384a244c"}, - {file = "greenlet-2.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d4606a527e30548153be1a9f155f4e283d109ffba663a15856089fb55f933e47"}, - {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c48f54ef8e05f04d6eff74b8233f6063cb1ed960243eacc474ee73a2ea8573ca"}, - {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1846f1b999e78e13837c93c778dcfc3365902cfb8d1bdb7dd73ead37059f0d0"}, - {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a06ad5312349fec0ab944664b01d26f8d1f05009566339ac6f63f56589bc1a2"}, - {file = "greenlet-2.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:eff4eb9b7eb3e4d0cae3d28c283dc16d9bed6b193c2e1ace3ed86ce48ea8df19"}, - {file = "greenlet-2.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5454276c07d27a740c5892f4907c86327b632127dd9abec42ee62e12427ff7e3"}, - {file = "greenlet-2.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:7cafd1208fdbe93b67c7086876f061f660cfddc44f404279c1585bbf3cdc64c5"}, - {file = "greenlet-2.0.2-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:910841381caba4f744a44bf81bfd573c94e10b3045ee00de0cbf436fe50673a6"}, - {file = "greenlet-2.0.2-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:18a7f18b82b52ee85322d7a7874e676f34ab319b9f8cce5de06067384aa8ff43"}, - {file = "greenlet-2.0.2-cp35-cp35m-win32.whl", hash = "sha256:03a8f4f3430c3b3ff8d10a2a86028c660355ab637cee9333d63d66b56f09d52a"}, - {file = "greenlet-2.0.2-cp35-cp35m-win_amd64.whl", hash = "sha256:4b58adb399c4d61d912c4c331984d60eb66565175cdf4a34792cd9600f21b394"}, - {file = "greenlet-2.0.2-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:703f18f3fda276b9a916f0934d2fb6d989bf0b4fb5a64825260eb9bfd52d78f0"}, - {file = "greenlet-2.0.2-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:32e5b64b148966d9cccc2c8d35a671409e45f195864560829f395a54226408d3"}, - {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2dd11f291565a81d71dab10b7033395b7a3a5456e637cf997a6f33ebdf06f8db"}, - {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e0f72c9ddb8cd28532185f54cc1453f2c16fb417a08b53a855c4e6a418edd099"}, - {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd021c754b162c0fb55ad5d6b9d960db667faad0fa2ff25bb6e1301b0b6e6a75"}, - {file = "greenlet-2.0.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:3c9b12575734155d0c09d6c3e10dbd81665d5c18e1a7c6597df72fd05990c8cf"}, - {file = 
"greenlet-2.0.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b9ec052b06a0524f0e35bd8790686a1da006bd911dd1ef7d50b77bfbad74e292"}, - {file = "greenlet-2.0.2-cp36-cp36m-win32.whl", hash = "sha256:dbfcfc0218093a19c252ca8eb9aee3d29cfdcb586df21049b9d777fd32c14fd9"}, - {file = "greenlet-2.0.2-cp36-cp36m-win_amd64.whl", hash = "sha256:9f35ec95538f50292f6d8f2c9c9f8a3c6540bbfec21c9e5b4b751e0a7c20864f"}, - {file = "greenlet-2.0.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:d5508f0b173e6aa47273bdc0a0b5ba055b59662ba7c7ee5119528f466585526b"}, - {file = "greenlet-2.0.2-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:f82d4d717d8ef19188687aa32b8363e96062911e63ba22a0cff7802a8e58e5f1"}, - {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9c59a2120b55788e800d82dfa99b9e156ff8f2227f07c5e3012a45a399620b7"}, - {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2780572ec463d44c1d3ae850239508dbeb9fed38e294c68d19a24d925d9223ca"}, - {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:937e9020b514ceedb9c830c55d5c9872abc90f4b5862f89c0887033ae33c6f73"}, - {file = "greenlet-2.0.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:36abbf031e1c0f79dd5d596bfaf8e921c41df2bdf54ee1eed921ce1f52999a86"}, - {file = "greenlet-2.0.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:18e98fb3de7dba1c0a852731c3070cf022d14f0d68b4c87a19cc1016f3bb8b33"}, - {file = "greenlet-2.0.2-cp37-cp37m-win32.whl", hash = "sha256:3f6ea9bd35eb450837a3d80e77b517ea5bc56b4647f5502cd28de13675ee12f7"}, - {file = "greenlet-2.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:7492e2b7bd7c9b9916388d9df23fa49d9b88ac0640db0a5b4ecc2b653bf451e3"}, - {file = "greenlet-2.0.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:b864ba53912b6c3ab6bcb2beb19f19edd01a6bfcbdfe1f37ddd1778abfe75a30"}, - {file = "greenlet-2.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1087300cf9700bbf455b1b97e24db18f2f77b55302a68272c56209d5587c12d1"}, - {file = "greenlet-2.0.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:ba2956617f1c42598a308a84c6cf021a90ff3862eddafd20c3333d50f0edb45b"}, - {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3a569657468b6f3fb60587e48356fe512c1754ca05a564f11366ac9e306526"}, - {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8eab883b3b2a38cc1e050819ef06a7e6344d4a990d24d45bc6f2cf959045a45b"}, - {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acd2162a36d3de67ee896c43effcd5ee3de247eb00354db411feb025aa319857"}, - {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0bf60faf0bc2468089bdc5edd10555bab6e85152191df713e2ab1fcc86382b5a"}, - {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b0ef99cdbe2b682b9ccbb964743a6aca37905fda5e0452e5ee239b1654d37f2a"}, - {file = "greenlet-2.0.2-cp38-cp38-win32.whl", hash = "sha256:b80f600eddddce72320dbbc8e3784d16bd3fb7b517e82476d8da921f27d4b249"}, - {file = "greenlet-2.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:4d2e11331fc0c02b6e84b0d28ece3a36e0548ee1a1ce9ddde03752d9b79bba40"}, - {file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8512a0c38cfd4e66a858ddd1b17705587900dd760c6003998e9472b77b56d417"}, - {file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = 
"sha256:88d9ab96491d38a5ab7c56dd7a3cc37d83336ecc564e4e8816dbed12e5aaefc8"}, - {file = "greenlet-2.0.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:561091a7be172ab497a3527602d467e2b3fbe75f9e783d8b8ce403fa414f71a6"}, - {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:971ce5e14dc5e73715755d0ca2975ac88cfdaefcaab078a284fea6cfabf866df"}, - {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be4ed120b52ae4d974aa40215fcdfde9194d63541c7ded40ee12eb4dda57b76b"}, - {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94c817e84245513926588caf1152e3b559ff794d505555211ca041f032abbb6b"}, - {file = "greenlet-2.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1a819eef4b0e0b96bb0d98d797bef17dc1b4a10e8d7446be32d1da33e095dbb8"}, - {file = "greenlet-2.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7efde645ca1cc441d6dc4b48c0f7101e8d86b54c8530141b09fd31cef5149ec9"}, - {file = "greenlet-2.0.2-cp39-cp39-win32.whl", hash = "sha256:ea9872c80c132f4663822dd2a08d404073a5a9b5ba6155bea72fb2a79d1093b5"}, - {file = "greenlet-2.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:db1a39669102a1d8d12b57de2bb7e2ec9066a6f2b3da35ae511ff93b01b5d564"}, - {file = "greenlet-2.0.2.tar.gz", hash = "sha256:e7c8dc13af7db097bed64a051d2dd49e9f0af495c26995c00a9ee842690d34c0"}, +python-versions = ">=3.7" +files = [ + {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, + {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, + {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, + {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, + {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, + {file = 
"greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, + {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, + {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, + {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, + {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"}, + {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"}, + {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"}, + {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"}, + {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"}, + {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"}, + {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"}, + {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = 
"sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"}, + {file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"}, + {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"}, + {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"}, + {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"}, + {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"}, + {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"}, + {file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"}, + {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"}, + {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"}, + {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"}, + {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"}, + {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, ] [package.extras] -docs = ["Sphinx", "docutils (<0.18)"] +docs = ["Sphinx", "furo"] test = ["objgraph", "psutil"] 
[[package]] name = "identify" -version = "2.5.29" +version = "2.5.35" description = "File identification library for Python" optional = false python-versions = ">=3.8" files = [ - {file = "identify-2.5.29-py2.py3-none-any.whl", hash = "sha256:24437fbf6f4d3fe6efd0eb9d67e24dd9106db99af5ceb27996a5f7895f24bf1b"}, - {file = "identify-2.5.29.tar.gz", hash = "sha256:d43d52b86b15918c137e3a74fff5224f60385cd0e9c38e99d07c257f02f151a5"}, + {file = "identify-2.5.35-py2.py3-none-any.whl", hash = "sha256:c4de0081837b211594f8e877a6b4fad7ca32bbfc1a9307fdd61c28bfe923f13e"}, + {file = "identify-2.5.35.tar.gz", hash = "sha256:10a7ca245cfcd756a554a7288159f72ff105ad233c7c4b9c6f0f4d108f5f6791"}, ] [package.extras] @@ -1487,51 +1547,51 @@ license = ["ukkonen"] [[package]] name = "idna" -version = "3.4" +version = "3.6" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.5" files = [ - {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, - {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, ] [[package]] name = "importlib-metadata" -version = "6.8.0" +version = "7.0.1" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_metadata-6.8.0-py3-none-any.whl", hash = "sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb"}, - {file = "importlib_metadata-6.8.0.tar.gz", hash = "sha256:dbace7892d8c0c4ac1ad096662232f831d4e64f4c4545bd53016a3e9d4654743"}, + {file = "importlib_metadata-7.0.1-py3-none-any.whl", hash = "sha256:4805911c3a4ec7c3966410053e9ec6a1fecd629117df5adee56dfc9432a1081e"}, + {file = "importlib_metadata-7.0.1.tar.gz", hash = "sha256:f238736bb06590ae52ac1fab06a3a9ef1d8dce2b7a35b5ab329371d6c8f5d2cc"}, ] [package.dependencies] zipp = ">=0.5" [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] [[package]] name = "importlib-resources" -version = "6.1.0" +version = "5.13.0" description = "Read resources from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_resources-6.1.0-py3-none-any.whl", hash = "sha256:aa50258bbfa56d4e33fbd8aa3ef48ded10d1735f11532b8df95388cc6bdb7e83"}, - {file = "importlib_resources-6.1.0.tar.gz", hash = "sha256:9d48dcccc213325e810fd723e7fbb45ccb39f6cf5c31f00cf2b965f5f10f3cb9"}, + {file = "importlib_resources-5.13.0-py3-none-any.whl", hash = "sha256:9f7bd0c97b79972a6cce36a366356d16d5e13b09679c11a58f1014bfdf8e64b2"}, + {file = "importlib_resources-5.13.0.tar.gz", hash = "sha256:82d5c6cca930697dbbd86c93333bb2c2e72861d4789a11c2662b933e5ad2b528"}, ] [package.dependencies] zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} [package.extras] 
-docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff", "zipp (>=3.17)"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff"] [[package]] name = "iniconfig" @@ -1571,13 +1631,13 @@ files = [ [[package]] name = "jinja2" -version = "3.1.2" +version = "3.1.3" description = "A very fast and expressive template engine." optional = false python-versions = ">=3.7" files = [ - {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, - {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, ] [package.dependencies] @@ -1597,6 +1657,23 @@ files = [ {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, ] +[[package]] +name = "joserfc" +version = "0.9.0" +description = "The ultimate Python library for JOSE RFCs, including JWS, JWE, JWK, JWA, JWT" +optional = false +python-versions = ">=3.8" +files = [ + {file = "joserfc-0.9.0-py3-none-any.whl", hash = "sha256:4026bdbe2c196cd40574e916fa1e28874d99649412edaab0e373dec3077153fb"}, + {file = "joserfc-0.9.0.tar.gz", hash = "sha256:eebca7f587b1761ce43a98ffd5327f2b600b9aa5bb0a77b947687f503ad43bc0"}, +] + +[package.dependencies] +cryptography = "*" + +[package.extras] +drafts = ["pycryptodome"] + [[package]] name = "jschema-to-python" version = "1.2.3" @@ -1640,19 +1717,18 @@ jsonpointer = ">=1.9" [[package]] name = "jsonpickle" -version = "3.0.2" +version = "3.0.3" description = "Python library for serializing any arbitrary object graph into JSON" optional = false python-versions = ">=3.7" files = [ - {file = "jsonpickle-3.0.2-py3-none-any.whl", hash = "sha256:4a8442d97ca3f77978afa58068768dba7bff2dbabe79a9647bc3cdafd4ef019f"}, - {file = "jsonpickle-3.0.2.tar.gz", hash = "sha256:e37abba4bfb3ca4a4647d28bb9f4706436f7b46c8a8333b4a718abafa8e46b37"}, + {file = "jsonpickle-3.0.3-py3-none-any.whl", hash = "sha256:e8d6dcc58f6722bea0321cd328fbda81c582461185688a535df02be0f699afb4"}, + {file = "jsonpickle-3.0.3.tar.gz", hash = "sha256:5691f44495327858ab3a95b9c440a79b41e35421be1a6e09a47b6c9b9421fd06"}, ] [package.extras] -docs = ["jaraco.packaging (>=3.2)", "rst.linker (>=1.9)", "sphinx"] -testing = ["ecdsa", "feedparser", "gmpy2", "numpy", "pandas", "pymongo", "pytest (>=3.5,!=3.7.3)", "pytest-black-multipy", "pytest-checkdocs (>=1.2.3)", "pytest-cov", "pytest-flake8 (>=1.1.1)", "scikit-learn", "sqlalchemy"] -testing-libs = ["simplejson", "ujson"] +docs = ["furo", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "sphinx"] +testing = ["ecdsa", "feedparser", "gmpy2", "numpy", "pandas", "pymongo", "pytest (>=3.5,!=3.7.3)", "pytest-checkdocs (>=1.2.3)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-ruff", "scikit-learn", "simplejson", "sqlalchemy", "ujson"] [[package]] name 
= "jsonpointer" @@ -1667,13 +1743,13 @@ files = [ [[package]] name = "jsonschema" -version = "4.19.1" +version = "4.21.1" description = "An implementation of JSON Schema validation for Python" optional = false python-versions = ">=3.8" files = [ - {file = "jsonschema-4.19.1-py3-none-any.whl", hash = "sha256:cd5f1f9ed9444e554b38ba003af06c0a8c2868131e56bfbef0550fb450c0330e"}, - {file = "jsonschema-4.19.1.tar.gz", hash = "sha256:ec84cc37cfa703ef7cd4928db24f9cb31428a5d0fa77747b8b51a847458e0bbf"}, + {file = "jsonschema-4.21.1-py3-none-any.whl", hash = "sha256:7996507afae316306f9e2290407761157c6f78002dcf7419acb99822143d1c6f"}, + {file = "jsonschema-4.21.1.tar.gz", hash = "sha256:85727c00279f5fa6bedbe6238d2aa6403bedd8b4864ab11207d07df3cc1b2ee5"}, ] [package.dependencies] @@ -1689,36 +1765,36 @@ format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validat format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] [[package]] -name = "jsonschema-path" -version = "0.3.2" +name = "jsonschema-spec" +version = "0.1.3" description = "JSONSchema Spec with object-oriented paths" optional = false -python-versions = ">=3.8.0,<4.0.0" +python-versions = ">=3.7.0,<4.0.0" files = [ - {file = "jsonschema_path-0.3.2-py3-none-any.whl", hash = "sha256:271aedfefcd161a0f467bdf23e1d9183691a61eaabf4b761046a914e369336c7"}, - {file = "jsonschema_path-0.3.2.tar.gz", hash = "sha256:4d0dababf341e36e9b91a5fb2a3e3fd300b0150e7fe88df4e55cc8253c5a3989"}, + {file = "jsonschema_spec-0.1.3-py3-none-any.whl", hash = "sha256:b3cde007ad65c2e631e2f8653cf187124a2c714d02d9fafbab68ad64bf5745d6"}, + {file = "jsonschema_spec-0.1.3.tar.gz", hash = "sha256:8d8db7c255e524fab1016a952a9143e5b6e3c074f4ed25d1878f8e97806caec0"}, ] [package.dependencies] +jsonschema = ">=4.0.0,<5.0.0" pathable = ">=0.4.1,<0.5.0" PyYAML = ">=5.1" -referencing = ">=0.28.0,<0.32.0" -requests = ">=2.31.0,<3.0.0" +typing-extensions = ">=4.3.0,<5.0.0" [[package]] name = "jsonschema-specifications" -version = "2023.7.1" +version = "2023.12.1" description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" optional = false python-versions = ">=3.8" files = [ - {file = "jsonschema_specifications-2023.7.1-py3-none-any.whl", hash = "sha256:05adf340b659828a004220a9613be00fa3f223f2b82002e273dee62fd50524b1"}, - {file = "jsonschema_specifications-2023.7.1.tar.gz", hash = "sha256:c91a50404e88a1f6ba40636778e2ee08f6e24c5613fe4c53ac24578a5a7f72bb"}, + {file = "jsonschema_specifications-2023.12.1-py3-none-any.whl", hash = "sha256:87e4fdf3a94858b8a2ba2778d9ba57d8a9cafca7c7489c46ba0d30a8bc6a9c3c"}, + {file = "jsonschema_specifications-2023.12.1.tar.gz", hash = "sha256:48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc"}, ] [package.dependencies] importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""} -referencing = ">=0.28.0" +referencing = ">=0.31.0" [[package]] name = "junit-xml" @@ -1806,71 +1882,71 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] [[package]] name = "markupsafe" -version = "2.1.3" +version = "2.1.5" description = "Safely add untrusted strings to HTML/XML markup." 
optional = false python-versions = ">=3.7" files = [ - {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-win32.whl", hash = "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, - {file = 
"MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-win32.whl", hash = "sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-win_amd64.whl", hash = "sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e"}, - {file = 
"MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-win32.whl", hash = "sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-win32.whl", hash = "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba"}, - {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = 
"MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = 
"sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = 
"MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, ] [[package]] @@ -1885,109 +1961,150 @@ files = [ ] [[package]] -name = "mmhash3" -version = "3.0.1" -description = "Python wrapper for MurmurHash (MurmurHash3), a set of fast and robust hash functions." +name = "mmh3" +version = "4.1.0" +description = "Python extension for MurmurHash (MurmurHash3), a set of fast and robust hash functions." optional = false python-versions = "*" files = [ - {file = "mmhash3-3.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:47deea30cd8d3d5cd52dc740902a4c70383bfe8248eac29d0877fe63e03c2713"}, - {file = "mmhash3-3.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ecdaf4d1de617818bf05cd526ca558db6010beeba7ea9e19f695f2bdcac0e0a4"}, - {file = "mmhash3-3.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4675585617584e9e1aafa3a90ac0ac9a437257c507748d97de8b21977e9d6745"}, - {file = "mmhash3-3.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ebfd0c2af09b41f0fe1dd260799bda90a0fc7eba4477ccaeb3951527700cd58f"}, - {file = "mmhash3-3.0.1-cp310-cp310-win32.whl", hash = "sha256:68587dec7b8acdb7528fd511e295d8b5ccfe26022923a69867e1822f0fdb4c44"}, - {file = "mmhash3-3.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:54954ebe3a614f19ab4cfe24fa66ea093c493d9fac0533d002dd64c2540a0c99"}, - {file = "mmhash3-3.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b172f3bd3220b0bf56fd7cc760fd8a9033def47103611d63fe867003079a1256"}, - {file = "mmhash3-3.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:de7895eafabc32f7c0c09a81a624921f536768de6e438e3de69e3e954a4d7072"}, - {file = "mmhash3-3.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4b0914effe4ddd8d33149e3508564c17719465b0cc81691c4fa50d5e0e14f80"}, - {file = "mmhash3-3.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0575050ac691475938df1ad03d8738c5bd1eadef62093e76157ebb7f2df0946"}, - {file = "mmhash3-3.0.1-cp311-cp311-win32.whl", hash = "sha256:22f92f0f88f28b215357acd346362fa9f7c9fffb436beb42cc4b442b676dbaa3"}, - {file = "mmhash3-3.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:538240ab7936bf71b18304e5a7e7fd3c4c2fab103330ea99584bb4f777299a2b"}, - {file = "mmhash3-3.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = 
"sha256:ca791bfb311e36ce13998e4632262ed4b95da9d3461941e18b6690760171a045"}, - {file = "mmhash3-3.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b41708f72c6aa2a49ada1f0b61e85c05cd8389ad31d463fd5bca29999a4d5f9c"}, - {file = "mmhash3-3.0.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a3ce9b4533ddc0a88ba045a27309714c3b127bd05e57fd252d1d5a71d4247ea7"}, - {file = "mmhash3-3.0.1-cp36-cp36m-win32.whl", hash = "sha256:bfafeb96fdeb10db8767d06e1f07b6fdcddba4aaa0dd15058561a49f7ae45345"}, - {file = "mmhash3-3.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:97fe077b24c948709ed2afc749bf6285d407bc54ff12c63d2dc86678c38a0b8e"}, - {file = "mmhash3-3.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0cfd91ccd5fca1ba7ee925297131a15dfb94c714bfe6ba0fb3b1ca78b12bbfec"}, - {file = "mmhash3-3.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d51b1005233141ce7394531af40a3f0fc1f274467bf8dff44dcf7987924fe58"}, - {file = "mmhash3-3.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:855c67b100e37df166acf79cdff58fa8f9f6c48be0d1e1b6e9ad0fa34a9661ef"}, - {file = "mmhash3-3.0.1-cp37-cp37m-win32.whl", hash = "sha256:bb3030df1334fd665427f8be8e8ce4f04aeab7f6010ce4f2c128f0099bdab96f"}, - {file = "mmhash3-3.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:1545e1177294afe4912d5a5e401c7fa9b799dd109b30289e7af74d5b07e7c474"}, - {file = "mmhash3-3.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2479899e7dda834a671991a1098a691ab1c2eaa20c3e939d691ca4a19361cfe0"}, - {file = "mmhash3-3.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9056196d5e3d3d844433a63d806a683f710ab3aaf1c910550c7746464bc43ae"}, - {file = "mmhash3-3.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0d4c307af0bf70207305f70f131898be071d1b19a89f462b13487f5c25e8d4e"}, - {file = "mmhash3-3.0.1-cp38-cp38-win32.whl", hash = "sha256:5f885f65e329fd14bc38debac4a79eacf381e856965d9c65c4d1c6946ea190d0"}, - {file = "mmhash3-3.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:3b42d0bda5e1cd22c18b16887b0521060fb59d0aaaaf033feacbc0a2492d20fe"}, - {file = "mmhash3-3.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d3f333286bb87aa9dc6bd8e7960a55a27b011a401f24b889a50e6d219f65e7c9"}, - {file = "mmhash3-3.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6b7ef2eb95a18bcd02ce0d3e047adde3a025afd96c1d266a8a0d44574f44a307"}, - {file = "mmhash3-3.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6ac8a5f511c60f341bf9cae462bb4941abb149d98464ba5f4f4548875b601c6"}, - {file = "mmhash3-3.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efef9e632e6e248e46f52d108a5ebd1f28edaf427b7fd47ebf97dbff4b2cab81"}, - {file = "mmhash3-3.0.1-cp39-cp39-win32.whl", hash = "sha256:bdac06d72e448c67afb12e758b203d875e29d4097bb279a38a5649d44b518ba7"}, - {file = "mmhash3-3.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:0baeaa20cac5f75ed14f28056826bd9d9c8b2354b382073f3fd5190708992a0d"}, - {file = "mmhash3-3.0.1.tar.gz", hash = "sha256:a00d68f4a1cc434b9501513c8a29e18ed1ddad383677d72b41d71d0d862348af"}, + {file = "mmh3-4.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:be5ac76a8b0cd8095784e51e4c1c9c318c19edcd1709a06eb14979c8d850c31a"}, + {file = "mmh3-4.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:98a49121afdfab67cd80e912b36404139d7deceb6773a83620137aaa0da5714c"}, + {file = "mmh3-4.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5259ac0535874366e7d1a5423ef746e0d36a9e3c14509ce6511614bdc5a7ef5b"}, + {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5950827ca0453a2be357696da509ab39646044e3fa15cad364eb65d78797437"}, + {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1dd0f652ae99585b9dd26de458e5f08571522f0402155809fd1dc8852a613a39"}, + {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:99d25548070942fab1e4a6f04d1626d67e66d0b81ed6571ecfca511f3edf07e6"}, + {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53db8d9bad3cb66c8f35cbc894f336273f63489ce4ac416634932e3cbe79eb5b"}, + {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75da0f615eb55295a437264cc0b736753f830b09d102aa4c2a7d719bc445ec05"}, + {file = "mmh3-4.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b926b07fd678ea84b3a2afc1fa22ce50aeb627839c44382f3d0291e945621e1a"}, + {file = "mmh3-4.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c5b053334f9b0af8559d6da9dc72cef0a65b325ebb3e630c680012323c950bb6"}, + {file = "mmh3-4.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:5bf33dc43cd6de2cb86e0aa73a1cc6530f557854bbbe5d59f41ef6de2e353d7b"}, + {file = "mmh3-4.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fa7eacd2b830727ba3dd65a365bed8a5c992ecd0c8348cf39a05cc77d22f4970"}, + {file = "mmh3-4.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:42dfd6742b9e3eec599f85270617debfa0bbb913c545bb980c8a4fa7b2d047da"}, + {file = "mmh3-4.1.0-cp310-cp310-win32.whl", hash = "sha256:2974ad343f0d39dcc88e93ee6afa96cedc35a9883bc067febd7ff736e207fa47"}, + {file = "mmh3-4.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:74699a8984ded645c1a24d6078351a056f5a5f1fe5838870412a68ac5e28d865"}, + {file = "mmh3-4.1.0-cp310-cp310-win_arm64.whl", hash = "sha256:f0dc874cedc23d46fc488a987faa6ad08ffa79e44fb08e3cd4d4cf2877c00a00"}, + {file = "mmh3-4.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3280a463855b0eae64b681cd5b9ddd9464b73f81151e87bb7c91a811d25619e6"}, + {file = "mmh3-4.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:97ac57c6c3301769e757d444fa7c973ceb002cb66534b39cbab5e38de61cd896"}, + {file = "mmh3-4.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a7b6502cdb4dbd880244818ab363c8770a48cdccecf6d729ade0241b736b5ec0"}, + {file = "mmh3-4.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52ba2da04671a9621580ddabf72f06f0e72c1c9c3b7b608849b58b11080d8f14"}, + {file = "mmh3-4.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5a5fef4c4ecc782e6e43fbeab09cff1bac82c998a1773d3a5ee6a3605cde343e"}, + {file = "mmh3-4.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5135358a7e00991f73b88cdc8eda5203bf9de22120d10a834c5761dbeb07dd13"}, + {file = "mmh3-4.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cff9ae76a54f7c6fe0167c9c4028c12c1f6de52d68a31d11b6790bb2ae685560"}, + {file = "mmh3-4.1.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:f6f02576a4d106d7830ca90278868bf0983554dd69183b7bbe09f2fcd51cf54f"}, + {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:073d57425a23721730d3ff5485e2da489dd3c90b04e86243dd7211f889898106"}, + {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:71e32ddec7f573a1a0feb8d2cf2af474c50ec21e7a8263026e8d3b4b629805db"}, + {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7cbb20b29d57e76a58b40fd8b13a9130db495a12d678d651b459bf61c0714cea"}, + {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:a42ad267e131d7847076bb7e31050f6c4378cd38e8f1bf7a0edd32f30224d5c9"}, + {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4a013979fc9390abadc445ea2527426a0e7a4495c19b74589204f9b71bcaafeb"}, + {file = "mmh3-4.1.0-cp311-cp311-win32.whl", hash = "sha256:1d3b1cdad7c71b7b88966301789a478af142bddcb3a2bee563f7a7d40519a00f"}, + {file = "mmh3-4.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:0dc6dc32eb03727467da8e17deffe004fbb65e8b5ee2b502d36250d7a3f4e2ec"}, + {file = "mmh3-4.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:9ae3a5c1b32dda121c7dc26f9597ef7b01b4c56a98319a7fe86c35b8bc459ae6"}, + {file = "mmh3-4.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0033d60c7939168ef65ddc396611077a7268bde024f2c23bdc283a19123f9e9c"}, + {file = "mmh3-4.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d6af3e2287644b2b08b5924ed3a88c97b87b44ad08e79ca9f93d3470a54a41c5"}, + {file = "mmh3-4.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d82eb4defa245e02bb0b0dc4f1e7ee284f8d212633389c91f7fba99ba993f0a2"}, + {file = "mmh3-4.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba245e94b8d54765e14c2d7b6214e832557e7856d5183bc522e17884cab2f45d"}, + {file = "mmh3-4.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb04e2feeabaad6231e89cd43b3d01a4403579aa792c9ab6fdeef45cc58d4ec0"}, + {file = "mmh3-4.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1e3b1a27def545ce11e36158ba5d5390cdbc300cfe456a942cc89d649cf7e3b2"}, + {file = "mmh3-4.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce0ab79ff736d7044e5e9b3bfe73958a55f79a4ae672e6213e92492ad5e734d5"}, + {file = "mmh3-4.1.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b02268be6e0a8eeb8a924d7db85f28e47344f35c438c1e149878bb1c47b1cd3"}, + {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:deb887f5fcdaf57cf646b1e062d56b06ef2f23421c80885fce18b37143cba828"}, + {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:99dd564e9e2b512eb117bd0cbf0f79a50c45d961c2a02402787d581cec5448d5"}, + {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:08373082dfaa38fe97aa78753d1efd21a1969e51079056ff552e687764eafdfe"}, + {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:54b9c6a2ea571b714e4fe28d3e4e2db37abfd03c787a58074ea21ee9a8fd1740"}, + {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a7b1edf24c69e3513f879722b97ca85e52f9032f24a52284746877f6a7304086"}, + {file = "mmh3-4.1.0-cp312-cp312-win32.whl", hash = "sha256:411da64b951f635e1e2284b71d81a5a83580cea24994b328f8910d40bed67276"}, + {file = "mmh3-4.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:bebc3ecb6ba18292e3d40c8712482b4477abd6981c2ebf0e60869bd90f8ac3a9"}, + {file = "mmh3-4.1.0-cp312-cp312-win_arm64.whl", 
hash = "sha256:168473dd608ade6a8d2ba069600b35199a9af837d96177d3088ca91f2b3798e3"}, + {file = "mmh3-4.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:372f4b7e1dcde175507640679a2a8790185bb71f3640fc28a4690f73da986a3b"}, + {file = "mmh3-4.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:438584b97f6fe13e944faf590c90fc127682b57ae969f73334040d9fa1c7ffa5"}, + {file = "mmh3-4.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6e27931b232fc676675fac8641c6ec6b596daa64d82170e8597f5a5b8bdcd3b6"}, + {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:571a92bad859d7b0330e47cfd1850b76c39b615a8d8e7aa5853c1f971fd0c4b1"}, + {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a69d6afe3190fa08f9e3a58e5145549f71f1f3fff27bd0800313426929c7068"}, + {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:afb127be0be946b7630220908dbea0cee0d9d3c583fa9114a07156f98566dc28"}, + {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:940d86522f36348ef1a494cbf7248ab3f4a1638b84b59e6c9e90408bd11ad729"}, + {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3dcccc4935686619a8e3d1f7b6e97e3bd89a4a796247930ee97d35ea1a39341"}, + {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:01bb9b90d61854dfc2407c5e5192bfb47222d74f29d140cb2dd2a69f2353f7cc"}, + {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:bcb1b8b951a2c0b0fb8a5426c62a22557e2ffc52539e0a7cc46eb667b5d606a9"}, + {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6477a05d5e5ab3168e82e8b106e316210ac954134f46ec529356607900aea82a"}, + {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:da5892287e5bea6977364b15712a2573c16d134bc5fdcdd4cf460006cf849278"}, + {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:99180d7fd2327a6fffbaff270f760576839dc6ee66d045fa3a450f3490fda7f5"}, + {file = "mmh3-4.1.0-cp38-cp38-win32.whl", hash = "sha256:9b0d4f3949913a9f9a8fb1bb4cc6ecd52879730aab5ff8c5a3d8f5b593594b73"}, + {file = "mmh3-4.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:598c352da1d945108aee0c3c3cfdd0e9b3edef74108f53b49d481d3990402169"}, + {file = "mmh3-4.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:475d6d1445dd080f18f0f766277e1237fa2914e5fe3307a3b2a3044f30892103"}, + {file = "mmh3-4.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5ca07c41e6a2880991431ac717c2a049056fff497651a76e26fc22224e8b5732"}, + {file = "mmh3-4.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0ebe052fef4bbe30c0548d12ee46d09f1b69035ca5208a7075e55adfe091be44"}, + {file = "mmh3-4.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eaefd42e85afb70f2b855a011f7b4d8a3c7e19c3f2681fa13118e4d8627378c5"}, + {file = "mmh3-4.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0ae43caae5a47afe1b63a1ae3f0986dde54b5fb2d6c29786adbfb8edc9edfb"}, + {file = "mmh3-4.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6218666f74c8c013c221e7f5f8a693ac9cf68e5ac9a03f2373b32d77c48904de"}, + {file = "mmh3-4.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ac59294a536ba447b5037f62d8367d7d93b696f80671c2c45645fa9f1109413c"}, + {file = 
"mmh3-4.1.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:086844830fcd1e5c84fec7017ea1ee8491487cfc877847d96f86f68881569d2e"}, + {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e42b38fad664f56f77f6fbca22d08450f2464baa68acdbf24841bf900eb98e87"}, + {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d08b790a63a9a1cde3b5d7d733ed97d4eb884bfbc92f075a091652d6bfd7709a"}, + {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:73ea4cc55e8aea28c86799ecacebca09e5f86500414870a8abaedfcbaf74d288"}, + {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:f90938ff137130e47bcec8dc1f4ceb02f10178c766e2ef58a9f657ff1f62d124"}, + {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:aa1f13e94b8631c8cd53259250556edcf1de71738936b60febba95750d9632bd"}, + {file = "mmh3-4.1.0-cp39-cp39-win32.whl", hash = "sha256:a3b680b471c181490cf82da2142029edb4298e1bdfcb67c76922dedef789868d"}, + {file = "mmh3-4.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:fefef92e9c544a8dbc08f77a8d1b6d48006a750c4375bbcd5ff8199d761e263b"}, + {file = "mmh3-4.1.0-cp39-cp39-win_arm64.whl", hash = "sha256:8e2c1f6a2b41723a4f82bd5a762a777836d29d664fc0095f17910bea0adfd4a6"}, + {file = "mmh3-4.1.0.tar.gz", hash = "sha256:a1cf25348b9acd229dda464a094d6170f47d2850a1fcb762a3b6172d2ce6ca4a"}, ] +[package.extras] +test = ["mypy (>=1.0)", "pytest (>=7.0.0)"] + [[package]] name = "moto" -version = "4.2.12" +version = "5.0.2" description = "" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "moto-4.2.12-py2.py3-none-any.whl", hash = "sha256:bdcad46e066a55b7d308a786e5dca863b3cba04c6239c6974135a48d1198b3ab"}, - {file = "moto-4.2.12.tar.gz", hash = "sha256:7c4d37f47becb4a0526b64df54484e988c10fde26861fc3b5c065bc78800cb59"}, + {file = "moto-5.0.2-py2.py3-none-any.whl", hash = "sha256:71bb832a18b64f10fc4cec117b9b0e2305e5831d9a17eb74f6b9819ed7613843"}, + {file = "moto-5.0.2.tar.gz", hash = "sha256:7e27395e5c63ff9554ae14b5baa41bfe6d6b1be0e59eb02977c6ce28411246de"}, ] [package.dependencies] aws-xray-sdk = {version = ">=0.93,<0.96 || >0.96", optional = true, markers = "extra == \"server\""} boto3 = ">=1.9.201" -botocore = ">=1.12.201" +botocore = ">=1.14.0" cfn-lint = {version = ">=0.40.0", optional = true, markers = "extra == \"server\""} cryptography = ">=3.3.1" docker = {version = ">=3.0.0", optional = true, markers = "extra == \"server\""} -ecdsa = {version = "!=0.15", optional = true, markers = "extra == \"server\""} flask = {version = "<2.2.0 || >2.2.0,<2.2.1 || >2.2.1", optional = true, markers = "extra == \"server\""} flask-cors = {version = "*", optional = true, markers = "extra == \"server\""} graphql-core = {version = "*", optional = true, markers = "extra == \"server\""} Jinja2 = ">=2.10.1" +joserfc = {version = ">=0.9.0", optional = true, markers = "extra == \"server\""} jsondiff = {version = ">=1.1.2", optional = true, markers = "extra == \"server\""} openapi-spec-validator = {version = ">=0.5.0", optional = true, markers = "extra == \"server\""} -py-partiql-parser = {version = "0.5.0", optional = true, markers = "extra == \"server\""} +py-partiql-parser = {version = "0.5.1", optional = true, markers = "extra == \"server\""} pyparsing = {version = ">=3.0.7", optional = true, markers = "extra == \"server\""} python-dateutil = ">=2.1,<3.0.0" -python-jose = {version = ">=3.1.0,<4.0.0", extras = ["cryptography"], optional = true, markers = "extra == \"server\""} 
PyYAML = {version = ">=5.1", optional = true, markers = "extra == \"server\""} requests = ">=2.5" -responses = ">=0.13.0" +responses = ">=0.15.0" setuptools = {version = "*", optional = true, markers = "extra == \"server\""} -sshpubkeys = {version = ">=3.1.0", optional = true, markers = "extra == \"server\""} werkzeug = ">=0.5,<2.2.0 || >2.2.0,<2.2.1 || >2.2.1" xmltodict = "*" [package.extras] -all = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.0)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] -apigateway = ["PyYAML (>=5.1)", "ecdsa (!=0.15)", "openapi-spec-validator (>=0.5.0)", "python-jose[cryptography] (>=3.1.0,<4.0.0)"] -apigatewayv2 = ["PyYAML (>=5.1)"] +all = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "graphql-core", "joserfc (>=0.9.0)", "jsondiff (>=1.1.2)", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.1)", "pyparsing (>=3.0.7)", "setuptools"] +apigateway = ["PyYAML (>=5.1)", "joserfc (>=0.9.0)", "openapi-spec-validator (>=0.5.0)"] +apigatewayv2 = ["PyYAML (>=5.1)", "openapi-spec-validator (>=0.5.0)"] appsync = ["graphql-core"] awslambda = ["docker (>=3.0.0)"] batch = ["docker (>=3.0.0)"] -cloudformation = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.0)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] -cognitoidp = ["ecdsa (!=0.15)", "python-jose[cryptography] (>=3.1.0,<4.0.0)"] -ds = ["sshpubkeys (>=3.1.0)"] -dynamodb = ["docker (>=3.0.0)", "py-partiql-parser (==0.5.0)"] -dynamodbstreams = ["docker (>=3.0.0)", "py-partiql-parser (==0.5.0)"] -ebs = ["sshpubkeys (>=3.1.0)"] -ec2 = ["sshpubkeys (>=3.1.0)"] -efs = ["sshpubkeys (>=3.1.0)"] -eks = ["sshpubkeys (>=3.1.0)"] +cloudformation = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "graphql-core", "joserfc (>=0.9.0)", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.1)", "pyparsing (>=3.0.7)", "setuptools"] +cognitoidp = ["joserfc (>=0.9.0)"] +dynamodb = ["docker (>=3.0.0)", "py-partiql-parser (==0.5.1)"] +dynamodbstreams = ["docker (>=3.0.0)", "py-partiql-parser (==0.5.1)"] glue = ["pyparsing (>=3.0.7)"] iotdata = ["jsondiff (>=1.1.2)"] -proxy = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=2.5.1)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.0)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] -resourcegroupstaggingapi = ["PyYAML (>=5.1)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.0)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "sshpubkeys (>=3.1.0)"] -route53resolver = ["sshpubkeys (>=3.1.0)"] -s3 = ["PyYAML (>=5.1)", "py-partiql-parser (==0.5.0)"] -s3crc32c = ["PyYAML (>=5.1)", "crc32c", "py-partiql-parser (==0.5.0)"] -server = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", 
"cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "flask (!=2.2.0,!=2.2.1)", "flask-cors", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.0)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] +proxy = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=2.5.1)", "graphql-core", "joserfc (>=0.9.0)", "jsondiff (>=1.1.2)", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.1)", "pyparsing (>=3.0.7)", "setuptools"] +resourcegroupstaggingapi = ["PyYAML (>=5.1)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "graphql-core", "joserfc (>=0.9.0)", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.1)", "pyparsing (>=3.0.7)"] +s3 = ["PyYAML (>=5.1)", "py-partiql-parser (==0.5.1)"] +s3crc32c = ["PyYAML (>=5.1)", "crc32c", "py-partiql-parser (==0.5.1)"] +server = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "flask (!=2.2.0,!=2.2.1)", "flask-cors", "graphql-core", "joserfc (>=0.9.0)", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.1)", "pyparsing (>=3.0.7)", "setuptools"] ssm = ["PyYAML (>=5.1)"] xray = ["aws-xray-sdk (>=0.93,!=0.96)", "setuptools"] @@ -2010,13 +2127,13 @@ tests = ["pytest (>=4.6)"] [[package]] name = "msal" -version = "1.24.0" -description = "The Microsoft Authentication Library (MSAL) for Python library" +version = "1.26.0" +description = "The Microsoft Authentication Library (MSAL) for Python library enables your app to access the Microsoft Cloud by supporting authentication of users with Microsoft Azure Active Directory accounts (AAD) and Microsoft Accounts (MSA) using industry standard OAuth2 and OpenID Connect." optional = true python-versions = ">=2.7" files = [ - {file = "msal-1.24.0-py2.py3-none-any.whl", hash = "sha256:a7f2f342b80ba3fe168218003b6798cc81b83c9745284bf63fb8d4ec8e2dbc50"}, - {file = "msal-1.24.0.tar.gz", hash = "sha256:7d2ecdad41a5f73bb2b813f3061a4cf47c924621105a8ed137586fcb9d8f827e"}, + {file = "msal-1.26.0-py2.py3-none-any.whl", hash = "sha256:be77ba6a8f49c9ff598bbcdc5dfcf1c9842f3044300109af738e8c3e371065b5"}, + {file = "msal-1.26.0.tar.gz", hash = "sha256:224756079fe338be838737682b49f8ebc20a87c1c5eeaf590daae4532b83de15"}, ] [package.dependencies] @@ -2029,179 +2146,196 @@ broker = ["pymsalruntime (>=0.13.2,<0.14)"] [[package]] name = "msal-extensions" -version = "1.0.0" +version = "1.1.0" description = "Microsoft Authentication Library extensions (MSAL EX) provides a persistence API that can save your data on disk, encrypted on Windows, macOS and Linux. Concurrent data access will be coordinated by a file lock mechanism." 
optional = true -python-versions = "*" +python-versions = ">=3.7" files = [ - {file = "msal-extensions-1.0.0.tar.gz", hash = "sha256:c676aba56b0cce3783de1b5c5ecfe828db998167875126ca4b47dc6436451354"}, - {file = "msal_extensions-1.0.0-py2.py3-none-any.whl", hash = "sha256:91e3db9620b822d0ed2b4d1850056a0f133cba04455e62f11612e40f5502f2ee"}, + {file = "msal-extensions-1.1.0.tar.gz", hash = "sha256:6ab357867062db7b253d0bd2df6d411c7891a0ee7308d54d1e4317c1d1c54252"}, + {file = "msal_extensions-1.1.0-py3-none-any.whl", hash = "sha256:01be9711b4c0b1a151450068eeb2c4f0997df3bba085ac299de3a66f585e382f"}, ] [package.dependencies] msal = ">=0.4.1,<2.0.0" +packaging = "*" portalocker = [ - {version = ">=1.0,<3", markers = "python_version >= \"3.5\" and platform_system != \"Windows\""}, - {version = ">=1.6,<3", markers = "python_version >= \"3.5\" and platform_system == \"Windows\""}, + {version = ">=1.0,<3", markers = "platform_system != \"Windows\""}, + {version = ">=1.6,<3", markers = "platform_system == \"Windows\""}, ] [[package]] name = "msgpack" -version = "1.0.6" +version = "1.0.7" description = "MessagePack serializer" optional = true python-versions = ">=3.8" files = [ - {file = "msgpack-1.0.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f4321692e7f299277e55f322329b2c972d93bb612d85f3fda8741bec5c6285ce"}, - {file = "msgpack-1.0.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1f0e36a5fa7a182cde391a128a64f437657d2b9371dfa42eda3436245adccbf5"}, - {file = "msgpack-1.0.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b5c8dd9a386a66e50bd7fa22b7a49fb8ead2b3574d6bd69eb1caced6caea0803"}, - {file = "msgpack-1.0.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f85200ea102276afdd3749ca94747f057bbb868d1c52921ee2446730b508d0f"}, - {file = "msgpack-1.0.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a006c300e82402c0c8f1ded11352a3ba2a61b87e7abb3054c845af2ca8d553c"}, - {file = "msgpack-1.0.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33bbf47ea5a6ff20c23426106e81863cdbb5402de1825493026ce615039cc99d"}, - {file = "msgpack-1.0.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:04450e4b5e1e662e7c86b6aafb7c230af9334fd0becf5e6b80459a507884241c"}, - {file = "msgpack-1.0.6-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b06a5095a79384760625b5de3f83f40b3053a385fb893be8a106fbbd84c14980"}, - {file = "msgpack-1.0.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3910211b0ab20be3a38e0bb944ed45bd4265d8d9f11a3d1674b95b298e08dd5c"}, - {file = "msgpack-1.0.6-cp310-cp310-win32.whl", hash = "sha256:1dc67b40fe81217b308ab12651adba05e7300b3a2ccf84d6b35a878e308dd8d4"}, - {file = "msgpack-1.0.6-cp310-cp310-win_amd64.whl", hash = "sha256:885de1ed5ea01c1bfe0a34c901152a264c3c1f8f1d382042b92ea354bd14bb0e"}, - {file = "msgpack-1.0.6-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:099c3d8a027367e1a6fc55d15336f04ff65c60c4f737b5739f7db4525c65fe9e"}, - {file = "msgpack-1.0.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9b88dc97ba86c96b964c3745a445d9a65f76fe21955a953064fe04adb63e9367"}, - {file = "msgpack-1.0.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:00ce5f827d4f26fc094043e6f08b6069c1b148efa2631c47615ae14fb6cafc89"}, - {file = "msgpack-1.0.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd6af61388be65a8701f5787362cb54adae20007e0cc67ca9221a4b95115583b"}, - {file = 
"msgpack-1.0.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:652e4b7497825b0af6259e2c54700e6dc33d2fc4ed92b8839435090d4c9cc911"}, - {file = "msgpack-1.0.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b08676a17e3f791daad34d5fcb18479e9c85e7200d5a17cbe8de798643a7e37"}, - {file = "msgpack-1.0.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:229ccb6713c8b941eaa5cf13dc7478eba117f21513b5893c35e44483e2f0c9c8"}, - {file = "msgpack-1.0.6-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:95ade0bd4cf69e04e8b8f8ec2d197d9c9c4a9b6902e048dc7456bf6d82e12a80"}, - {file = "msgpack-1.0.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5b16344032a27b2ccfd341f89dadf3e4ef6407d91e4b93563c14644a8abb3ad7"}, - {file = "msgpack-1.0.6-cp311-cp311-win32.whl", hash = "sha256:55bb4a1bf94e39447bc08238a2fb8a767460388a8192f67c103442eb36920887"}, - {file = "msgpack-1.0.6-cp311-cp311-win_amd64.whl", hash = "sha256:ae97504958d0bc58c1152045c170815d5c4f8af906561ce044b6358b43d0c97e"}, - {file = "msgpack-1.0.6-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:7ecf431786019a7bfedc28281531d706627f603e3691d64eccdbce3ecd353823"}, - {file = "msgpack-1.0.6-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a635aecf1047255576dbb0927cbf9a7aa4a68e9d54110cc3c926652d18f144e0"}, - {file = "msgpack-1.0.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:102cfb54eaefa73e8ca1e784b9352c623524185c98e057e519545131a56fb0af"}, - {file = "msgpack-1.0.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c5e05e4f5756758c58a8088aa10dc70d851c89f842b611fdccfc0581c1846bc"}, - {file = "msgpack-1.0.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68569509dd015fcdd1e6b2b3ccc8c51fd27d9a97f461ccc909270e220ee09685"}, - {file = "msgpack-1.0.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bf652839d16de91fe1cfb253e0a88db9a548796939533894e07f45d4bdf90a5f"}, - {file = "msgpack-1.0.6-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14db7e1b7a7ed362b2f94897bf2486c899c8bb50f6e34b2db92fe534cdab306f"}, - {file = "msgpack-1.0.6-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:159cfec18a6e125dd4723e2b1de6f202b34b87c850fb9d509acfd054c01135e9"}, - {file = "msgpack-1.0.6-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:6a01a072b2219b65a6ff74df208f20b2cac9401c60adb676ee34e53b4c651077"}, - {file = "msgpack-1.0.6-cp312-cp312-win32.whl", hash = "sha256:e36560d001d4ba469d469b02037f2dd404421fd72277d9474efe9f03f83fced5"}, - {file = "msgpack-1.0.6-cp312-cp312-win_amd64.whl", hash = "sha256:5e7fae9ca93258a956551708cf60dc6c8145574e32ce8c8c4d894e63bcb04341"}, - {file = "msgpack-1.0.6-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:40b801b768f5a765e33c68f30665d3c6ee1c8623a2d2bb78e6e59f2db4e4ceb7"}, - {file = "msgpack-1.0.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:da057d3652e698b00746e47f06dbb513314f847421e857e32e1dc61c46f6c052"}, - {file = "msgpack-1.0.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f75114c05ec56566da6b55122791cf5bb53d5aada96a98c016d6231e03132f76"}, - {file = "msgpack-1.0.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:61213482b5a387ead9e250e9e3cb290292feca39dc83b41c3b1b7b8ffc8d8ecb"}, - {file = "msgpack-1.0.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bae6c561f11b444b258b1b4be2bdd1e1cf93cd1d80766b7e869a79db4543a8a8"}, - {file = 
"msgpack-1.0.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:619a63753ba9e792fe3c6c0fc2b9ee2cfbd92153dd91bee029a89a71eb2942cd"}, - {file = "msgpack-1.0.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:70843788c85ca385846a2d2f836efebe7bb2687ca0734648bf5c9dc6c55602d2"}, - {file = "msgpack-1.0.6-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:fb4571efe86545b772a4630fee578c213c91cbcfd20347806e47fd4e782a18fe"}, - {file = "msgpack-1.0.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bbb4448a05d261fae423d5c0b0974ad899f60825bc77eabad5a0c518e78448c2"}, - {file = "msgpack-1.0.6-cp38-cp38-win32.whl", hash = "sha256:5cd67674db3c73026e0a2c729b909780e88bd9cbc8184256f9567640a5d299a8"}, - {file = "msgpack-1.0.6-cp38-cp38-win_amd64.whl", hash = "sha256:a1cf98afa7ad5e7012454ca3fde254499a13f9d92fd50cb46118118a249a1355"}, - {file = "msgpack-1.0.6-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d6d25b8a5c70e2334ed61a8da4c11cd9b97c6fbd980c406033f06e4463fda006"}, - {file = "msgpack-1.0.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:88cdb1da7fdb121dbb3116910722f5acab4d6e8bfcacab8fafe27e2e7744dc6a"}, - {file = "msgpack-1.0.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3b5658b1f9e486a2eec4c0c688f213a90085b9cf2fec76ef08f98fdf6c62f4b9"}, - {file = "msgpack-1.0.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76820f2ece3b0a7c948bbb6a599020e29574626d23a649476def023cbb026787"}, - {file = "msgpack-1.0.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c780d992f5d734432726b92a0c87bf1857c3d85082a8dea29cbf56e44a132b3"}, - {file = "msgpack-1.0.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0ed35d6d6122d0baa9a1b59ebca4ee302139f4cfb57dab85e4c73ab793ae7ed"}, - {file = "msgpack-1.0.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:32c0aff31f33033f4961abc01f78497e5e07bac02a508632aef394b384d27428"}, - {file = "msgpack-1.0.6-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:35ad5aed9b52217d4cea739d0ea3a492a18dd86fecb4b132668a69f27fb0363b"}, - {file = "msgpack-1.0.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47275ff73005a3e5e146e50baa2378e1730cba6e292f0222bc496a8e4c4adfc8"}, - {file = "msgpack-1.0.6-cp39-cp39-win32.whl", hash = "sha256:7baf16fd8908a025c4a8d7b699103e72d41f967e2aee5a2065432bcdbd9fd06e"}, - {file = "msgpack-1.0.6-cp39-cp39-win_amd64.whl", hash = "sha256:fc97aa4b4fb928ff4d3b74da7c30b360d0cb3ede49a5a6e1fd9705f49aea1deb"}, - {file = "msgpack-1.0.6.tar.gz", hash = "sha256:25d3746da40f3c8c59c3b1d001e49fd2aa17904438f980d9a391370366df001e"}, + {file = "msgpack-1.0.7-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:04ad6069c86e531682f9e1e71b71c1c3937d6014a7c3e9edd2aa81ad58842862"}, + {file = "msgpack-1.0.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cca1b62fe70d761a282496b96a5e51c44c213e410a964bdffe0928e611368329"}, + {file = "msgpack-1.0.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e50ebce52f41370707f1e21a59514e3375e3edd6e1832f5e5235237db933c98b"}, + {file = "msgpack-1.0.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a7b4f35de6a304b5533c238bee86b670b75b03d31b7797929caa7a624b5dda6"}, + {file = "msgpack-1.0.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28efb066cde83c479dfe5a48141a53bc7e5f13f785b92ddde336c716663039ee"}, + {file = 
"msgpack-1.0.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4cb14ce54d9b857be9591ac364cb08dc2d6a5c4318c1182cb1d02274029d590d"}, + {file = "msgpack-1.0.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b573a43ef7c368ba4ea06050a957c2a7550f729c31f11dd616d2ac4aba99888d"}, + {file = "msgpack-1.0.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ccf9a39706b604d884d2cb1e27fe973bc55f2890c52f38df742bc1d79ab9f5e1"}, + {file = "msgpack-1.0.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cb70766519500281815dfd7a87d3a178acf7ce95390544b8c90587d76b227681"}, + {file = "msgpack-1.0.7-cp310-cp310-win32.whl", hash = "sha256:b610ff0f24e9f11c9ae653c67ff8cc03c075131401b3e5ef4b82570d1728f8a9"}, + {file = "msgpack-1.0.7-cp310-cp310-win_amd64.whl", hash = "sha256:a40821a89dc373d6427e2b44b572efc36a2778d3f543299e2f24eb1a5de65415"}, + {file = "msgpack-1.0.7-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:576eb384292b139821c41995523654ad82d1916da6a60cff129c715a6223ea84"}, + {file = "msgpack-1.0.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:730076207cb816138cf1af7f7237b208340a2c5e749707457d70705715c93b93"}, + {file = "msgpack-1.0.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:85765fdf4b27eb5086f05ac0491090fc76f4f2b28e09d9350c31aac25a5aaff8"}, + {file = "msgpack-1.0.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3476fae43db72bd11f29a5147ae2f3cb22e2f1a91d575ef130d2bf49afd21c46"}, + {file = "msgpack-1.0.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d4c80667de2e36970ebf74f42d1088cc9ee7ef5f4e8c35eee1b40eafd33ca5b"}, + {file = "msgpack-1.0.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b0bf0effb196ed76b7ad883848143427a73c355ae8e569fa538365064188b8e"}, + {file = "msgpack-1.0.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f9a7c509542db4eceed3dcf21ee5267ab565a83555c9b88a8109dcecc4709002"}, + {file = "msgpack-1.0.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:84b0daf226913133f899ea9b30618722d45feffa67e4fe867b0b5ae83a34060c"}, + {file = "msgpack-1.0.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ec79ff6159dffcc30853b2ad612ed572af86c92b5168aa3fc01a67b0fa40665e"}, + {file = "msgpack-1.0.7-cp311-cp311-win32.whl", hash = "sha256:3e7bf4442b310ff154b7bb9d81eb2c016b7d597e364f97d72b1acc3817a0fdc1"}, + {file = "msgpack-1.0.7-cp311-cp311-win_amd64.whl", hash = "sha256:3f0c8c6dfa6605ab8ff0611995ee30d4f9fcff89966cf562733b4008a3d60d82"}, + {file = "msgpack-1.0.7-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f0936e08e0003f66bfd97e74ee530427707297b0d0361247e9b4f59ab78ddc8b"}, + {file = "msgpack-1.0.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:98bbd754a422a0b123c66a4c341de0474cad4a5c10c164ceed6ea090f3563db4"}, + {file = "msgpack-1.0.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b291f0ee7961a597cbbcc77709374087fa2a9afe7bdb6a40dbbd9b127e79afee"}, + {file = "msgpack-1.0.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebbbba226f0a108a7366bf4b59bf0f30a12fd5e75100c630267d94d7f0ad20e5"}, + {file = "msgpack-1.0.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e2d69948e4132813b8d1131f29f9101bc2c915f26089a6d632001a5c1349672"}, + {file = "msgpack-1.0.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:bdf38ba2d393c7911ae989c3bbba510ebbcdf4ecbdbfec36272abe350c454075"}, + {file = "msgpack-1.0.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:993584fc821c58d5993521bfdcd31a4adf025c7d745bbd4d12ccfecf695af5ba"}, + {file = "msgpack-1.0.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:52700dc63a4676669b341ba33520f4d6e43d3ca58d422e22ba66d1736b0a6e4c"}, + {file = "msgpack-1.0.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e45ae4927759289c30ccba8d9fdce62bb414977ba158286b5ddaf8df2cddb5c5"}, + {file = "msgpack-1.0.7-cp312-cp312-win32.whl", hash = "sha256:27dcd6f46a21c18fa5e5deed92a43d4554e3df8d8ca5a47bf0615d6a5f39dbc9"}, + {file = "msgpack-1.0.7-cp312-cp312-win_amd64.whl", hash = "sha256:7687e22a31e976a0e7fc99c2f4d11ca45eff652a81eb8c8085e9609298916dcf"}, + {file = "msgpack-1.0.7-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5b6ccc0c85916998d788b295765ea0e9cb9aac7e4a8ed71d12e7d8ac31c23c95"}, + {file = "msgpack-1.0.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:235a31ec7db685f5c82233bddf9858748b89b8119bf4538d514536c485c15fe0"}, + {file = "msgpack-1.0.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cab3db8bab4b7e635c1c97270d7a4b2a90c070b33cbc00c99ef3f9be03d3e1f7"}, + {file = "msgpack-1.0.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bfdd914e55e0d2c9e1526de210f6fe8ffe9705f2b1dfcc4aecc92a4cb4b533d"}, + {file = "msgpack-1.0.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36e17c4592231a7dbd2ed09027823ab295d2791b3b1efb2aee874b10548b7524"}, + {file = "msgpack-1.0.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38949d30b11ae5f95c3c91917ee7a6b239f5ec276f271f28638dec9156f82cfc"}, + {file = "msgpack-1.0.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ff1d0899f104f3921d94579a5638847f783c9b04f2d5f229392ca77fba5b82fc"}, + {file = "msgpack-1.0.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:dc43f1ec66eb8440567186ae2f8c447d91e0372d793dfe8c222aec857b81a8cf"}, + {file = "msgpack-1.0.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:dd632777ff3beaaf629f1ab4396caf7ba0bdd075d948a69460d13d44357aca4c"}, + {file = "msgpack-1.0.7-cp38-cp38-win32.whl", hash = "sha256:4e71bc4416de195d6e9b4ee93ad3f2f6b2ce11d042b4d7a7ee00bbe0358bd0c2"}, + {file = "msgpack-1.0.7-cp38-cp38-win_amd64.whl", hash = "sha256:8f5b234f567cf76ee489502ceb7165c2a5cecec081db2b37e35332b537f8157c"}, + {file = "msgpack-1.0.7-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bfef2bb6ef068827bbd021017a107194956918ab43ce4d6dc945ffa13efbc25f"}, + {file = "msgpack-1.0.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:484ae3240666ad34cfa31eea7b8c6cd2f1fdaae21d73ce2974211df099a95d81"}, + {file = "msgpack-1.0.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3967e4ad1aa9da62fd53e346ed17d7b2e922cba5ab93bdd46febcac39be636fc"}, + {file = "msgpack-1.0.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8dd178c4c80706546702c59529ffc005681bd6dc2ea234c450661b205445a34d"}, + {file = "msgpack-1.0.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6ffbc252eb0d229aeb2f9ad051200668fc3a9aaa8994e49f0cb2ffe2b7867e7"}, + {file = "msgpack-1.0.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:822ea70dc4018c7e6223f13affd1c5c30c0f5c12ac1f96cd8e9949acddb48a61"}, + {file = "msgpack-1.0.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:384d779f0d6f1b110eae74cb0659d9aa6ff35aaf547b3955abf2ab4c901c4819"}, + {file = "msgpack-1.0.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f64e376cd20d3f030190e8c32e1c64582eba56ac6dc7d5b0b49a9d44021b52fd"}, + {file = "msgpack-1.0.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5ed82f5a7af3697b1c4786053736f24a0efd0a1b8a130d4c7bfee4b9ded0f08f"}, + {file = "msgpack-1.0.7-cp39-cp39-win32.whl", hash = "sha256:f26a07a6e877c76a88e3cecac8531908d980d3d5067ff69213653649ec0f60ad"}, + {file = "msgpack-1.0.7-cp39-cp39-win_amd64.whl", hash = "sha256:1dc93e8e4653bdb5910aed79f11e165c85732067614f180f70534f056da97db3"}, + {file = "msgpack-1.0.7.tar.gz", hash = "sha256:572efc93db7a4d27e404501975ca6d2d9775705c2d922390d878fcf768d92c87"}, ] [[package]] name = "multidict" -version = "6.0.4" +version = "6.0.5" description = "multidict implementation" optional = true python-versions = ">=3.7" files = [ - {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8"}, - {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeb6dcc05e911516ae3d1f207d4b0520d07f54484c49dfc294d6e7d63b734171"}, - {file = "multidict-6.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d6d635d5209b82a3492508cf5b365f3446afb65ae7ebd755e70e18f287b0adf7"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c048099e4c9e9d615545e2001d3d8a4380bd403e1a0578734e0d31703d1b0c0b"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea20853c6dbbb53ed34cb4d080382169b6f4554d394015f1bef35e881bf83547"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16d232d4e5396c2efbbf4f6d4df89bfa905eb0d4dc5b3549d872ab898451f569"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36c63aaa167f6c6b04ef2c85704e93af16c11d20de1d133e39de6a0e84582a93"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:64bdf1086b6043bf519869678f5f2757f473dee970d7abf6da91ec00acb9cb98"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:43644e38f42e3af682690876cff722d301ac585c5b9e1eacc013b7a3f7b696a0"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7582a1d1030e15422262de9f58711774e02fa80df0d1578995c76214f6954988"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ddff9c4e225a63a5afab9dd15590432c22e8057e1a9a13d28ed128ecf047bbdc"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ee2a1ece51b9b9e7752e742cfb661d2a29e7bcdba2d27e66e28a99f1890e4fa0"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a2e4369eb3d47d2034032a26c7a80fcb21a2cb22e1173d761a162f11e562caa5"}, - {file = "multidict-6.0.4-cp310-cp310-win32.whl", hash = "sha256:574b7eae1ab267e5f8285f0fe881f17efe4b98c39a40858247720935b893bba8"}, - {file = "multidict-6.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:4dcbb0906e38440fa3e325df2359ac6cb043df8e58c965bb45f4e406ecb162cc"}, - {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0dfad7a5a1e39c53ed00d2dd0c2e36aed4650936dc18fd9a1826a5ae1cad6f03"}, - {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:64da238a09d6039e3bd39bb3aee9c21a5e34f28bfa5aa22518581f910ff94af3"}, 
- {file = "multidict-6.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ff959bee35038c4624250473988b24f846cbeb2c6639de3602c073f10410ceba"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01a3a55bd90018c9c080fbb0b9f4891db37d148a0a18722b42f94694f8b6d4c9"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5cb09abb18c1ea940fb99360ea0396f34d46566f157122c92dfa069d3e0e982"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666daae833559deb2d609afa4490b85830ab0dfca811a98b70a205621a6109fe"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11bdf3f5e1518b24530b8241529d2050014c884cf18b6fc69c0c2b30ca248710"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d18748f2d30f94f498e852c67d61261c643b349b9d2a581131725595c45ec6c"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:458f37be2d9e4c95e2d8866a851663cbc76e865b78395090786f6cd9b3bbf4f4"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b1a2eeedcead3a41694130495593a559a668f382eee0727352b9a41e1c45759a"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7d6ae9d593ef8641544d6263c7fa6408cc90370c8cb2bbb65f8d43e5b0351d9c"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5979b5632c3e3534e42ca6ff856bb24b2e3071b37861c2c727ce220d80eee9ed"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dcfe792765fab89c365123c81046ad4103fcabbc4f56d1c1997e6715e8015461"}, - {file = "multidict-6.0.4-cp311-cp311-win32.whl", hash = "sha256:3601a3cece3819534b11d4efc1eb76047488fddd0c85a3948099d5da4d504636"}, - {file = "multidict-6.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:81a4f0b34bd92df3da93315c6a59034df95866014ac08535fc819f043bfd51f0"}, - {file = "multidict-6.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:67040058f37a2a51ed8ea8f6b0e6ee5bd78ca67f169ce6122f3e2ec80dfe9b78"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:853888594621e6604c978ce2a0444a1e6e70c8d253ab65ba11657659dcc9100f"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:39ff62e7d0f26c248b15e364517a72932a611a9b75f35b45be078d81bdb86603"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af048912e045a2dc732847d33821a9d84ba553f5c5f028adbd364dd4765092ac"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e8b901e607795ec06c9e42530788c45ac21ef3aaa11dbd0c69de543bfb79a9"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62501642008a8b9871ddfccbf83e4222cf8ac0d5aeedf73da36153ef2ec222d2"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:99b76c052e9f1bc0721f7541e5e8c05db3941eb9ebe7b8553c625ef88d6eefde"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:509eac6cf09c794aa27bcacfd4d62c885cce62bef7b2c3e8b2e49d365b5003fe"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:21a12c4eb6ddc9952c415f24eef97e3e55ba3af61f67c7bc388dcdec1404a067"}, - {file = 
"multidict-6.0.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:5cad9430ab3e2e4fa4a2ef4450f548768400a2ac635841bc2a56a2052cdbeb87"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab55edc2e84460694295f401215f4a58597f8f7c9466faec545093045476327d"}, - {file = "multidict-6.0.4-cp37-cp37m-win32.whl", hash = "sha256:5a4dcf02b908c3b8b17a45fb0f15b695bf117a67b76b7ad18b73cf8e92608775"}, - {file = "multidict-6.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6ed5f161328b7df384d71b07317f4d8656434e34591f20552c7bcef27b0ab88e"}, - {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5fc1b16f586f049820c5c5b17bb4ee7583092fa0d1c4e28b5239181ff9532e0c"}, - {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1502e24330eb681bdaa3eb70d6358e818e8e8f908a22a1851dfd4e15bc2f8161"}, - {file = "multidict-6.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b692f419760c0e65d060959df05f2a531945af31fda0c8a3b3195d4efd06de11"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45e1ecb0379bfaab5eef059f50115b54571acfbe422a14f668fc8c27ba410e7e"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddd3915998d93fbcd2566ddf9cf62cdb35c9e093075f862935573d265cf8f65d"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:59d43b61c59d82f2effb39a93c48b845efe23a3852d201ed2d24ba830d0b4cf2"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc8e1d0c705233c5dd0c5e6460fbad7827d5d36f310a0fadfd45cc3029762258"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6aa0418fcc838522256761b3415822626f866758ee0bc6632c9486b179d0b52"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6748717bb10339c4760c1e63da040f5f29f5ed6e59d76daee30305894069a660"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4d1a3d7ef5e96b1c9e92f973e43aa5e5b96c659c9bc3124acbbd81b0b9c8a951"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4372381634485bec7e46718edc71528024fcdc6f835baefe517b34a33c731d60"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:fc35cb4676846ef752816d5be2193a1e8367b4c1397b74a565a9d0389c433a1d"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b9d9e4e2b37daddb5c23ea33a3417901fa7c7b3dee2d855f63ee67a0b21e5b1"}, - {file = "multidict-6.0.4-cp38-cp38-win32.whl", hash = "sha256:e41b7e2b59679edfa309e8db64fdf22399eec4b0b24694e1b2104fb789207779"}, - {file = "multidict-6.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:d6c254ba6e45d8e72739281ebc46ea5eb5f101234f3ce171f0e9f5cc86991480"}, - {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16ab77bbeb596e14212e7bab8429f24c1579234a3a462105cda4a66904998664"}, - {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc779e9e6f7fda81b3f9aa58e3a6091d49ad528b11ed19f6621408806204ad35"}, - {file = "multidict-6.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ceef517eca3e03c1cceb22030a3e39cb399ac86bff4e426d4fc6ae49052cc60"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:281af09f488903fde97923c7744bb001a9b23b039a909460d0f14edc7bf59706"}, - {file = 
"multidict-6.0.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52f2dffc8acaba9a2f27174c41c9e57f60b907bb9f096b36b1a1f3be71c6284d"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b41156839806aecb3641f3208c0dafd3ac7775b9c4c422d82ee2a45c34ba81ca"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3fc56f88cc98ef8139255cf8cd63eb2c586531e43310ff859d6bb3a6b51f1"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8316a77808c501004802f9beebde51c9f857054a0c871bd6da8280e718444449"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f70b98cd94886b49d91170ef23ec5c0e8ebb6f242d734ed7ed677b24d50c82cf"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bf6774e60d67a9efe02b3616fee22441d86fab4c6d335f9d2051d19d90a40063"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e69924bfcdda39b722ef4d9aa762b2dd38e4632b3641b1d9a57ca9cd18f2f83a"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:6b181d8c23da913d4ff585afd1155a0e1194c0b50c54fcfe286f70cdaf2b7176"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:52509b5be062d9eafc8170e53026fbc54cf3b32759a23d07fd935fb04fc22d95"}, - {file = "multidict-6.0.4-cp39-cp39-win32.whl", hash = "sha256:27c523fbfbdfd19c6867af7346332b62b586eed663887392cff78d614f9ec313"}, - {file = "multidict-6.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:33029f5734336aa0d4c0384525da0387ef89148dc7191aae00ca5fb23d7aafc2"}, - {file = "multidict-6.0.4.tar.gz", hash = "sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49"}, + {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"}, + {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"}, + {file = "multidict-6.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = 
"sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc"}, + {file = "multidict-6.0.5-cp310-cp310-win32.whl", hash = "sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319"}, + {file = "multidict-6.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85f67aed7bb647f93e7520633d8f51d3cbc6ab96957c71272b286b2f30dc70ed"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425bf820055005bfc8aa9a0b99ccb52cc2f4070153e34b701acc98d201693733"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3eb1ceec286eba8220c26f3b0096cf189aea7057b6e7b7a2e60ed36b373b77f"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7901c05ead4b3fb75113fb1dd33eb1253c6d3ee37ce93305acd9d38e0b5f21a4"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e0e79d91e71b9867c73323a3444724d496c037e578a0e1755ae159ba14f4f3d1"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:29bfeb0dff5cb5fdab2023a7a9947b3b4af63e9c47cae2a10ad58394b517fddc"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e030047e85cbcedbfc073f71836d62dd5dadfbe7531cae27789ff66bc551bd5e"}, + {file = "multidict-6.0.5-cp311-cp311-win32.whl", hash = "sha256:2f4848aa3baa109e6ab81fe2006c77ed4d3cd1e0ac2c1fbddb7b1277c168788c"}, + {file = "multidict-6.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:2faa5ae9376faba05f630d7e5e6be05be22913782b927b19d12b8145968a85ea"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:51d035609b86722963404f711db441cf7134f1889107fb171a970c9701f92e1e"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cbebcd5bcaf1eaf302617c114aa67569dd3f090dd0ce8ba9e35e9985b41ac35b"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ffc42c922dbfddb4a4c3b438eb056828719f07608af27d163191cb3e3aa6cc5"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:ceb3b7e6a0135e092de86110c5a74e46bda4bd4fbfeeb3a3bcec79c0f861e450"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79660376075cfd4b2c80f295528aa6beb2058fd289f4c9252f986751a4cd0496"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4428b29611e989719874670fd152b6625500ad6c686d464e99f5aaeeaca175a"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d84a5c3a5f7ce6db1f999fb9438f686bc2e09d38143f2d93d8406ed2dd6b9226"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c0de87358b192de7ea9649beb392f107dcad9ad27276324c24c91774ca5271"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:79a6d2ba910adb2cbafc95dad936f8b9386e77c84c35bc0add315b856d7c3abb"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:92d16a3e275e38293623ebf639c471d3e03bb20b8ebb845237e0d3664914caef"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fb616be3538599e797a2017cccca78e354c767165e8858ab5116813146041a24"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:14c2976aa9038c2629efa2c148022ed5eb4cb939e15ec7aace7ca932f48f9ba6"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:435a0984199d81ca178b9ae2c26ec3d49692d20ee29bc4c11a2a8d4514c67eda"}, + {file = "multidict-6.0.5-cp312-cp312-win32.whl", hash = "sha256:9fe7b0653ba3d9d65cbe7698cca585bf0f8c83dbbcc710db9c90f478e175f2d5"}, + {file = "multidict-6.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:01265f5e40f5a17f8241d52656ed27192be03bfa8764d88e8220141d1e4b3556"}, + {file = "multidict-6.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:19fe01cea168585ba0f678cad6f58133db2aa14eccaf22f88e4a6dccadfad8b3"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf7a982604375a8d49b6cc1b781c1747f243d91b81035a9b43a2126c04766f5"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:107c0cdefe028703fb5dafe640a409cb146d44a6ae201e55b35a4af8e95457dd"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:403c0911cd5d5791605808b942c88a8155c2592e05332d2bf78f18697a5fa15e"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aeaf541ddbad8311a87dd695ed9642401131ea39ad7bc8cf3ef3967fd093b626"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4972624066095e52b569e02b5ca97dbd7a7ddd4294bf4e7247d52635630dd83"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d946b0a9eb8aaa590df1fe082cee553ceab173e6cb5b03239716338629c50c7a"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b55358304d7a73d7bdf5de62494aaf70bd33015831ffd98bc498b433dfe5b10c"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:a3145cb08d8625b2d3fee1b2d596a8766352979c9bffe5d7833e0503d0f0b5e5"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d65f25da8e248202bd47445cec78e0025c0fe7582b23ec69c3b27a640dd7a8e3"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:c9bf56195c6bbd293340ea82eafd0071cb3d450c703d2c93afb89f93b8386ccc"}, + {file = "multidict-6.0.5-cp37-cp37m-win32.whl", hash = "sha256:69db76c09796b313331bb7048229e3bee7928eb62bab5e071e9f7fcc4879caee"}, + {file = "multidict-6.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:fce28b3c8a81b6b36dfac9feb1de115bab619b3c13905b419ec71d03a3fc1423"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76f067f5121dcecf0d63a67f29080b26c43c71a98b10c701b0677e4a065fbd54"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b82cc8ace10ab5bd93235dfaab2021c70637005e1ac787031f4d1da63d493c1d"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cb241881eefd96b46f89b1a056187ea8e9ba14ab88ba632e68d7a2ecb7aadf7"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e94e6912639a02ce173341ff62cc1201232ab86b8a8fcc05572741a5dc7d93"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09a892e4a9fb47331da06948690ae38eaa2426de97b4ccbfafbdcbe5c8f37ff8"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55205d03e8a598cfc688c71ca8ea5f66447164efff8869517f175ea632c7cb7b"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37b15024f864916b4951adb95d3a80c9431299080341ab9544ed148091b53f50"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2a1dee728b52b33eebff5072817176c172050d44d67befd681609b4746e1c2e"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:edd08e6f2f1a390bf137080507e44ccc086353c8e98c657e666c017718561b89"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:60d698e8179a42ec85172d12f50b1668254628425a6bd611aba022257cac1386"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3d25f19500588cbc47dc19081d78131c32637c25804df8414463ec908631e453"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4cc0ef8b962ac7a5e62b9e826bd0cd5040e7d401bc45a6835910ed699037a461"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:eca2e9d0cc5a889850e9bbd68e98314ada174ff6ccd1129500103df7a94a7a44"}, + {file = "multidict-6.0.5-cp38-cp38-win32.whl", hash = "sha256:4a6a4f196f08c58c59e0b8ef8ec441d12aee4125a7d4f4fef000ccb22f8d7241"}, + {file = "multidict-6.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:0275e35209c27a3f7951e1ce7aaf93ce0d163b28948444bec61dd7badc6d3f8c"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e7be68734bd8c9a513f2b0cfd508802d6609da068f40dc57d4e3494cefc92929"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1d9ea7a7e779d7a3561aade7d596649fbecfa5c08a7674b11b423783217933f9"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ea1456df2a27c73ce51120fa2f519f1bea2f4a03a917f4a43c8707cf4cbbae1a"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf590b134eb70629e350691ecca88eac3e3b8b3c86992042fb82e3cb1830d5e1"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c0631926c4f58e9a5ccce555ad7747d9a9f8b10619621f22f9635f069f6233e"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:dce1c6912ab9ff5f179eaf6efe7365c1f425ed690b03341911bf4939ef2f3046"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0868d64af83169e4d4152ec612637a543f7a336e4a307b119e98042e852ad9c"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:141b43360bfd3bdd75f15ed811850763555a251e38b2405967f8e25fb43f7d40"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7df704ca8cf4a073334e0427ae2345323613e4df18cc224f647f251e5e75a527"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6214c5a5571802c33f80e6c84713b2c79e024995b9c5897f794b43e714daeec9"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd6c8fca38178e12c00418de737aef1261576bd1b6e8c6134d3e729a4e858b38"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e02021f87a5b6932fa6ce916ca004c4d441509d33bbdbeca70d05dff5e9d2479"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ebd8d160f91a764652d3e51ce0d2956b38efe37c9231cd82cfc0bed2e40b581c"}, + {file = "multidict-6.0.5-cp39-cp39-win32.whl", hash = "sha256:04da1bb8c8dbadf2a18a452639771951c662c5ad03aefe4884775454be322c9b"}, + {file = "multidict-6.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:d6f6d4f185481c9669b9447bf9d9cf3b95a0e9df9d169bbc17e363b7d5487755"}, + {file = "multidict-6.0.5-py3-none-any.whl", hash = "sha256:0d63c74e3d7ab26de115c49bffc92cc77ed23395303d496eae515d4204a625e7"}, + {file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"}, ] [[package]] name = "mypy-boto3-glue" -version = "1.34.7" -description = "Type annotations for boto3.Glue 1.34.7 service generated with mypy-boto3-builder 7.23.0" +version = "1.34.35" +description = "Type annotations for boto3.Glue 1.34.35 service generated with mypy-boto3-builder 7.23.1" optional = true python-versions = ">=3.8" files = [ - {file = "mypy-boto3-glue-1.34.7.tar.gz", hash = "sha256:057a082527e3884ec72b499573094af9dbe0bd3639b26772ec883a17d2c22d12"}, - {file = "mypy_boto3_glue-1.34.7-py3-none-any.whl", hash = "sha256:59e3cced5c3367f0eb2136b62efeb7914f2779a17e574e3c965d5567c7c63de1"}, + {file = "mypy-boto3-glue-1.34.35.tar.gz", hash = "sha256:f8abe4f2e07d299a7b9b0dec30033a9877414e790ee49f274950a5b6d9dd3036"}, + {file = "mypy_boto3_glue-1.34.35-py3-none-any.whl", hash = "sha256:361b21c108ffae7868a25256aaeeca6f28fd1072cbeebebb9c4c4dbc3cbdc721"}, ] [package.dependencies] @@ -2276,6 +2410,51 @@ files = [ {file = "numpy-1.24.4.tar.gz", hash = "sha256:80f5e3a4e498641401868df4208b74581206afbee7cf7b8329daae82676d9463"}, ] +[[package]] +name = "numpy" +version = "1.26.4" +description = "Fundamental package for array computing in Python" +optional = true +python-versions = ">=3.9" +files = [ + {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, + {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, + {file = 
"numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, + {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, + {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, + {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, + {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, + {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, + {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, + {file = 
"numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, + {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, + {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, + {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, +] + [[package]] name = "oauthlib" version = "3.2.2" @@ -2294,47 +2473,49 @@ signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] [[package]] name = "openapi-schema-validator" -version = "0.6.2" +version = "0.4.3" description = "OpenAPI schema validation for Python" optional = false -python-versions = ">=3.8.0,<4.0.0" +python-versions = ">=3.7.0,<4.0.0" files = [ - {file = "openapi_schema_validator-0.6.2-py3-none-any.whl", hash = "sha256:c4887c1347c669eb7cded9090f4438b710845cd0f90d1fb9e1b3303fb37339f8"}, - {file = "openapi_schema_validator-0.6.2.tar.gz", hash = "sha256:11a95c9c9017912964e3e5f2545a5b11c3814880681fcacfb73b1759bb4f2804"}, + {file = "openapi_schema_validator-0.4.3-py3-none-any.whl", hash = "sha256:f1eff2a7936546a3ce62b88a17d09de93c9bd229cbc43cb696c988a61a382548"}, + {file = "openapi_schema_validator-0.4.3.tar.gz", hash = "sha256:6940dba9f4906c97078fea6fd9d5a3a3384207db368c4e32f6af6abd7c5c560b"}, ] [package.dependencies] -jsonschema = ">=4.19.1,<5.0.0" -jsonschema-specifications = ">=2023.5.2,<2024.0.0" +jsonschema = ">=4.0.0,<5.0.0" rfc3339-validator = "*" [[package]] name = "openapi-spec-validator" -version = "0.7.1" +version = "0.5.5" description = "OpenAPI 2.0 (aka Swagger) and OpenAPI 3 spec validator" optional = false -python-versions = ">=3.8.0,<4.0.0" +python-versions = ">=3.7.0,<4.0.0" files = [ - {file = "openapi_spec_validator-0.7.1-py3-none-any.whl", hash = "sha256:3c81825043f24ccbcd2f4b149b11e8231abce5ba84f37065e14ec947d8f4e959"}, - {file = "openapi_spec_validator-0.7.1.tar.gz", hash = "sha256:8577b85a8268685da6f8aa30990b83b7960d4d1117e901d451b5d572605e5ec7"}, + {file = "openapi_spec_validator-0.5.5-py3-none-any.whl", hash = "sha256:93ba247f585e1447214b4207728a7cce3726d148238217be69e6b8725c118fbe"}, + {file = "openapi_spec_validator-0.5.5.tar.gz", hash = "sha256:3010df5237748e25d7fac2b2aaf13457c1afd02735b2bd6f008a10079c8f443a"}, ] [package.dependencies] -importlib-resources = {version = ">=5.8,<7.0", markers = "python_version < \"3.9\""} -jsonschema = ">=4.18.0,<5.0.0" -jsonschema-path = ">=0.3.1,<0.4.0" +importlib-resources = {version = ">=5.8.0,<6.0.0", markers = "python_version < \"3.9\""} +jsonschema = ">=4.0.0,<5.0.0" +jsonschema-spec = ">=0.1.1,<0.2.0" lazy-object-proxy = ">=1.7.1,<2.0.0" -openapi-schema-validator = ">=0.6.0,<0.7.0" +openapi-schema-validator = ">=0.4.2,<0.5.0" + +[package.extras] +requests = ["requests"] [[package]] name = "packaging" 
-version = "23.1" +version = "23.2" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"}, - {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, ] [[package]] @@ -2374,8 +2555,8 @@ files = [ [package.dependencies] numpy = [ {version = ">=1.20.3", markers = "python_version < \"3.10\""}, - {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, {version = ">=1.21.0", markers = "python_version >= \"3.10\" and python_version < \"3.11\""}, + {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, ] python-dateutil = ">=2.8.2" pytz = ">=2020.1" @@ -2439,28 +2620,28 @@ files = [ [[package]] name = "platformdirs" -version = "3.10.0" +version = "4.2.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "platformdirs-3.10.0-py3-none-any.whl", hash = "sha256:d7c24979f292f916dc9cbf8648319032f551ea8c49a4c9bf2fb556a02070ec1d"}, - {file = "platformdirs-3.10.0.tar.gz", hash = "sha256:b45696dab2d7cc691a3226759c0d3b00c47c8b6e293d96f6436f733303f77f6d"}, + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, ] [package.extras] -docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] [[package]] name = "pluggy" -version = "1.3.0" +version = "1.4.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" files = [ - {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, - {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, ] [package.extras] @@ -2506,26 +2687,52 @@ virtualenv = ">=20.10.0" [[package]] name = "protobuf" -version = "4.24.3" +version = "4.25.3" description = "" optional = true -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "protobuf-4.24.3-cp310-abi3-win32.whl", hash = "sha256:20651f11b6adc70c0f29efbe8f4a94a74caf61b6200472a9aea6e19898f9fcf4"}, - {file = "protobuf-4.24.3-cp310-abi3-win_amd64.whl", hash = "sha256:3d42e9e4796a811478c783ef63dc85b5a104b44aaaca85d4864d5b886e4b05e3"}, - {file = 
"protobuf-4.24.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:6e514e8af0045be2b56e56ae1bb14f43ce7ffa0f68b1c793670ccbe2c4fc7d2b"}, - {file = "protobuf-4.24.3-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:ba53c2f04798a326774f0e53b9c759eaef4f6a568ea7072ec6629851c8435959"}, - {file = "protobuf-4.24.3-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:f6ccbcf027761a2978c1406070c3788f6de4a4b2cc20800cc03d52df716ad675"}, - {file = "protobuf-4.24.3-cp37-cp37m-win32.whl", hash = "sha256:1b182c7181a2891e8f7f3a1b5242e4ec54d1f42582485a896e4de81aa17540c2"}, - {file = "protobuf-4.24.3-cp37-cp37m-win_amd64.whl", hash = "sha256:b0271a701e6782880d65a308ba42bc43874dabd1a0a0f41f72d2dac3b57f8e76"}, - {file = "protobuf-4.24.3-cp38-cp38-win32.whl", hash = "sha256:e29d79c913f17a60cf17c626f1041e5288e9885c8579832580209de8b75f2a52"}, - {file = "protobuf-4.24.3-cp38-cp38-win_amd64.whl", hash = "sha256:067f750169bc644da2e1ef18c785e85071b7c296f14ac53e0900e605da588719"}, - {file = "protobuf-4.24.3-cp39-cp39-win32.whl", hash = "sha256:2da777d34b4f4f7613cdf85c70eb9a90b1fbef9d36ae4a0ccfe014b0b07906f1"}, - {file = "protobuf-4.24.3-cp39-cp39-win_amd64.whl", hash = "sha256:f631bb982c5478e0c1c70eab383af74a84be66945ebf5dd6b06fc90079668d0b"}, - {file = "protobuf-4.24.3-py3-none-any.whl", hash = "sha256:f6f8dc65625dadaad0c8545319c2e2f0424fede988368893ca3844261342c11a"}, - {file = "protobuf-4.24.3.tar.gz", hash = "sha256:12e9ad2ec079b833176d2921be2cb24281fa591f0b119b208b788adc48c2561d"}, + {file = "protobuf-4.25.3-cp310-abi3-win32.whl", hash = "sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa"}, + {file = "protobuf-4.25.3-cp310-abi3-win_amd64.whl", hash = "sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8"}, + {file = "protobuf-4.25.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c"}, + {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019"}, + {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d"}, + {file = "protobuf-4.25.3-cp38-cp38-win32.whl", hash = "sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2"}, + {file = "protobuf-4.25.3-cp38-cp38-win_amd64.whl", hash = "sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4"}, + {file = "protobuf-4.25.3-cp39-cp39-win32.whl", hash = "sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4"}, + {file = "protobuf-4.25.3-cp39-cp39-win_amd64.whl", hash = "sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c"}, + {file = "protobuf-4.25.3-py3-none-any.whl", hash = "sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9"}, + {file = "protobuf-4.25.3.tar.gz", hash = "sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c"}, ] +[[package]] +name = "psutil" +version = "5.9.8" +description = "Cross-platform lib for process and system monitoring in Python." 
+optional = true
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
+files = [
+ {file = "psutil-5.9.8-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:26bd09967ae00920df88e0352a91cff1a78f8d69b3ecabbfe733610c0af486c8"},
+ {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:05806de88103b25903dff19bb6692bd2e714ccf9e668d050d144012055cbca73"},
+ {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:611052c4bc70432ec770d5d54f64206aa7203a101ec273a0cd82418c86503bb7"},
+ {file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:50187900d73c1381ba1454cf40308c2bf6f34268518b3f36a9b663ca87e65e36"},
+ {file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:02615ed8c5ea222323408ceba16c60e99c3f91639b07da6373fb7e6539abc56d"},
+ {file = "psutil-5.9.8-cp27-none-win32.whl", hash = "sha256:36f435891adb138ed3c9e58c6af3e2e6ca9ac2f365efe1f9cfef2794e6c93b4e"},
+ {file = "psutil-5.9.8-cp27-none-win_amd64.whl", hash = "sha256:bd1184ceb3f87651a67b2708d4c3338e9b10c5df903f2e3776b62303b26cb631"},
+ {file = "psutil-5.9.8-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:aee678c8720623dc456fa20659af736241f575d79429a0e5e9cf88ae0605cc81"},
+ {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cb6403ce6d8e047495a701dc7c5bd788add903f8986d523e3e20b98b733e421"},
+ {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d06016f7f8625a1825ba3732081d77c94589dca78b7a3fc072194851e88461a4"},
+ {file = "psutil-5.9.8-cp36-cp36m-win32.whl", hash = "sha256:7d79560ad97af658a0f6adfef8b834b53f64746d45b403f225b85c5c2c140eee"},
+ {file = "psutil-5.9.8-cp36-cp36m-win_amd64.whl", hash = "sha256:27cc40c3493bb10de1be4b3f07cae4c010ce715290a5be22b98493509c6299e2"},
+ {file = "psutil-5.9.8-cp37-abi3-win32.whl", hash = "sha256:bc56c2a1b0d15aa3eaa5a60c9f3f8e3e565303b465dbf57a1b730e7a2b9844e0"},
+ {file = "psutil-5.9.8-cp37-abi3-win_amd64.whl", hash = "sha256:8db4c1b57507eef143a15a6884ca10f7c73876cdf5d51e713151c1236a0e68cf"},
+ {file = "psutil-5.9.8-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:d16bbddf0693323b8c6123dd804100241da461e41d6e332fb0ba6058f630f8c8"},
+ {file = "psutil-5.9.8.tar.gz", hash = "sha256:6be126e3225486dff286a8fb9a06246a5253f4c7c53b475ea5f5ac934e64194c"},
+]
+
+[package.extras]
+test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"]
+
[[package]]
name = "psycopg2-binary"
version = "2.9.9"
@@ -2559,7 +2766,6 @@ files = [
{file = "psycopg2_binary-2.9.9-cp311-cp311-win32.whl", hash = "sha256:dc4926288b2a3e9fd7b50dc6a1909a13bbdadfc67d93f3374d984e56f885579d"},
{file = "psycopg2_binary-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:b76bedd166805480ab069612119ea636f5ab8f8771e640ae103e05a4aae3e417"},
{file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:8532fd6e6e2dc57bcb3bc90b079c60de896d2128c5d9d6f24a63875a95a088cf"},
- {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b0605eaed3eb239e87df0d5e3c6489daae3f7388d455d0c0b4df899519c6a38d"},
{file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f8544b092a29a6ddd72f3556a9fcf249ec412e10ad28be6a0c0d948924f2212"},
{file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2d423c8d8a3c82d08fe8af900ad5b613ce3632a1249fd6a223941d0735fce493"},
{file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e5afae772c00980525f6d6ecf7cbca55676296b580c0e6abb407f15f3706996"},
@@ -2568,8 +2774,6 @@ files = [
{file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:cb16c65dcb648d0a43a2521f2f0a2300f40639f6f8c1ecbc662141e4e3e1ee07"},
{file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:911dda9c487075abd54e644ccdf5e5c16773470a6a5d3826fda76699410066fb"},
{file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:57fede879f08d23c85140a360c6a77709113efd1c993923c59fde17aa27599fe"},
- {file = "psycopg2_binary-2.9.9-cp312-cp312-win32.whl", hash = "sha256:64cf30263844fa208851ebb13b0732ce674d8ec6a0c86a4e160495d299ba3c93"},
- {file = "psycopg2_binary-2.9.9-cp312-cp312-win_amd64.whl", hash = "sha256:81ff62668af011f9a48787564ab7eded4e9fb17a4a6a74af5ffa6a457400d2ab"},
{file = "psycopg2_binary-2.9.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2293b001e319ab0d869d660a704942c9e2cce19745262a8aba2115ef41a0a42a"},
{file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03ef7df18daf2c4c07e2695e8cfd5ee7f748a1d54d802330985a78d2a5a6dca9"},
{file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a602ea5aff39bb9fac6308e9c9d82b9a35c2bf288e184a816002c9fae930b77"},
@@ -2609,75 +2813,86 @@ files = [
[[package]]
name = "py-partiql-parser"
-version = "0.5.0"
+version = "0.5.1"
description = "Pure Python PartiQL Parser"
optional = false
python-versions = "*"
files = [
- {file = "py-partiql-parser-0.5.0.tar.gz", hash = "sha256:427a662e87d51a0a50150fc8b75c9ebb4a52d49129684856c40c88b8c8e027e4"},
- {file = "py_partiql_parser-0.5.0-py3-none-any.whl", hash = "sha256:dc454c27526adf62deca5177ea997bf41fac4fd109c5d4c8d81f984de738ba8f"},
+ {file = "py-partiql-parser-0.5.1.tar.gz", hash = "sha256:aeac8f46529d8651bbae88a1a6c14dc3aa38ebc4bc6bd1eb975044c0564246c6"},
+ {file = "py_partiql_parser-0.5.1-py3-none-any.whl", hash = "sha256:53053e70987dea2983e1990ad85f87a7d8cec13dd4a4b065a740bcfd661f5a6b"},
]
[package.extras]
dev = ["black (==22.6.0)", "flake8", "mypy", "pytest"]
+[[package]]
+name = "py4j"
+version = "0.10.9.7"
+description = "Enables Python programs to dynamically access arbitrary Java objects"
+optional = false
+python-versions = "*"
+files = [
+ {file = "py4j-0.10.9.7-py2.py3-none-any.whl", hash = "sha256:85defdfd2b2376eb3abf5ca6474b51ab7e0de341c75a02f46dc9b5976f5a5c1b"},
+ {file = "py4j-0.10.9.7.tar.gz", hash = "sha256:0b6e5315bb3ada5cf62ac651d107bb2ebc02def3dee9d9548e3baac644ea8dbb"},
+]
+
[[package]]
name = "pyarrow"
-version = "14.0.2"
+version = "15.0.0"
description = "Python library for Apache Arrow"
optional = true
python-versions = ">=3.8"
files = [
- {file = "pyarrow-14.0.2-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:ba9fe808596c5dbd08b3aeffe901e5f81095baaa28e7d5118e01354c64f22807"},
- {file = "pyarrow-14.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:22a768987a16bb46220cef490c56c671993fbee8fd0475febac0b3e16b00a10e"},
- {file = "pyarrow-14.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2dbba05e98f247f17e64303eb876f4a80fcd32f73c7e9ad975a83834d81f3fda"},
- {file = "pyarrow-14.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a898d134d00b1eca04998e9d286e19653f9d0fcb99587310cd10270907452a6b"},
- {file = "pyarrow-14.0.2-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:87e879323f256cb04267bb365add7208f302df942eb943c93a9dfeb8f44840b1"},
- {file = "pyarrow-14.0.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:76fc257559404ea5f1306ea9a3ff0541bf996ff3f7b9209fc517b5e83811fa8e"},
- {file = "pyarrow-14.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0c4a18e00f3a32398a7f31da47fefcd7a927545b396e1f15d0c85c2f2c778cd"},
- {file = "pyarrow-14.0.2-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:87482af32e5a0c0cce2d12eb3c039dd1d853bd905b04f3f953f147c7a196915b"},
- {file = "pyarrow-14.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:059bd8f12a70519e46cd64e1ba40e97eae55e0cbe1695edd95384653d7626b23"},
- {file = "pyarrow-14.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f16111f9ab27e60b391c5f6d197510e3ad6654e73857b4e394861fc79c37200"},
- {file = "pyarrow-14.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06ff1264fe4448e8d02073f5ce45a9f934c0f3db0a04460d0b01ff28befc3696"},
- {file = "pyarrow-14.0.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:6dd4f4b472ccf4042f1eab77e6c8bce574543f54d2135c7e396f413046397d5a"},
- {file = "pyarrow-14.0.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:32356bfb58b36059773f49e4e214996888eeea3a08893e7dbde44753799b2a02"},
- {file = "pyarrow-14.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:52809ee69d4dbf2241c0e4366d949ba035cbcf48409bf404f071f624ed313a2b"},
- {file = "pyarrow-14.0.2-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:c87824a5ac52be210d32906c715f4ed7053d0180c1060ae3ff9b7e560f53f944"},
- {file = "pyarrow-14.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a25eb2421a58e861f6ca91f43339d215476f4fe159eca603c55950c14f378cc5"},
- {file = "pyarrow-14.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c1da70d668af5620b8ba0a23f229030a4cd6c5f24a616a146f30d2386fec422"},
- {file = "pyarrow-14.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2cc61593c8e66194c7cdfae594503e91b926a228fba40b5cf25cc593563bcd07"},
- {file = "pyarrow-14.0.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:78ea56f62fb7c0ae8ecb9afdd7893e3a7dbeb0b04106f5c08dbb23f9c0157591"},
- {file = "pyarrow-14.0.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:37c233ddbce0c67a76c0985612fef27c0c92aef9413cf5aa56952f359fcb7379"},
- {file = "pyarrow-14.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:e4b123ad0f6add92de898214d404e488167b87b5dd86e9a434126bc2b7a5578d"},
- {file = "pyarrow-14.0.2-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:e354fba8490de258be7687f341bc04aba181fc8aa1f71e4584f9890d9cb2dec2"},
- {file = "pyarrow-14.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:20e003a23a13da963f43e2b432483fdd8c38dc8882cd145f09f21792e1cf22a1"},
- {file = "pyarrow-14.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc0de7575e841f1595ac07e5bc631084fd06ca8b03c0f2ecece733d23cd5102a"},
- {file = "pyarrow-14.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66e986dc859712acb0bd45601229021f3ffcdfc49044b64c6d071aaf4fa49e98"},
- {file = "pyarrow-14.0.2-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:f7d029f20ef56673a9730766023459ece397a05001f4e4d13805111d7c2108c0"},
- {file = "pyarrow-14.0.2-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:209bac546942b0d8edc8debda248364f7f668e4aad4741bae58e67d40e5fcf75"},
- {file = "pyarrow-14.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:1e6987c5274fb87d66bb36816afb6f65707546b3c45c44c28e3c4133c010a881"},
- {file = "pyarrow-14.0.2-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:a01d0052d2a294a5f56cc1862933014e696aa08cc7b620e8c0cce5a5d362e976"},
- {file = "pyarrow-14.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a51fee3a7db4d37f8cda3ea96f32530620d43b0489d169b285d774da48ca9785"},
- {file = "pyarrow-14.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64df2bf1ef2ef14cee531e2dfe03dd924017650ffaa6f9513d7a1bb291e59c15"},
- {file = "pyarrow-14.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c0fa3bfdb0305ffe09810f9d3e2e50a2787e3a07063001dcd7adae0cee3601a"},
- {file = "pyarrow-14.0.2-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:c65bf4fd06584f058420238bc47a316e80dda01ec0dfb3044594128a6c2db794"},
- {file = "pyarrow-14.0.2-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:63ac901baec9369d6aae1cbe6cca11178fb018a8d45068aaf5bb54f94804a866"},
- {file = "pyarrow-14.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:75ee0efe7a87a687ae303d63037d08a48ef9ea0127064df18267252cfe2e9541"},
- {file = "pyarrow-14.0.2.tar.gz", hash = "sha256:36cef6ba12b499d864d1def3e990f97949e0b79400d08b7cf74504ffbd3eb025"},
+ {file = "pyarrow-15.0.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:0a524532fd6dd482edaa563b686d754c70417c2f72742a8c990b322d4c03a15d"},
+ {file = "pyarrow-15.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:60a6bdb314affa9c2e0d5dddf3d9cbb9ef4a8dddaa68669975287d47ece67642"},
+ {file = "pyarrow-15.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:66958fd1771a4d4b754cd385835e66a3ef6b12611e001d4e5edfcef5f30391e2"},
+ {file = "pyarrow-15.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f500956a49aadd907eaa21d4fff75f73954605eaa41f61cb94fb008cf2e00c6"},
+ {file = "pyarrow-15.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:6f87d9c4f09e049c2cade559643424da84c43a35068f2a1c4653dc5b1408a929"},
+ {file = "pyarrow-15.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:85239b9f93278e130d86c0e6bb455dcb66fc3fd891398b9d45ace8799a871a1e"},
+ {file = "pyarrow-15.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:5b8d43e31ca16aa6e12402fcb1e14352d0d809de70edd185c7650fe80e0769e3"},
+ {file = "pyarrow-15.0.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:fa7cd198280dbd0c988df525e50e35b5d16873e2cdae2aaaa6363cdb64e3eec5"},
+ {file = "pyarrow-15.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8780b1a29d3c8b21ba6b191305a2a607de2e30dab399776ff0aa09131e266340"},
+ {file = "pyarrow-15.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe0ec198ccc680f6c92723fadcb97b74f07c45ff3fdec9dd765deb04955ccf19"},
+ {file = "pyarrow-15.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:036a7209c235588c2f07477fe75c07e6caced9b7b61bb897c8d4e52c4b5f9555"},
+ {file = "pyarrow-15.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:2bd8a0e5296797faf9a3294e9fa2dc67aa7f10ae2207920dbebb785c77e9dbe5"},
+ {file = "pyarrow-15.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:e8ebed6053dbe76883a822d4e8da36860f479d55a762bd9e70d8494aed87113e"},
+ {file = "pyarrow-15.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:17d53a9d1b2b5bd7d5e4cd84d018e2a45bc9baaa68f7e6e3ebed45649900ba99"},
+ {file = "pyarrow-15.0.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:9950a9c9df24090d3d558b43b97753b8f5867fb8e521f29876aa021c52fda351"},
+ {file = "pyarrow-15.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:003d680b5e422d0204e7287bb3fa775b332b3fce2996aa69e9adea23f5c8f970"},
+ {file = "pyarrow-15.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f75fce89dad10c95f4bf590b765e3ae98bcc5ba9f6ce75adb828a334e26a3d40"},
+ {file = "pyarrow-15.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca9cb0039923bec49b4fe23803807e4ef39576a2bec59c32b11296464623dc2"},
+ {file = "pyarrow-15.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:9ed5a78ed29d171d0acc26a305a4b7f83c122d54ff5270810ac23c75813585e4"},
+ {file = "pyarrow-15.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:6eda9e117f0402dfcd3cd6ec9bfee89ac5071c48fc83a84f3075b60efa96747f"},
+ {file = "pyarrow-15.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:9a3a6180c0e8f2727e6f1b1c87c72d3254cac909e609f35f22532e4115461177"},
+ {file = "pyarrow-15.0.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:19a8918045993349b207de72d4576af0191beef03ea655d8bdb13762f0cd6eac"},
+ {file = "pyarrow-15.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d0ec076b32bacb6666e8813a22e6e5a7ef1314c8069d4ff345efa6246bc38593"},
+ {file = "pyarrow-15.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5db1769e5d0a77eb92344c7382d6543bea1164cca3704f84aa44e26c67e320fb"},
+ {file = "pyarrow-15.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2617e3bf9df2a00020dd1c1c6dce5cc343d979efe10bc401c0632b0eef6ef5b"},
+ {file = "pyarrow-15.0.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:d31c1d45060180131caf10f0f698e3a782db333a422038bf7fe01dace18b3a31"},
+ {file = "pyarrow-15.0.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:c8c287d1d479de8269398b34282e206844abb3208224dbdd7166d580804674b7"},
+ {file = "pyarrow-15.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:07eb7f07dc9ecbb8dace0f58f009d3a29ee58682fcdc91337dfeb51ea618a75b"},
+ {file = "pyarrow-15.0.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:47af7036f64fce990bb8a5948c04722e4e3ea3e13b1007ef52dfe0aa8f23cf7f"},
+ {file = "pyarrow-15.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:93768ccfff85cf044c418bfeeafce9a8bb0cee091bd8fd19011aff91e58de540"},
+ {file = "pyarrow-15.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6ee87fd6892700960d90abb7b17a72a5abb3b64ee0fe8db6c782bcc2d0dc0b4"},
+ {file = "pyarrow-15.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:001fca027738c5f6be0b7a3159cc7ba16a5c52486db18160909a0831b063c4e4"},
+ {file = "pyarrow-15.0.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:d1c48648f64aec09accf44140dccb92f4f94394b8d79976c426a5b79b11d4fa7"},
+ {file = "pyarrow-15.0.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:972a0141be402bb18e3201448c8ae62958c9c7923dfaa3b3d4530c835ac81aed"},
+ {file = "pyarrow-15.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:f01fc5cf49081426429127aa2d427d9d98e1cb94a32cb961d583a70b7c4504e6"},
+ {file = "pyarrow-15.0.0.tar.gz", hash = "sha256:876858f549d540898f927eba4ef77cd549ad8d24baa3207cf1b72e5788b50e83"},
]
[package.dependencies]
-numpy = ">=1.16.6"
+numpy = ">=1.16.6,<2"
[[package]]
name = "pyasn1"
-version = "0.5.0"
+version = "0.5.1"
description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)"
-optional = false
+optional = true
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
files = [
- {file = "pyasn1-0.5.0-py2.py3-none-any.whl", hash = "sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57"},
- {file = "pyasn1-0.5.0.tar.gz", hash = "sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde"},
+ {file = "pyasn1-0.5.1-py2.py3-none-any.whl", hash = "sha256:4439847c58d40b1d0a573d07e3856e95333f1976294494c325775aeca506eb58"},
+ {file = "pyasn1-0.5.1.tar.gz", hash = "sha256:6d391a96e59b23130a5cfa74d6fd7f388dbbe26cc8f1edf39fdddf08d9d6676c"},
]
[[package]]
@@ -2707,18 +2922,18 @@ files = [
[[package]]
name = "pydantic"
-version = "2.5.3"
+version = "2.6.1"
description = "Data validation using Python type hints"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "pydantic-2.5.3-py3-none-any.whl", hash = "sha256:d0caf5954bee831b6bfe7e338c32b9e30c85dfe080c843680783ac2b631673b4"},
- {file = "pydantic-2.5.3.tar.gz", hash = "sha256:b3ef57c62535b0941697cce638c08900d87fcb67e29cfa99e8a68f747f393f7a"},
+ {file = "pydantic-2.6.1-py3-none-any.whl", hash = "sha256:0b6a909df3192245cb736509a92ff69e4fef76116feffec68e93a567347bae6f"},
+ {file = "pydantic-2.6.1.tar.gz", hash = "sha256:4fd5c182a2488dc63e6d32737ff19937888001e2a6d86e94b3f233104a5d1fa9"},
]
[package.dependencies]
annotated-types = ">=0.4.0"
-pydantic-core = "2.14.6"
+pydantic-core = "2.16.2"
typing-extensions = ">=4.6.1"
[package.extras]
@@ -2726,116 +2941,90 @@ email = ["email-validator (>=2.0.0)"]
[[package]]
name = "pydantic-core"
-version = "2.14.6"
+version = "2.16.2"
description = ""
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "pydantic_core-2.14.6-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:72f9a942d739f09cd42fffe5dc759928217649f070056f03c70df14f5770acf9"},
- {file = "pydantic_core-2.14.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6a31d98c0d69776c2576dda4b77b8e0c69ad08e8b539c25c7d0ca0dc19a50d6c"},
- {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5aa90562bc079c6c290f0512b21768967f9968e4cfea84ea4ff5af5d917016e4"},
- {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:370ffecb5316ed23b667d99ce4debe53ea664b99cc37bfa2af47bc769056d534"},
- {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f85f3843bdb1fe80e8c206fe6eed7a1caeae897e496542cee499c374a85c6e08"},
- {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9862bf828112e19685b76ca499b379338fd4c5c269d897e218b2ae8fcb80139d"},
- {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:036137b5ad0cb0004c75b579445a1efccd072387a36c7f217bb8efd1afbe5245"},
- {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:92879bce89f91f4b2416eba4429c7b5ca22c45ef4a499c39f0c5c69257522c7c"},
- {file = "pydantic_core-2.14.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0c08de15d50fa190d577e8591f0329a643eeaed696d7771760295998aca6bc66"},
- {file = "pydantic_core-2.14.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:36099c69f6b14fc2c49d7996cbf4f87ec4f0e66d1c74aa05228583225a07b590"},
- {file = "pydantic_core-2.14.6-cp310-none-win32.whl", hash = "sha256:7be719e4d2ae6c314f72844ba9d69e38dff342bc360379f7c8537c48e23034b7"},
- {file = "pydantic_core-2.14.6-cp310-none-win_amd64.whl", hash = "sha256:36fa402dcdc8ea7f1b0ddcf0df4254cc6b2e08f8cd80e7010d4c4ae6e86b2a87"},
- {file = "pydantic_core-2.14.6-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:dea7fcd62915fb150cdc373212141a30037e11b761fbced340e9db3379b892d4"},
- {file = "pydantic_core-2.14.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ffff855100bc066ff2cd3aa4a60bc9534661816b110f0243e59503ec2df38421"},
- {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b027c86c66b8627eb90e57aee1f526df77dc6d8b354ec498be9a757d513b92b"},
- {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:00b1087dabcee0b0ffd104f9f53d7d3eaddfaa314cdd6726143af6bc713aa27e"},
- {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:75ec284328b60a4e91010c1acade0c30584f28a1f345bc8f72fe8b9e46ec6a96"},
- {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e1f4744eea1501404b20b0ac059ff7e3f96a97d3e3f48ce27a139e053bb370b"},
- {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2602177668f89b38b9f84b7b3435d0a72511ddef45dc14446811759b82235a1"},
- {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6c8edaea3089bf908dd27da8f5d9e395c5b4dc092dbcce9b65e7156099b4b937"},
- {file = "pydantic_core-2.14.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:478e9e7b360dfec451daafe286998d4a1eeaecf6d69c427b834ae771cad4b622"},
- {file = "pydantic_core-2.14.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b6ca36c12a5120bad343eef193cc0122928c5c7466121da7c20f41160ba00ba2"},
- {file = "pydantic_core-2.14.6-cp311-none-win32.whl", hash = "sha256:2b8719037e570639e6b665a4050add43134d80b687288ba3ade18b22bbb29dd2"},
- {file = "pydantic_core-2.14.6-cp311-none-win_amd64.whl", hash = "sha256:78ee52ecc088c61cce32b2d30a826f929e1708f7b9247dc3b921aec367dc1b23"},
- {file = "pydantic_core-2.14.6-cp311-none-win_arm64.whl", hash = "sha256:a19b794f8fe6569472ff77602437ec4430f9b2b9ec7a1105cfd2232f9ba355e6"},
- {file = "pydantic_core-2.14.6-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:667aa2eac9cd0700af1ddb38b7b1ef246d8cf94c85637cbb03d7757ca4c3fdec"},
- {file = "pydantic_core-2.14.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cdee837710ef6b56ebd20245b83799fce40b265b3b406e51e8ccc5b85b9099b7"},
- {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c5bcf3414367e29f83fd66f7de64509a8fd2368b1edf4351e862910727d3e51"},
- {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:26a92ae76f75d1915806b77cf459811e772d8f71fd1e4339c99750f0e7f6324f"},
- {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a983cca5ed1dd9a35e9e42ebf9f278d344603bfcb174ff99a5815f953925140a"},
- {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cb92f9061657287eded380d7dc455bbf115430b3aa4741bdc662d02977e7d0af"},
- {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4ace1e220b078c8e48e82c081e35002038657e4b37d403ce940fa679e57113b"},
- {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ef633add81832f4b56d3b4c9408b43d530dfca29e68fb1b797dcb861a2c734cd"},
- {file = "pydantic_core-2.14.6-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7e90d6cc4aad2cc1f5e16ed56e46cebf4877c62403a311af20459c15da76fd91"},
- {file = "pydantic_core-2.14.6-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e8a5ac97ea521d7bde7621d86c30e86b798cdecd985723c4ed737a2aa9e77d0c"},
- {file = "pydantic_core-2.14.6-cp312-none-win32.whl", hash = "sha256:f27207e8ca3e5e021e2402ba942e5b4c629718e665c81b8b306f3c8b1ddbb786"},
- {file = "pydantic_core-2.14.6-cp312-none-win_amd64.whl", hash = "sha256:b3e5fe4538001bb82e2295b8d2a39356a84694c97cb73a566dc36328b9f83b40"},
- {file = "pydantic_core-2.14.6-cp312-none-win_arm64.whl", hash = "sha256:64634ccf9d671c6be242a664a33c4acf12882670b09b3f163cd00a24cffbd74e"},
- {file = "pydantic_core-2.14.6-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:24368e31be2c88bd69340fbfe741b405302993242ccb476c5c3ff48aeee1afe0"},
- {file = "pydantic_core-2.14.6-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:e33b0834f1cf779aa839975f9d8755a7c2420510c0fa1e9fa0497de77cd35d2c"},
- {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6af4b3f52cc65f8a0bc8b1cd9676f8c21ef3e9132f21fed250f6958bd7223bed"},
- {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d15687d7d7f40333bd8266f3814c591c2e2cd263fa2116e314f60d82086e353a"},
- {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:095b707bb287bfd534044166ab767bec70a9bba3175dcdc3371782175c14e43c"},
- {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94fc0e6621e07d1e91c44e016cc0b189b48db053061cc22d6298a611de8071bb"},
- {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ce830e480f6774608dedfd4a90c42aac4a7af0a711f1b52f807130c2e434c06"},
- {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a306cdd2ad3a7d795d8e617a58c3a2ed0f76c8496fb7621b6cd514eb1532cae8"},
- {file = "pydantic_core-2.14.6-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:2f5fa187bde8524b1e37ba894db13aadd64faa884657473b03a019f625cee9a8"},
- {file = "pydantic_core-2.14.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:438027a975cc213a47c5d70672e0d29776082155cfae540c4e225716586be75e"},
- {file = "pydantic_core-2.14.6-cp37-none-win32.whl", hash = "sha256:f96ae96a060a8072ceff4cfde89d261837b4294a4f28b84a28765470d502ccc6"},
- {file = "pydantic_core-2.14.6-cp37-none-win_amd64.whl", hash = "sha256:e646c0e282e960345314f42f2cea5e0b5f56938c093541ea6dbf11aec2862391"},
- {file = "pydantic_core-2.14.6-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:db453f2da3f59a348f514cfbfeb042393b68720787bbef2b4c6068ea362c8149"},
- {file = "pydantic_core-2.14.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3860c62057acd95cc84044e758e47b18dcd8871a328ebc8ccdefd18b0d26a21b"},
- {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36026d8f99c58d7044413e1b819a67ca0e0b8ebe0f25e775e6c3d1fabb3c38fb"},
- {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8ed1af8692bd8d2a29d702f1a2e6065416d76897d726e45a1775b1444f5928a7"},
- {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:314ccc4264ce7d854941231cf71b592e30d8d368a71e50197c905874feacc8a8"},
- {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:982487f8931067a32e72d40ab6b47b1628a9c5d344be7f1a4e668fb462d2da42"},
- {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dbe357bc4ddda078f79d2a36fc1dd0494a7f2fad83a0a684465b6f24b46fe80"},
- {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2f6ffc6701a0eb28648c845f4945a194dc7ab3c651f535b81793251e1185ac3d"},
- {file = "pydantic_core-2.14.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7f5025db12fc6de7bc1104d826d5aee1d172f9ba6ca936bf6474c2148ac336c1"},
- {file = "pydantic_core-2.14.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:dab03ed811ed1c71d700ed08bde8431cf429bbe59e423394f0f4055f1ca0ea60"},
- {file = "pydantic_core-2.14.6-cp38-none-win32.whl", hash = "sha256:dfcbebdb3c4b6f739a91769aea5ed615023f3c88cb70df812849aef634c25fbe"},
- {file = "pydantic_core-2.14.6-cp38-none-win_amd64.whl", hash = "sha256:99b14dbea2fdb563d8b5a57c9badfcd72083f6006caf8e126b491519c7d64ca8"},
- {file = "pydantic_core-2.14.6-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:4ce8299b481bcb68e5c82002b96e411796b844d72b3e92a3fbedfe8e19813eab"},
- {file = "pydantic_core-2.14.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b9a9d92f10772d2a181b5ca339dee066ab7d1c9a34ae2421b2a52556e719756f"},
- {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd9e98b408384989ea4ab60206b8e100d8687da18b5c813c11e92fd8212a98e0"},
- {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4f86f1f318e56f5cbb282fe61eb84767aee743ebe32c7c0834690ebea50c0a6b"},
- {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86ce5fcfc3accf3a07a729779d0b86c5d0309a4764c897d86c11089be61da160"},
- {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dcf1978be02153c6a31692d4fbcc2a3f1db9da36039ead23173bc256ee3b91b"},
- {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eedf97be7bc3dbc8addcef4142f4b4164066df0c6f36397ae4aaed3eb187d8ab"},
- {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d5f916acf8afbcab6bacbb376ba7dc61f845367901ecd5e328fc4d4aef2fcab0"},
- {file = "pydantic_core-2.14.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8a14c192c1d724c3acbfb3f10a958c55a2638391319ce8078cb36c02283959b9"},
- {file = "pydantic_core-2.14.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0348b1dc6b76041516e8a854ff95b21c55f5a411c3297d2ca52f5528e49d8411"},
- {file = "pydantic_core-2.14.6-cp39-none-win32.whl", hash = "sha256:de2a0645a923ba57c5527497daf8ec5df69c6eadf869e9cd46e86349146e5975"},
- {file = "pydantic_core-2.14.6-cp39-none-win_amd64.whl", hash = "sha256:aca48506a9c20f68ee61c87f2008f81f8ee99f8d7f0104bff3c47e2d148f89d9"},
- {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:d5c28525c19f5bb1e09511669bb57353d22b94cf8b65f3a8d141c389a55dec95"},
- {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:78d0768ee59baa3de0f4adac9e3748b4b1fffc52143caebddfd5ea2961595277"},
- {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b93785eadaef932e4fe9c6e12ba67beb1b3f1e5495631419c784ab87e975670"},
- {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a874f21f87c485310944b2b2734cd6d318765bcbb7515eead33af9641816506e"},
- {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b89f4477d915ea43b4ceea6756f63f0288941b6443a2b28c69004fe07fde0d0d"},
- {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:172de779e2a153d36ee690dbc49c6db568d7b33b18dc56b69a7514aecbcf380d"},
- {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:dfcebb950aa7e667ec226a442722134539e77c575f6cfaa423f24371bb8d2e94"},
- {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:55a23dcd98c858c0db44fc5c04fc7ed81c4b4d33c653a7c45ddaebf6563a2f66"},
- {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:4241204e4b36ab5ae466ecec5c4c16527a054c69f99bba20f6f75232a6a534e2"},
- {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e574de99d735b3fc8364cba9912c2bec2da78775eba95cbb225ef7dda6acea24"},
- {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1302a54f87b5cd8528e4d6d1bf2133b6aa7c6122ff8e9dc5220fbc1e07bffebd"},
- {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f8e81e4b55930e5ffab4a68db1af431629cf2e4066dbdbfef65348b8ab804ea8"},
- {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:c99462ffc538717b3e60151dfaf91125f637e801f5ab008f81c402f1dff0cd0f"},
- {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e4cf2d5829f6963a5483ec01578ee76d329eb5caf330ecd05b3edd697e7d768a"},
- {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:cf10b7d58ae4a1f07fccbf4a0a956d705356fea05fb4c70608bb6fa81d103cda"},
- {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:399ac0891c284fa8eb998bcfa323f2234858f5d2efca3950ae58c8f88830f145"},
- {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c6a5c79b28003543db3ba67d1df336f253a87d3112dac3a51b94f7d48e4c0e1"},
- {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:599c87d79cab2a6a2a9df4aefe0455e61e7d2aeede2f8577c1b7c0aec643ee8e"},
- {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:43e166ad47ba900f2542a80d83f9fc65fe99eb63ceec4debec160ae729824052"},
- {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3a0b5db001b98e1c649dd55afa928e75aa4087e587b9524a4992316fa23c9fba"},
- {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:747265448cb57a9f37572a488a57d873fd96bf51e5bb7edb52cfb37124516da4"},
- {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7ebe3416785f65c28f4f9441e916bfc8a54179c8dea73c23023f7086fa601c5d"},
- {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:86c963186ca5e50d5c8287b1d1c9d3f8f024cbe343d048c5bd282aec2d8641f2"},
- {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:e0641b506486f0b4cd1500a2a65740243e8670a2549bb02bc4556a83af84ae03"},
- {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71d72ca5eaaa8d38c8df16b7deb1a2da4f650c41b58bb142f3fb75d5ad4a611f"},
- {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27e524624eace5c59af499cd97dc18bb201dc6a7a2da24bfc66ef151c69a5f2a"},
- {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a3dde6cac75e0b0902778978d3b1646ca9f438654395a362cb21d9ad34b24acf"},
- {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:00646784f6cd993b1e1c0e7b0fdcbccc375d539db95555477771c27555e3c556"},
- {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:23598acb8ccaa3d1d875ef3b35cb6376535095e9405d91a3d57a8c7db5d29341"},
- {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7f41533d7e3cf9520065f610b41ac1c76bc2161415955fbcead4981b22c7611e"},
- {file = "pydantic_core-2.14.6.tar.gz", hash = "sha256:1fd0c1d395372843fba13a51c28e3bb9d59bd7aebfeb17358ffaaa1e4dbbe948"},
+ {file = "pydantic_core-2.16.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3fab4e75b8c525a4776e7630b9ee48aea50107fea6ca9f593c98da3f4d11bf7c"},
+ {file = "pydantic_core-2.16.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8bde5b48c65b8e807409e6f20baee5d2cd880e0fad00b1a811ebc43e39a00ab2"},
+ {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2924b89b16420712e9bb8192396026a8fbd6d8726224f918353ac19c4c043d2a"},
+ {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:16aa02e7a0f539098e215fc193c8926c897175d64c7926d00a36188917717a05"},
+ {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:936a787f83db1f2115ee829dd615c4f684ee48ac4de5779ab4300994d8af325b"},
+ {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:459d6be6134ce3b38e0ef76f8a672924460c455d45f1ad8fdade36796df1ddc8"},
+ {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9ee4febb249c591d07b2d4dd36ebcad0ccd128962aaa1801508320896575ef"},
+ {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40a0bd0bed96dae5712dab2aba7d334a6c67cbcac2ddfca7dbcc4a8176445990"},
+ {file = "pydantic_core-2.16.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:870dbfa94de9b8866b37b867a2cb37a60c401d9deb4a9ea392abf11a1f98037b"},
+ {file = "pydantic_core-2.16.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:308974fdf98046db28440eb3377abba274808bf66262e042c412eb2adf852731"},
+ {file = "pydantic_core-2.16.2-cp310-none-win32.whl", hash = "sha256:a477932664d9611d7a0816cc3c0eb1f8856f8a42435488280dfbf4395e141485"},
+ {file = "pydantic_core-2.16.2-cp310-none-win_amd64.whl", hash = "sha256:8f9142a6ed83d90c94a3efd7af8873bf7cefed2d3d44387bf848888482e2d25f"},
+ {file = "pydantic_core-2.16.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:406fac1d09edc613020ce9cf3f2ccf1a1b2f57ab00552b4c18e3d5276c67eb11"},
+ {file = "pydantic_core-2.16.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ce232a6170dd6532096cadbf6185271e4e8c70fc9217ebe105923ac105da9978"},
+ {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a90fec23b4b05a09ad988e7a4f4e081711a90eb2a55b9c984d8b74597599180f"},
+ {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8aafeedb6597a163a9c9727d8a8bd363a93277701b7bfd2749fbefee2396469e"},
+ {file = 
"pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9957433c3a1b67bdd4c63717eaf174ebb749510d5ea612cd4e83f2d9142f3fc8"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0d7a9165167269758145756db43a133608a531b1e5bb6a626b9ee24bc38a8f7"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dffaf740fe2e147fedcb6b561353a16243e654f7fe8e701b1b9db148242e1272"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f8ed79883b4328b7f0bd142733d99c8e6b22703e908ec63d930b06be3a0e7113"}, + {file = "pydantic_core-2.16.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:cf903310a34e14651c9de056fcc12ce090560864d5a2bb0174b971685684e1d8"}, + {file = "pydantic_core-2.16.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:46b0d5520dbcafea9a8645a8164658777686c5c524d381d983317d29687cce97"}, + {file = "pydantic_core-2.16.2-cp311-none-win32.whl", hash = "sha256:70651ff6e663428cea902dac297066d5c6e5423fda345a4ca62430575364d62b"}, + {file = "pydantic_core-2.16.2-cp311-none-win_amd64.whl", hash = "sha256:98dc6f4f2095fc7ad277782a7c2c88296badcad92316b5a6e530930b1d475ebc"}, + {file = "pydantic_core-2.16.2-cp311-none-win_arm64.whl", hash = "sha256:ef6113cd31411eaf9b39fc5a8848e71c72656fd418882488598758b2c8c6dfa0"}, + {file = "pydantic_core-2.16.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:88646cae28eb1dd5cd1e09605680c2b043b64d7481cdad7f5003ebef401a3039"}, + {file = "pydantic_core-2.16.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7b883af50eaa6bb3299780651e5be921e88050ccf00e3e583b1e92020333304b"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bf26c2e2ea59d32807081ad51968133af3025c4ba5753e6a794683d2c91bf6e"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:99af961d72ac731aae2a1b55ccbdae0733d816f8bfb97b41909e143de735f522"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:02906e7306cb8c5901a1feb61f9ab5e5c690dbbeaa04d84c1b9ae2a01ebe9379"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5362d099c244a2d2f9659fb3c9db7c735f0004765bbe06b99be69fbd87c3f15"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ac426704840877a285d03a445e162eb258924f014e2f074e209d9b4ff7bf380"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b94cbda27267423411c928208e89adddf2ea5dd5f74b9528513f0358bba019cb"}, + {file = "pydantic_core-2.16.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:6db58c22ac6c81aeac33912fb1af0e930bc9774166cdd56eade913d5f2fff35e"}, + {file = "pydantic_core-2.16.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:396fdf88b1b503c9c59c84a08b6833ec0c3b5ad1a83230252a9e17b7dfb4cffc"}, + {file = "pydantic_core-2.16.2-cp312-none-win32.whl", hash = "sha256:7c31669e0c8cc68400ef0c730c3a1e11317ba76b892deeefaf52dcb41d56ed5d"}, + {file = "pydantic_core-2.16.2-cp312-none-win_amd64.whl", hash = "sha256:a3b7352b48fbc8b446b75f3069124e87f599d25afb8baa96a550256c031bb890"}, + {file = "pydantic_core-2.16.2-cp312-none-win_arm64.whl", hash = "sha256:a9e523474998fb33f7c1a4d55f5504c908d57add624599e095c20fa575b8d943"}, + {file = 
"pydantic_core-2.16.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:ae34418b6b389d601b31153b84dce480351a352e0bb763684a1b993d6be30f17"}, + {file = "pydantic_core-2.16.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:732bd062c9e5d9582a30e8751461c1917dd1ccbdd6cafb032f02c86b20d2e7ec"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b52776a2e3230f4854907a1e0946eec04d41b1fc64069ee774876bbe0eab55"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ef551c053692b1e39e3f7950ce2296536728871110e7d75c4e7753fb30ca87f4"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ebb892ed8599b23fa8f1799e13a12c87a97a6c9d0f497525ce9858564c4575a4"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa6c8c582036275997a733427b88031a32ffa5dfc3124dc25a730658c47a572f"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4ba0884a91f1aecce75202473ab138724aa4fb26d7707f2e1fa6c3e68c84fbf"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7924e54f7ce5d253d6160090ddc6df25ed2feea25bfb3339b424a9dd591688bc"}, + {file = "pydantic_core-2.16.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69a7b96b59322a81c2203be537957313b07dd333105b73db0b69212c7d867b4b"}, + {file = "pydantic_core-2.16.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7e6231aa5bdacda78e96ad7b07d0c312f34ba35d717115f4b4bff6cb87224f0f"}, + {file = "pydantic_core-2.16.2-cp38-none-win32.whl", hash = "sha256:41dac3b9fce187a25c6253ec79a3f9e2a7e761eb08690e90415069ea4a68ff7a"}, + {file = "pydantic_core-2.16.2-cp38-none-win_amd64.whl", hash = "sha256:f685dbc1fdadb1dcd5b5e51e0a378d4685a891b2ddaf8e2bba89bd3a7144e44a"}, + {file = "pydantic_core-2.16.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:55749f745ebf154c0d63d46c8c58594d8894b161928aa41adbb0709c1fe78b77"}, + {file = "pydantic_core-2.16.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b30b0dd58a4509c3bd7eefddf6338565c4905406aee0c6e4a5293841411a1286"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18de31781cdc7e7b28678df7c2d7882f9692ad060bc6ee3c94eb15a5d733f8f7"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5864b0242f74b9dd0b78fd39db1768bc3f00d1ffc14e596fd3e3f2ce43436a33"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8f9186ca45aee030dc8234118b9c0784ad91a0bb27fc4e7d9d6608a5e3d386c"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cc6f6c9be0ab6da37bc77c2dda5f14b1d532d5dbef00311ee6e13357a418e646"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa057095f621dad24a1e906747179a69780ef45cc8f69e97463692adbcdae878"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6ad84731a26bcfb299f9eab56c7932d46f9cad51c52768cace09e92a19e4cf55"}, + {file = "pydantic_core-2.16.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3b052c753c4babf2d1edc034c97851f867c87d6f3ea63a12e2700f159f5c41c3"}, + {file = "pydantic_core-2.16.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e0f686549e32ccdb02ae6f25eee40cc33900910085de6aa3790effd391ae10c2"}, 
+ {file = "pydantic_core-2.16.2-cp39-none-win32.whl", hash = "sha256:7afb844041e707ac9ad9acad2188a90bffce2c770e6dc2318be0c9916aef1469"}, + {file = "pydantic_core-2.16.2-cp39-none-win_amd64.whl", hash = "sha256:9da90d393a8227d717c19f5397688a38635afec89f2e2d7af0df037f3249c39a"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5f60f920691a620b03082692c378661947d09415743e437a7478c309eb0e4f82"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:47924039e785a04d4a4fa49455e51b4eb3422d6eaacfde9fc9abf8fdef164e8a"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6294e76b0380bb7a61eb8a39273c40b20beb35e8c87ee101062834ced19c545"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe56851c3f1d6f5384b3051c536cc81b3a93a73faf931f404fef95217cf1e10d"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9d776d30cde7e541b8180103c3f294ef7c1862fd45d81738d156d00551005784"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:72f7919af5de5ecfaf1eba47bf9a5d8aa089a3340277276e5636d16ee97614d7"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:4bfcbde6e06c56b30668a0c872d75a7ef3025dc3c1823a13cf29a0e9b33f67e8"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ff7c97eb7a29aba230389a2661edf2e9e06ce616c7e35aa764879b6894a44b25"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9b5f13857da99325dcabe1cc4e9e6a3d7b2e2c726248ba5dd4be3e8e4a0b6d0e"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a7e41e3ada4cca5f22b478c08e973c930e5e6c7ba3588fb8e35f2398cdcc1545"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60eb8ceaa40a41540b9acae6ae7c1f0a67d233c40dc4359c256ad2ad85bdf5e5"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7beec26729d496a12fd23cf8da9944ee338c8b8a17035a560b585c36fe81af20"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:22c5f022799f3cd6741e24f0443ead92ef42be93ffda0d29b2597208c94c3753"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:eca58e319f4fd6df004762419612122b2c7e7d95ffafc37e890252f869f3fb2a"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ed957db4c33bc99895f3a1672eca7e80e8cda8bd1e29a80536b4ec2153fa9804"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:459c0d338cc55d099798618f714b21b7ece17eb1a87879f2da20a3ff4c7628e2"}, + {file = "pydantic_core-2.16.2.tar.gz", hash = "sha256:0ba503850d8b8dcc18391f10de896ae51d37fe5fe43dbfb6a35c5c5cad271a06"}, ] [package.dependencies] @@ -2843,17 +3032,18 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" [[package]] name = "pygments" -version = "2.16.1" +version = "2.17.2" description = "Pygments is a syntax highlighting package written in Python." 
optional = false python-versions = ">=3.7" files = [ - {file = "Pygments-2.16.1-py3-none-any.whl", hash = "sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692"}, - {file = "Pygments-2.16.1.tar.gz", hash = "sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29"}, + {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, + {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, ] [package.extras] plugins = ["importlib-metadata"] +windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pyjwt" @@ -2903,15 +3093,35 @@ files = [ [package.dependencies] tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +[[package]] +name = "pyspark" +version = "3.5.0" +description = "Apache Spark Python API" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyspark-3.5.0.tar.gz", hash = "sha256:d41a9b76bd2aca370a6100d075c029e22ba44c5940927877e9435a3a9c566558"}, +] + +[package.dependencies] +py4j = "0.10.9.7" + +[package.extras] +connect = ["googleapis-common-protos (>=1.56.4)", "grpcio (>=1.56.0)", "grpcio-status (>=1.56.0)", "numpy (>=1.15)", "pandas (>=1.0.5)", "pyarrow (>=4.0.0)"] +ml = ["numpy (>=1.15)"] +mllib = ["numpy (>=1.15)"] +pandas-on-spark = ["numpy (>=1.15)", "pandas (>=1.0.5)", "pyarrow (>=4.0.0)"] +sql = ["numpy (>=1.15)", "pandas (>=1.0.5)", "pyarrow (>=4.0.0)"] + [[package]] name = "pytest" -version = "7.4.3" +version = "7.4.4" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-7.4.3-py3-none-any.whl", hash = "sha256:0d009c083ea859a71b76adf7c1d502e4bc170b80a8ef002da5806527b9591fac"}, - {file = "pytest-7.4.3.tar.gz", hash = "sha256:d989d136982de4e3b29dabcc838ad581c64e8ed52c11fbe86ddebd9da0818cd5"}, + {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, + {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, ] [package.dependencies] @@ -2990,28 +3200,6 @@ files = [ [package.dependencies] six = ">=1.5" -[[package]] -name = "python-jose" -version = "3.3.0" -description = "JOSE implementation in Python" -optional = false -python-versions = "*" -files = [ - {file = "python-jose-3.3.0.tar.gz", hash = "sha256:55779b5e6ad599c6336191246e95eb2293a9ddebd555f796a65f838f07e5d78a"}, - {file = "python_jose-3.3.0-py2.py3-none-any.whl", hash = "sha256:9b1376b023f8b298536eedd47ae1089bcdb848f1535ab30555cd92002d78923a"}, -] - -[package.dependencies] -cryptography = {version = ">=3.4.0", optional = true, markers = "extra == \"cryptography\""} -ecdsa = "!=0.15" -pyasn1 = "*" -rsa = "*" - -[package.extras] -cryptography = ["cryptography (>=3.4.0)"] -pycrypto = ["pyasn1", "pycrypto (>=2.6.0,<2.7.0)"] -pycryptodome = ["pyasn1", "pycryptodome (>=3.3.1,<4.0.0)"] - [[package]] name = "python-snappy" version = "0.6.1" @@ -3071,13 +3259,13 @@ files = [ [[package]] name = "pytz" -version = "2023.3.post1" +version = "2024.1" description = "World timezone definitions, modern and historical" optional = true python-versions = "*" files = [ - {file = "pytz-2023.3.post1-py2.py3-none-any.whl", hash = "sha256:ce42d816b81b68506614c11e8937d3aa9e41007ceb50bfdcb0749b921bf646c7"}, - {file = "pytz-2023.3.post1.tar.gz", hash = "sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b"}, + {file = 
"pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, + {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, ] [[package]] @@ -3164,35 +3352,31 @@ files = [ [[package]] name = "ray" -version = "2.7.1" +version = "2.9.2" description = "Ray provides a simple, universal API for building distributed applications." optional = true -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "ray-2.7.1-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:4a2c98ab42881836894f20408ce40c0fd7fe5da7f0bc69cf22c951ccceda55ed"}, - {file = "ray-2.7.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:53800aadfc07152bc8672d5fa91bb4dc17d96b572a9bd436dd00fd2e0d07ef6a"}, - {file = "ray-2.7.1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:17a425b4a2c2098f78fd0ab3831a35a53608d36466453e90c30a6495e9dce354"}, - {file = "ray-2.7.1-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:9681a8a7bf081e2244360206f3cd80d1a6adb4dc6330a507fd8c78ebe6e57365"}, - {file = "ray-2.7.1-cp310-cp310-win_amd64.whl", hash = "sha256:148c77050ceab3c90739147bb86ac535e9590046cc36364ae9eb15469ea16fbc"}, - {file = "ray-2.7.1-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:0b0e80e26d6899820c12301626a74a209ab29373f46caf5b48c3ae3f99ec1bc7"}, - {file = "ray-2.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b5d13e910bb3449ef7b25084dcc4f0b9a763d3aa7b2fdd39e3b4d93d8c266951"}, - {file = "ray-2.7.1-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:0a6e8a736fe5294a0b0064679e59e393c66942db81fdf95804bdc1495d1f1651"}, - {file = "ray-2.7.1-cp311-cp311-manylinux2014_x86_64.whl", hash = "sha256:f4c9f8a813444bd5346756db1a6d6e09a805b28b5fb6831e91b8d1324c12a888"}, - {file = "ray-2.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:85a8b0f122e4c14d2ee354fce9651834f7ffc9b60ebdce023a5ba8ca5841a6ee"}, - {file = "ray-2.7.1-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:bfa924bbc4042e83a0f31f058f08818418307252fceeee27c4c02bc0d3c02f3f"}, - {file = "ray-2.7.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:0f5657abb376eddf6b56489082d2f94ab36597a2f25da2849e2f66476b90dcc0"}, - {file = "ray-2.7.1-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:d548e1c67a512975c4241be64a8df2153ae6c29ee2f5b08834fadcad7dfc94a4"}, - {file = "ray-2.7.1-cp37-cp37m-win_amd64.whl", hash = "sha256:1f4c09a81971cc54d95be55b9b413fd12121a37528b402d1861a8fa0b4e85509"}, - {file = "ray-2.7.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:1f6d2508d117aac0b880d26a4db65a9f90def2d688709b62e0d039879c3afc7a"}, - {file = "ray-2.7.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:32a6c0866d559d4e6c623ff220cd0790d2da1f3785073a5d0444b8f0486ff541"}, - {file = "ray-2.7.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:d035642e6033f43551a0c17e2363a392739f01df6b4072c5ed71cf3096936d33"}, - {file = "ray-2.7.1-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:a366569d1bd220a92af0dbe092821a11d1ff8ad7b00ed4f74b8a5f380e34ccc7"}, - {file = "ray-2.7.1-cp38-cp38-win_amd64.whl", hash = "sha256:6fe65dc7f83f1c617af3068d84f8c67f3371b1a48776e44ab6af54998891364c"}, - {file = "ray-2.7.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:3c1501ca56da394e07213efd5be42c2cf0a2eae68d76949d26a3133154d6d9ff"}, - {file = "ray-2.7.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:57f7e05ad275317158c447680705e046410f68d2a5992e16d07bbc2cc79da2b3"}, - {file = "ray-2.7.1-cp39-cp39-manylinux2014_aarch64.whl", hash = 
"sha256:b5410ae53c765108c65821fc5e5968509579f98a64d275e103408e1b068e8ca8"}, - {file = "ray-2.7.1-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:1b096abab78b63db6c1a2633f242dd8b3c51e395b574215f3cb8e47f5d7364b9"}, - {file = "ray-2.7.1-cp39-cp39-win_amd64.whl", hash = "sha256:c03fe26443598bd7ad1c22de4585daec324bc03eabc04d3c2f805d9697a554d6"}, + {file = "ray-2.9.2-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:f879522e7d9b809d3aa28fb627ab87344b31cf79e1829b9b67f0581305e2bb84"}, + {file = "ray-2.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ccbcf7f57bf10c52b3ebcec6e8d9114491ef12a20255e70ba0d5f12a81e9391c"}, + {file = "ray-2.9.2-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:a3059e1d4287db33811e7604b4ecc1aa79dc2d745b49e5ec3415060da61cb749"}, + {file = "ray-2.9.2-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:11a0fd3f75ca07f727b1e83c3b2c74e75dc1fe0aba99a416a865f83e7ff23620"}, + {file = "ray-2.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:1e5c4733314bab19d89b373836899e76b2ab839d45f59966b431c89076feaab7"}, + {file = "ray-2.9.2-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:1a734c1c586e666f5024b46405c3df52b634977c9565bca16a01d6fefc457578"}, + {file = "ray-2.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:47975fcb4b6e6cadd35da4b78f3e643f5ab6e99a688d79980ca2f0dca345d034"}, + {file = "ray-2.9.2-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:7d7ed76fa40abe81eefb158d505f046de0614d994fc8027f5b05889824503257"}, + {file = "ray-2.9.2-cp311-cp311-manylinux2014_x86_64.whl", hash = "sha256:83fd7961d39da5ae68731be430891a21a13d0257bc25ab6adf13712297e309fa"}, + {file = "ray-2.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:5929ac8221ba3b6446cd0885a0ade50cfaaecacba771dfeed57ead8b5c6fdd14"}, + {file = "ray-2.9.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:61b5a742f1f249e92893433720423f729018d40ee26a015b6a12b443d0e2e3eb"}, + {file = "ray-2.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:51552b1142944e13ba1da0c44395a627701c563fbe3f6c490001e6e4fd0ee011"}, + {file = "ray-2.9.2-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:8b715b0ad9aa027836ecb7dc33b3a2dfc91c5d9d22a0ddf72c0844df5d641fca"}, + {file = "ray-2.9.2-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:e1a35e2a3de4e3875bd1e76770fb89149adc773193a5e79488db4047ef14ffd0"}, + {file = "ray-2.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:8d97f674c675370550ec4347e7e8dee0f99e38dd8f220ff8acb8ca15c208d73a"}, + {file = "ray-2.9.2-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:efa2c60ab11f41e4d43a227cd6bf491f9f2f8ed820c482c7d8d86a2412b6fd05"}, + {file = "ray-2.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5ebd71ef2e4d76752a1ff048e9d4c22811c7e990e8d4e3b30974b3e4099411b6"}, + {file = "ray-2.9.2-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:d84064ab3aa2868991a98dc6a54cc2221abcaf9406eb95fa2ec0f66006585f92"}, + {file = "ray-2.9.2-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:bc95efd035dcdc2f2b549ce3e13c5abf2043f3136b8a5980d77f4f098a9a6796"}, + {file = "ray-2.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:aea2ad4dbad2d6bd21ba17f7a2fcf762f53d8bcbc30b9d6916245e447a971e48"}, ] [package.dependencies] @@ -3202,38 +3386,34 @@ filelock = "*" frozenlist = "*" jsonschema = "*" msgpack = ">=1.0.0,<2.0.0" -numpy = [ - {version = ">=1.16", markers = "python_version < \"3.9\""}, - {version = ">=1.19.3", markers = "python_version >= \"3.9\""}, -] packaging = "*" protobuf = ">=3.15.3,<3.19.5 || >3.19.5" pyyaml = "*" requests = "*" [package.extras] -air = ["aiohttp (>=3.7)", "aiohttp-cors", "aiorwlock", 
"colorful", "fastapi", "fsspec", "gpustat (>=1.0.0)", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "numpy (>=1.20)", "opencensus", "pandas", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2)", "requests", "smart-open", "starlette", "tensorboardX (>=1.9)", "uvicorn", "virtualenv (>=20.0.24,<20.21.1)", "watchfiles"] -all = ["aiohttp (>=3.7)", "aiohttp-cors", "aiorwlock", "colorful", "dm-tree", "fastapi", "fsspec", "gpustat (>=1.0.0)", "grpcio (!=1.56.0)", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "gymnasium (==0.28.1)", "lz4", "numpy (>=1.20)", "opencensus", "opentelemetry-api", "opentelemetry-exporter-otlp", "opentelemetry-sdk", "pandas", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pyyaml", "ray-cpp (==2.7.1)", "requests", "rich", "scikit-image", "scipy", "smart-open", "starlette", "tensorboardX (>=1.9)", "typer", "uvicorn", "virtualenv (>=20.0.24,<20.21.1)", "watchfiles"] +air = ["aiohttp (>=3.7)", "aiohttp-cors", "aiorwlock", "colorful", "fastapi (<=0.108.0)", "fsspec", "gpustat (>=1.0.0)", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "numpy (>=1.20)", "opencensus", "pandas", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart-open", "starlette", "tensorboardX (>=1.9)", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] +all = ["aiohttp (>=3.7)", "aiohttp-cors", "aiorwlock", "colorful", "dm-tree", "fastapi (<=0.108.0)", "fsspec", "gpustat (>=1.0.0)", "grpcio (!=1.56.0)", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "gymnasium (==0.28.1)", "lz4", "numpy (>=1.20)", "opencensus", "opentelemetry-api", "opentelemetry-exporter-otlp", "opentelemetry-sdk", "pandas", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "pyyaml", "ray-cpp (==2.9.2)", "requests", "rich", "scikit-image", "scipy", "smart-open", "starlette", "tensorboardX (>=1.9)", "typer", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] client = ["grpcio (!=1.56.0)"] -cpp = ["ray-cpp (==2.7.1)"] +cpp = ["ray-cpp (==2.9.2)"] data = ["fsspec", "numpy (>=1.20)", "pandas (>=1.3)", "pyarrow (>=6.0.1)"] -default = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "gpustat (>=1.0.0)", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "opencensus", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pydantic (<2)", "requests", "smart-open", "virtualenv (>=20.0.24,<20.21.1)"] +default = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "gpustat (>=1.0.0)", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "opencensus", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart-open", "virtualenv (>=20.0.24,!=20.21.1)"] observability = ["opentelemetry-api", "opentelemetry-exporter-otlp", "opentelemetry-sdk"] rllib = ["dm-tree", "fsspec", "gymnasium (==0.28.1)", "lz4", "pandas", "pyarrow (>=6.0.1)", "pyyaml", "requests", "rich", "scikit-image", "scipy", "tensorboardX (>=1.9)", "typer"] -serve = ["aiohttp (>=3.7)", "aiohttp-cors", "aiorwlock", "colorful", "fastapi", "gpustat (>=1.0.0)", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "opencensus", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pydantic (<2)", "requests", "smart-open", "starlette", "uvicorn", "virtualenv (>=20.0.24,<20.21.1)", "watchfiles"] -serve-grpc = ["aiohttp (>=3.7)", "aiohttp-cors", "aiorwlock", "colorful", "fastapi", 
"gpustat (>=1.0.0)", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "opencensus", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pydantic (<2)", "requests", "smart-open", "starlette", "uvicorn", "virtualenv (>=20.0.24,<20.21.1)", "watchfiles"] +serve = ["aiohttp (>=3.7)", "aiohttp-cors", "aiorwlock", "colorful", "fastapi (<=0.108.0)", "gpustat (>=1.0.0)", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "opencensus", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart-open", "starlette", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] +serve-grpc = ["aiohttp (>=3.7)", "aiohttp-cors", "aiorwlock", "colorful", "fastapi (<=0.108.0)", "gpustat (>=1.0.0)", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "opencensus", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart-open", "starlette", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] train = ["fsspec", "pandas", "pyarrow (>=6.0.1)", "requests", "tensorboardX (>=1.9)"] tune = ["fsspec", "pandas", "pyarrow (>=6.0.1)", "requests", "tensorboardX (>=1.9)"] [[package]] name = "referencing" -version = "0.30.2" +version = "0.33.0" description = "JSON Referencing + Python" optional = false python-versions = ">=3.8" files = [ - {file = "referencing-0.30.2-py3-none-any.whl", hash = "sha256:449b6669b6121a9e96a7f9e410b245d471e8d48964c67113ce9afe50c8dd7bdf"}, - {file = "referencing-0.30.2.tar.gz", hash = "sha256:794ad8003c65938edcdbc027f1933215e0d0ccc0291e3ce20a4d87432b59efc0"}, + {file = "referencing-0.33.0-py3-none-any.whl", hash = "sha256:39240f2ecc770258f28b642dd47fd74bc8b02484de54e1882b74b35ebd779bd5"}, + {file = "referencing-0.33.0.tar.gz", hash = "sha256:c775fedf74bc0f9189c2a3be1c12fd03e8c23f4d371dce795df44e06c5b412f7"}, ] [package.dependencies] @@ -3242,99 +3422,104 @@ rpds-py = ">=0.7.0" [[package]] name = "regex" -version = "2023.10.3" +version = "2023.12.25" description = "Alternative regular expression module, to replace re." 
optional = false python-versions = ">=3.7" files = [ - {file = "regex-2023.10.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4c34d4f73ea738223a094d8e0ffd6d2c1a1b4c175da34d6b0de3d8d69bee6bcc"}, - {file = "regex-2023.10.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a8f4e49fc3ce020f65411432183e6775f24e02dff617281094ba6ab079ef0915"}, - {file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4cd1bccf99d3ef1ab6ba835308ad85be040e6a11b0977ef7ea8c8005f01a3c29"}, - {file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:81dce2ddc9f6e8f543d94b05d56e70d03a0774d32f6cca53e978dc01e4fc75b8"}, - {file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c6b4d23c04831e3ab61717a707a5d763b300213db49ca680edf8bf13ab5d91b"}, - {file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c15ad0aee158a15e17e0495e1e18741573d04eb6da06d8b84af726cfc1ed02ee"}, - {file = "regex-2023.10.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6239d4e2e0b52c8bd38c51b760cd870069f0bdf99700a62cd509d7a031749a55"}, - {file = "regex-2023.10.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4a8bf76e3182797c6b1afa5b822d1d5802ff30284abe4599e1247be4fd6b03be"}, - {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d9c727bbcf0065cbb20f39d2b4f932f8fa1631c3e01fcedc979bd4f51fe051c5"}, - {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:3ccf2716add72f80714b9a63899b67fa711b654be3fcdd34fa391d2d274ce767"}, - {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:107ac60d1bfdc3edb53be75e2a52aff7481b92817cfdddd9b4519ccf0e54a6ff"}, - {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:00ba3c9818e33f1fa974693fb55d24cdc8ebafcb2e4207680669d8f8d7cca79a"}, - {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f0a47efb1dbef13af9c9a54a94a0b814902e547b7f21acb29434504d18f36e3a"}, - {file = "regex-2023.10.3-cp310-cp310-win32.whl", hash = "sha256:36362386b813fa6c9146da6149a001b7bd063dabc4d49522a1f7aa65b725c7ec"}, - {file = "regex-2023.10.3-cp310-cp310-win_amd64.whl", hash = "sha256:c65a3b5330b54103e7d21cac3f6bf3900d46f6d50138d73343d9e5b2900b2353"}, - {file = "regex-2023.10.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:90a79bce019c442604662d17bf69df99090e24cdc6ad95b18b6725c2988a490e"}, - {file = "regex-2023.10.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c7964c2183c3e6cce3f497e3a9f49d182e969f2dc3aeeadfa18945ff7bdd7051"}, - {file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ef80829117a8061f974b2fda8ec799717242353bff55f8a29411794d635d964"}, - {file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5addc9d0209a9afca5fc070f93b726bf7003bd63a427f65ef797a931782e7edc"}, - {file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c148bec483cc4b421562b4bcedb8e28a3b84fcc8f0aa4418e10898f3c2c0eb9b"}, - {file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d1f21af4c1539051049796a0f50aa342f9a27cde57318f2fc41ed50b0dbc4ac"}, - {file = 
"regex-2023.10.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0b9ac09853b2a3e0d0082104036579809679e7715671cfbf89d83c1cb2a30f58"}, - {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ebedc192abbc7fd13c5ee800e83a6df252bec691eb2c4bedc9f8b2e2903f5e2a"}, - {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d8a993c0a0ffd5f2d3bda23d0cd75e7086736f8f8268de8a82fbc4bd0ac6791e"}, - {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:be6b7b8d42d3090b6c80793524fa66c57ad7ee3fe9722b258aec6d0672543fd0"}, - {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4023e2efc35a30e66e938de5aef42b520c20e7eda7bb5fb12c35e5d09a4c43f6"}, - {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0d47840dc05e0ba04fe2e26f15126de7c755496d5a8aae4a08bda4dd8d646c54"}, - {file = "regex-2023.10.3-cp311-cp311-win32.whl", hash = "sha256:9145f092b5d1977ec8c0ab46e7b3381b2fd069957b9862a43bd383e5c01d18c2"}, - {file = "regex-2023.10.3-cp311-cp311-win_amd64.whl", hash = "sha256:b6104f9a46bd8743e4f738afef69b153c4b8b592d35ae46db07fc28ae3d5fb7c"}, - {file = "regex-2023.10.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:bff507ae210371d4b1fe316d03433ac099f184d570a1a611e541923f78f05037"}, - {file = "regex-2023.10.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:be5e22bbb67924dea15039c3282fa4cc6cdfbe0cbbd1c0515f9223186fc2ec5f"}, - {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a992f702c9be9c72fa46f01ca6e18d131906a7180950958f766c2aa294d4b41"}, - {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7434a61b158be563c1362d9071358f8ab91b8d928728cd2882af060481244c9e"}, - {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c2169b2dcabf4e608416f7f9468737583ce5f0a6e8677c4efbf795ce81109d7c"}, - {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9e908ef5889cda4de038892b9accc36d33d72fb3e12c747e2799a0e806ec841"}, - {file = "regex-2023.10.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12bd4bc2c632742c7ce20db48e0d99afdc05e03f0b4c1af90542e05b809a03d9"}, - {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bc72c231f5449d86d6c7d9cc7cd819b6eb30134bb770b8cfdc0765e48ef9c420"}, - {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bce8814b076f0ce5766dc87d5a056b0e9437b8e0cd351b9a6c4e1134a7dfbda9"}, - {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:ba7cd6dc4d585ea544c1412019921570ebd8a597fabf475acc4528210d7c4a6f"}, - {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b0c7d2f698e83f15228ba41c135501cfe7d5740181d5903e250e47f617eb4292"}, - {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5a8f91c64f390ecee09ff793319f30a0f32492e99f5dc1c72bc361f23ccd0a9a"}, - {file = "regex-2023.10.3-cp312-cp312-win32.whl", hash = "sha256:ad08a69728ff3c79866d729b095872afe1e0557251da4abb2c5faff15a91d19a"}, - {file = "regex-2023.10.3-cp312-cp312-win_amd64.whl", hash = "sha256:39cdf8d141d6d44e8d5a12a8569d5a227f645c87df4f92179bd06e2e2705e76b"}, - {file = "regex-2023.10.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4a3ee019a9befe84fa3e917a2dd378807e423d013377a884c1970a3c2792d293"}, - 
{file = "regex-2023.10.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76066d7ff61ba6bf3cb5efe2428fc82aac91802844c022d849a1f0f53820502d"}, - {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfe50b61bab1b1ec260fa7cd91106fa9fece57e6beba05630afe27c71259c59b"}, - {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fd88f373cb71e6b59b7fa597e47e518282455c2734fd4306a05ca219a1991b0"}, - {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3ab05a182c7937fb374f7e946f04fb23a0c0699c0450e9fb02ef567412d2fa3"}, - {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dac37cf08fcf2094159922edc7a2784cfcc5c70f8354469f79ed085f0328ebdf"}, - {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e54ddd0bb8fb626aa1f9ba7b36629564544954fff9669b15da3610c22b9a0991"}, - {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3367007ad1951fde612bf65b0dffc8fd681a4ab98ac86957d16491400d661302"}, - {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:16f8740eb6dbacc7113e3097b0a36065a02e37b47c936b551805d40340fb9971"}, - {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:f4f2ca6df64cbdd27f27b34f35adb640b5d2d77264228554e68deda54456eb11"}, - {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:39807cbcbe406efca2a233884e169d056c35aa7e9f343d4e78665246a332f597"}, - {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:7eece6fbd3eae4a92d7c748ae825cbc1ee41a89bb1c3db05b5578ed3cfcfd7cb"}, - {file = "regex-2023.10.3-cp37-cp37m-win32.whl", hash = "sha256:ce615c92d90df8373d9e13acddd154152645c0dc060871abf6bd43809673d20a"}, - {file = "regex-2023.10.3-cp37-cp37m-win_amd64.whl", hash = "sha256:0f649fa32fe734c4abdfd4edbb8381c74abf5f34bc0b3271ce687b23729299ed"}, - {file = "regex-2023.10.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9b98b7681a9437262947f41c7fac567c7e1f6eddd94b0483596d320092004533"}, - {file = "regex-2023.10.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:91dc1d531f80c862441d7b66c4505cd6ea9d312f01fb2f4654f40c6fdf5cc37a"}, - {file = "regex-2023.10.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82fcc1f1cc3ff1ab8a57ba619b149b907072e750815c5ba63e7aa2e1163384a4"}, - {file = "regex-2023.10.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7979b834ec7a33aafae34a90aad9f914c41fd6eaa8474e66953f3f6f7cbd4368"}, - {file = "regex-2023.10.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ef71561f82a89af6cfcbee47f0fabfdb6e63788a9258e913955d89fdd96902ab"}, - {file = "regex-2023.10.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd829712de97753367153ed84f2de752b86cd1f7a88b55a3a775eb52eafe8a94"}, - {file = "regex-2023.10.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00e871d83a45eee2f8688d7e6849609c2ca2a04a6d48fba3dff4deef35d14f07"}, - {file = "regex-2023.10.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:706e7b739fdd17cb89e1fbf712d9dc21311fc2333f6d435eac2d4ee81985098c"}, - {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:cc3f1c053b73f20c7ad88b0d1d23be7e7b3901229ce89f5000a8399746a6e039"}, - {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6f85739e80d13644b981a88f529d79c5bdf646b460ba190bffcaf6d57b2a9863"}, - {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:741ba2f511cc9626b7561a440f87d658aabb3d6b744a86a3c025f866b4d19e7f"}, - {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e77c90ab5997e85901da85131fd36acd0ed2221368199b65f0d11bca44549711"}, - {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:979c24cbefaf2420c4e377ecd1f165ea08cc3d1fbb44bdc51bccbbf7c66a2cb4"}, - {file = "regex-2023.10.3-cp38-cp38-win32.whl", hash = "sha256:58837f9d221744d4c92d2cf7201c6acd19623b50c643b56992cbd2b745485d3d"}, - {file = "regex-2023.10.3-cp38-cp38-win_amd64.whl", hash = "sha256:c55853684fe08d4897c37dfc5faeff70607a5f1806c8be148f1695be4a63414b"}, - {file = "regex-2023.10.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2c54e23836650bdf2c18222c87f6f840d4943944146ca479858404fedeb9f9af"}, - {file = "regex-2023.10.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:69c0771ca5653c7d4b65203cbfc5e66db9375f1078689459fe196fe08b7b4930"}, - {file = "regex-2023.10.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ac965a998e1388e6ff2e9781f499ad1eaa41e962a40d11c7823c9952c77123e"}, - {file = "regex-2023.10.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1c0e8fae5b27caa34177bdfa5a960c46ff2f78ee2d45c6db15ae3f64ecadde14"}, - {file = "regex-2023.10.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6c56c3d47da04f921b73ff9415fbaa939f684d47293f071aa9cbb13c94afc17d"}, - {file = "regex-2023.10.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ef1e014eed78ab650bef9a6a9cbe50b052c0aebe553fb2881e0453717573f52"}, - {file = "regex-2023.10.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d29338556a59423d9ff7b6eb0cb89ead2b0875e08fe522f3e068b955c3e7b59b"}, - {file = "regex-2023.10.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9c6d0ced3c06d0f183b73d3c5920727268d2201aa0fe6d55c60d68c792ff3588"}, - {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:994645a46c6a740ee8ce8df7911d4aee458d9b1bc5639bc968226763d07f00fa"}, - {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:66e2fe786ef28da2b28e222c89502b2af984858091675044d93cb50e6f46d7af"}, - {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:11175910f62b2b8c055f2b089e0fedd694fe2be3941b3e2633653bc51064c528"}, - {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:06e9abc0e4c9ab4779c74ad99c3fc10d3967d03114449acc2c2762ad4472b8ca"}, - {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:fb02e4257376ae25c6dd95a5aec377f9b18c09be6ebdefa7ad209b9137b73d48"}, - {file = "regex-2023.10.3-cp39-cp39-win32.whl", hash = "sha256:3b2c3502603fab52d7619b882c25a6850b766ebd1b18de3df23b2f939360e1bd"}, - {file = "regex-2023.10.3-cp39-cp39-win_amd64.whl", hash = "sha256:adbccd17dcaff65704c856bd29951c58a1bd4b2b0f8ad6b826dbd543fe740988"}, - {file = "regex-2023.10.3.tar.gz", hash = "sha256:3fef4f844d2290ee0ba57addcec17eec9e3df73f10a2748485dfd6a3a188cc0f"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_universal2.whl", hash = 
"sha256:0694219a1d54336fd0445ea382d49d36882415c0134ee1e8332afd1529f0baa5"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b014333bd0217ad3d54c143de9d4b9a3ca1c5a29a6d0d554952ea071cff0f1f8"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d865984b3f71f6d0af64d0d88f5733521698f6c16f445bb09ce746c92c97c586"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e0eabac536b4cc7f57a5f3d095bfa557860ab912f25965e08fe1545e2ed8b4c"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c25a8ad70e716f96e13a637802813f65d8a6760ef48672aa3502f4c24ea8b400"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9b6d73353f777630626f403b0652055ebfe8ff142a44ec2cf18ae470395766e"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9cc99d6946d750eb75827cb53c4371b8b0fe89c733a94b1573c9dd16ea6c9e4"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88d1f7bef20c721359d8675f7d9f8e414ec5003d8f642fdfd8087777ff7f94b5"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cb3fe77aec8f1995611f966d0c656fdce398317f850d0e6e7aebdfe61f40e1cd"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7aa47c2e9ea33a4a2a05f40fcd3ea36d73853a2aae7b4feab6fc85f8bf2c9704"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:df26481f0c7a3f8739fecb3e81bc9da3fcfae34d6c094563b9d4670b047312e1"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c40281f7d70baf6e0db0c2f7472b31609f5bc2748fe7275ea65a0b4601d9b392"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:d94a1db462d5690ebf6ae86d11c5e420042b9898af5dcf278bd97d6bda065423"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ba1b30765a55acf15dce3f364e4928b80858fa8f979ad41f862358939bdd1f2f"}, + {file = "regex-2023.12.25-cp310-cp310-win32.whl", hash = "sha256:150c39f5b964e4d7dba46a7962a088fbc91f06e606f023ce57bb347a3b2d4630"}, + {file = "regex-2023.12.25-cp310-cp310-win_amd64.whl", hash = "sha256:09da66917262d9481c719599116c7dc0c321ffcec4b1f510c4f8a066f8768105"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1b9d811f72210fa9306aeb88385b8f8bcef0dfbf3873410413c00aa94c56c2b6"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d902a43085a308cef32c0d3aea962524b725403fd9373dea18110904003bac97"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d166eafc19f4718df38887b2bbe1467a4f74a9830e8605089ea7a30dd4da8887"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7ad32824b7f02bb3c9f80306d405a1d9b7bb89362d68b3c5a9be53836caebdb"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:636ba0a77de609d6510235b7f0e77ec494d2657108f777e8765efc060094c98c"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fda75704357805eb953a3ee15a2b240694a9a514548cd49b3c5124b4e2ad01b"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:f72cbae7f6b01591f90814250e636065850c5926751af02bb48da94dfced7baa"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db2a0b1857f18b11e3b0e54ddfefc96af46b0896fb678c85f63fb8c37518b3e7"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7502534e55c7c36c0978c91ba6f61703faf7ce733715ca48f499d3dbbd7657e0"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e8c7e08bb566de4faaf11984af13f6bcf6a08f327b13631d41d62592681d24fe"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:283fc8eed679758de38fe493b7d7d84a198b558942b03f017b1f94dda8efae80"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:f44dd4d68697559d007462b0a3a1d9acd61d97072b71f6d1968daef26bc744bd"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:67d3ccfc590e5e7197750fcb3a2915b416a53e2de847a728cfa60141054123d4"}, + {file = "regex-2023.12.25-cp311-cp311-win32.whl", hash = "sha256:68191f80a9bad283432385961d9efe09d783bcd36ed35a60fb1ff3f1ec2efe87"}, + {file = "regex-2023.12.25-cp311-cp311-win_amd64.whl", hash = "sha256:7d2af3f6b8419661a0c421584cfe8aaec1c0e435ce7e47ee2a97e344b98f794f"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8a0ccf52bb37d1a700375a6b395bff5dd15c50acb745f7db30415bae3c2b0715"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c3c4a78615b7762740531c27cf46e2f388d8d727d0c0c739e72048beb26c8a9d"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ad83e7545b4ab69216cef4cc47e344d19622e28aabec61574b20257c65466d6a"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7a635871143661feccce3979e1727c4e094f2bdfd3ec4b90dfd4f16f571a87a"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d498eea3f581fbe1b34b59c697512a8baef88212f92e4c7830fcc1499f5b45a5"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:43f7cd5754d02a56ae4ebb91b33461dc67be8e3e0153f593c509e21d219c5060"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51f4b32f793812714fd5307222a7f77e739b9bc566dc94a18126aba3b92b98a3"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba99d8077424501b9616b43a2d208095746fb1284fc5ba490139651f971d39d9"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4bfc2b16e3ba8850e0e262467275dd4d62f0d045e0e9eda2bc65078c0110a11f"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8c2c19dae8a3eb0ea45a8448356ed561be843b13cbc34b840922ddf565498c1c"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:60080bb3d8617d96f0fb7e19796384cc2467447ef1c491694850ebd3670bc457"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b77e27b79448e34c2c51c09836033056a0547aa360c45eeeb67803da7b0eedaf"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:518440c991f514331f4850a63560321f833979d145d7d81186dbe2f19e27ae3d"}, + {file = "regex-2023.12.25-cp312-cp312-win32.whl", hash = "sha256:e2610e9406d3b0073636a3a2e80db05a02f0c3169b5632022b4e81c0364bcda5"}, + {file = 
"regex-2023.12.25-cp312-cp312-win_amd64.whl", hash = "sha256:cc37b9aeebab425f11f27e5e9e6cf580be7206c6582a64467a14dda211abc232"}, + {file = "regex-2023.12.25-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:da695d75ac97cb1cd725adac136d25ca687da4536154cdc2815f576e4da11c69"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d126361607b33c4eb7b36debc173bf25d7805847346dd4d99b5499e1fef52bc7"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4719bb05094d7d8563a450cf8738d2e1061420f79cfcc1fa7f0a44744c4d8f73"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5dd58946bce44b53b06d94aa95560d0b243eb2fe64227cba50017a8d8b3cd3e2"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22a86d9fff2009302c440b9d799ef2fe322416d2d58fc124b926aa89365ec482"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2aae8101919e8aa05ecfe6322b278f41ce2994c4a430303c4cd163fef746e04f"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e692296c4cc2873967771345a876bcfc1c547e8dd695c6b89342488b0ea55cd8"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:263ef5cc10979837f243950637fffb06e8daed7f1ac1e39d5910fd29929e489a"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:d6f7e255e5fa94642a0724e35406e6cb7001c09d476ab5fce002f652b36d0c39"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:88ad44e220e22b63b0f8f81f007e8abbb92874d8ced66f32571ef8beb0643b2b"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:3a17d3ede18f9cedcbe23d2daa8a2cd6f59fe2bf082c567e43083bba3fb00347"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d15b274f9e15b1a0b7a45d2ac86d1f634d983ca40d6b886721626c47a400bf39"}, + {file = "regex-2023.12.25-cp37-cp37m-win32.whl", hash = "sha256:ed19b3a05ae0c97dd8f75a5d8f21f7723a8c33bbc555da6bbe1f96c470139d3c"}, + {file = "regex-2023.12.25-cp37-cp37m-win_amd64.whl", hash = "sha256:a6d1047952c0b8104a1d371f88f4ab62e6275567d4458c1e26e9627ad489b445"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b43523d7bc2abd757119dbfb38af91b5735eea45537ec6ec3a5ec3f9562a1c53"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:efb2d82f33b2212898f1659fb1c2e9ac30493ac41e4d53123da374c3b5541e64"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b7fca9205b59c1a3d5031f7e64ed627a1074730a51c2a80e97653e3e9fa0d415"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086dd15e9435b393ae06f96ab69ab2d333f5d65cbe65ca5a3ef0ec9564dfe770"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e81469f7d01efed9b53740aedd26085f20d49da65f9c1f41e822a33992cb1590"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:34e4af5b27232f68042aa40a91c3b9bb4da0eeb31b7632e0091afc4310afe6cb"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9852b76ab558e45b20bf1893b59af64a28bd3820b0c2efc80e0a70a4a3ea51c1"}, + {file = 
"regex-2023.12.25-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff100b203092af77d1a5a7abe085b3506b7eaaf9abf65b73b7d6905b6cb76988"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cc038b2d8b1470364b1888a98fd22d616fba2b6309c5b5f181ad4483e0017861"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:094ba386bb5c01e54e14434d4caabf6583334090865b23ef58e0424a6286d3dc"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5cd05d0f57846d8ba4b71d9c00f6f37d6b97d5e5ef8b3c3840426a475c8f70f4"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:9aa1a67bbf0f957bbe096375887b2505f5d8ae16bf04488e8b0f334c36e31360"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:98a2636994f943b871786c9e82bfe7883ecdaba2ef5df54e1450fa9869d1f756"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:37f8e93a81fc5e5bd8db7e10e62dc64261bcd88f8d7e6640aaebe9bc180d9ce2"}, + {file = "regex-2023.12.25-cp38-cp38-win32.whl", hash = "sha256:d78bd484930c1da2b9679290a41cdb25cc127d783768a0369d6b449e72f88beb"}, + {file = "regex-2023.12.25-cp38-cp38-win_amd64.whl", hash = "sha256:b521dcecebc5b978b447f0f69b5b7f3840eac454862270406a39837ffae4e697"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f7bc09bc9c29ebead055bcba136a67378f03d66bf359e87d0f7c759d6d4ffa31"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e14b73607d6231f3cc4622809c196b540a6a44e903bcfad940779c80dffa7be7"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9eda5f7a50141291beda3edd00abc2d4a5b16c29c92daf8d5bd76934150f3edc"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc6bb9aa69aacf0f6032c307da718f61a40cf970849e471254e0e91c56ffca95"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:298dc6354d414bc921581be85695d18912bea163a8b23cac9a2562bbcd5088b1"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f4e475a80ecbd15896a976aa0b386c5525d0ed34d5c600b6d3ebac0a67c7ddf"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:531ac6cf22b53e0696f8e1d56ce2396311254eb806111ddd3922c9d937151dae"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22f3470f7524b6da61e2020672df2f3063676aff444db1daa283c2ea4ed259d6"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:89723d2112697feaa320c9d351e5f5e7b841e83f8b143dba8e2d2b5f04e10923"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0ecf44ddf9171cd7566ef1768047f6e66975788258b1c6c6ca78098b95cf9a3d"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:905466ad1702ed4acfd67a902af50b8db1feeb9781436372261808df7a2a7bca"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:4558410b7a5607a645e9804a3e9dd509af12fb72b9825b13791a37cd417d73a5"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:7e316026cc1095f2a3e8cc012822c99f413b702eaa2ca5408a513609488cb62f"}, + {file = 
"regex-2023.12.25-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3b1de218d5375cd6ac4b5493e0b9f3df2be331e86520f23382f216c137913d20"}, + {file = "regex-2023.12.25-cp39-cp39-win32.whl", hash = "sha256:11a963f8e25ab5c61348d090bf1b07f1953929c13bd2309a0662e9ff680763c9"}, + {file = "regex-2023.12.25-cp39-cp39-win_amd64.whl", hash = "sha256:e693e233ac92ba83a87024e1d32b5f9ab15ca55ddd916d878146f4e3406b5c91"}, + {file = "regex-2023.12.25.tar.gz", hash = "sha256:29171aa128da69afdf4bde412d5bedc335f2ca8fcfe4489038577d05f16181e5"}, ] [[package]] @@ -3397,23 +3582,22 @@ rsa = ["oauthlib[signedtoken] (>=3.0.0)"] [[package]] name = "responses" -version = "0.23.3" +version = "0.25.0" description = "A utility library for mocking out the `requests` Python library." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "responses-0.23.3-py3-none-any.whl", hash = "sha256:e6fbcf5d82172fecc0aa1860fd91e58cbfd96cee5e96da5b63fa6eb3caa10dd3"}, - {file = "responses-0.23.3.tar.gz", hash = "sha256:205029e1cb334c21cb4ec64fc7599be48b859a0fd381a42443cdd600bfe8b16a"}, + {file = "responses-0.25.0-py3-none-any.whl", hash = "sha256:2f0b9c2b6437db4b528619a77e5d565e4ec2a9532162ac1a131a83529db7be1a"}, + {file = "responses-0.25.0.tar.gz", hash = "sha256:01ae6a02b4f34e39bffceb0fc6786b67a25eae919c6368d05eabc8d9576c2a66"}, ] [package.dependencies] pyyaml = "*" requests = ">=2.30.0,<3.0" -types-PyYAML = "*" urllib3 = ">=1.25.10,<3.0" [package.extras] -tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "tomli", "tomli-w", "types-requests"] +tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "tomli", "tomli-w", "types-PyYAML", "types-requests"] [[package]] name = "rfc3339-validator" @@ -3450,115 +3634,117 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "rpds-py" -version = "0.10.3" +version = "0.18.0" description = "Python bindings to Rust's persistent data structures (rpds)" optional = false python-versions = ">=3.8" files = [ - {file = "rpds_py-0.10.3-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:485747ee62da83366a44fbba963c5fe017860ad408ccd6cd99aa66ea80d32b2e"}, - {file = "rpds_py-0.10.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c55f9821f88e8bee4b7a72c82cfb5ecd22b6aad04033334f33c329b29bfa4da0"}, - {file = "rpds_py-0.10.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3b52a67ac66a3a64a7e710ba629f62d1e26ca0504c29ee8cbd99b97df7079a8"}, - {file = "rpds_py-0.10.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3aed39db2f0ace76faa94f465d4234aac72e2f32b009f15da6492a561b3bbebd"}, - {file = "rpds_py-0.10.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:271c360fdc464fe6a75f13ea0c08ddf71a321f4c55fc20a3fe62ea3ef09df7d9"}, - {file = "rpds_py-0.10.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ef5fddfb264e89c435be4adb3953cef5d2936fdeb4463b4161a6ba2f22e7b740"}, - {file = "rpds_py-0.10.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a771417c9c06c56c9d53d11a5b084d1de75de82978e23c544270ab25e7c066ff"}, - {file = "rpds_py-0.10.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:52b5cbc0469328e58180021138207e6ec91d7ca2e037d3549cc9e34e2187330a"}, - {file = "rpds_py-0.10.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:6ac3fefb0d168c7c6cab24fdfc80ec62cd2b4dfd9e65b84bdceb1cb01d385c33"}, - {file = "rpds_py-0.10.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:8d54bbdf5d56e2c8cf81a1857250f3ea132de77af543d0ba5dce667183b61fec"}, - {file = "rpds_py-0.10.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cd2163f42868865597d89399a01aa33b7594ce8e2c4a28503127c81a2f17784e"}, - {file = "rpds_py-0.10.3-cp310-none-win32.whl", hash = "sha256:ea93163472db26ac6043e8f7f93a05d9b59e0505c760da2a3cd22c7dd7111391"}, - {file = "rpds_py-0.10.3-cp310-none-win_amd64.whl", hash = "sha256:7cd020b1fb41e3ab7716d4d2c3972d4588fdfbab9bfbbb64acc7078eccef8860"}, - {file = "rpds_py-0.10.3-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:1d9b5ee46dcb498fa3e46d4dfabcb531e1f2e76b477e0d99ef114f17bbd38453"}, - {file = "rpds_py-0.10.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:563646d74a4b4456d0cf3b714ca522e725243c603e8254ad85c3b59b7c0c4bf0"}, - {file = "rpds_py-0.10.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e626b864725680cd3904414d72e7b0bd81c0e5b2b53a5b30b4273034253bb41f"}, - {file = "rpds_py-0.10.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:485301ee56ce87a51ccb182a4b180d852c5cb2b3cb3a82f7d4714b4141119d8c"}, - {file = "rpds_py-0.10.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:42f712b4668831c0cd85e0a5b5a308700fe068e37dcd24c0062904c4e372b093"}, - {file = "rpds_py-0.10.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6c9141af27a4e5819d74d67d227d5047a20fa3c7d4d9df43037a955b4c748ec5"}, - {file = "rpds_py-0.10.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef750a20de1b65657a1425f77c525b0183eac63fe7b8f5ac0dd16f3668d3e64f"}, - {file = "rpds_py-0.10.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e1a0ffc39f51aa5f5c22114a8f1906b3c17eba68c5babb86c5f77d8b1bba14d1"}, - {file = "rpds_py-0.10.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:f4c179a7aeae10ddf44c6bac87938134c1379c49c884529f090f9bf05566c836"}, - {file = "rpds_py-0.10.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:176287bb998fd1e9846a9b666e240e58f8d3373e3bf87e7642f15af5405187b8"}, - {file = "rpds_py-0.10.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6446002739ca29249f0beaaf067fcbc2b5aab4bc7ee8fb941bd194947ce19aff"}, - {file = "rpds_py-0.10.3-cp311-none-win32.whl", hash = "sha256:c7aed97f2e676561416c927b063802c8a6285e9b55e1b83213dfd99a8f4f9e48"}, - {file = "rpds_py-0.10.3-cp311-none-win_amd64.whl", hash = "sha256:8bd01ff4032abaed03f2db702fa9a61078bee37add0bd884a6190b05e63b028c"}, - {file = "rpds_py-0.10.3-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:4cf0855a842c5b5c391dd32ca273b09e86abf8367572073bd1edfc52bc44446b"}, - {file = "rpds_py-0.10.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:69b857a7d8bd4f5d6e0db4086da8c46309a26e8cefdfc778c0c5cc17d4b11e08"}, - {file = "rpds_py-0.10.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:975382d9aa90dc59253d6a83a5ca72e07f4ada3ae3d6c0575ced513db322b8ec"}, - {file = "rpds_py-0.10.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:35fbd23c1c8732cde7a94abe7fb071ec173c2f58c0bd0d7e5b669fdfc80a2c7b"}, - {file = "rpds_py-0.10.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:106af1653007cc569d5fbb5f08c6648a49fe4de74c2df814e234e282ebc06957"}, - {file = "rpds_py-0.10.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:ce5e7504db95b76fc89055c7f41e367eaadef5b1d059e27e1d6eabf2b55ca314"}, - {file = "rpds_py-0.10.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aca759ada6b1967fcfd4336dcf460d02a8a23e6abe06e90ea7881e5c22c4de6"}, - {file = "rpds_py-0.10.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b5d4bdd697195f3876d134101c40c7d06d46c6ab25159ed5cbd44105c715278a"}, - {file = "rpds_py-0.10.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a657250807b6efd19b28f5922520ae002a54cb43c2401e6f3d0230c352564d25"}, - {file = "rpds_py-0.10.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:177c9dd834cdf4dc39c27436ade6fdf9fe81484758885f2d616d5d03c0a83bd2"}, - {file = "rpds_py-0.10.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e22491d25f97199fc3581ad8dd8ce198d8c8fdb8dae80dea3512e1ce6d5fa99f"}, - {file = "rpds_py-0.10.3-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:2f3e1867dd574014253b4b8f01ba443b9c914e61d45f3674e452a915d6e929a3"}, - {file = "rpds_py-0.10.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c22211c165166de6683de8136229721f3d5c8606cc2c3d1562da9a3a5058049c"}, - {file = "rpds_py-0.10.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40bc802a696887b14c002edd43c18082cb7b6f9ee8b838239b03b56574d97f71"}, - {file = "rpds_py-0.10.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e271dd97c7bb8eefda5cca38cd0b0373a1fea50f71e8071376b46968582af9b"}, - {file = "rpds_py-0.10.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:95cde244e7195b2c07ec9b73fa4c5026d4a27233451485caa1cd0c1b55f26dbd"}, - {file = "rpds_py-0.10.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08a80cf4884920863623a9ee9a285ee04cef57ebedc1cc87b3e3e0f24c8acfe5"}, - {file = "rpds_py-0.10.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:763ad59e105fca09705d9f9b29ecffb95ecdc3b0363be3bb56081b2c6de7977a"}, - {file = "rpds_py-0.10.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:187700668c018a7e76e89424b7c1042f317c8df9161f00c0c903c82b0a8cac5c"}, - {file = "rpds_py-0.10.3-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:5267cfda873ad62591b9332fd9472d2409f7cf02a34a9c9cb367e2c0255994bf"}, - {file = "rpds_py-0.10.3-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:2ed83d53a8c5902ec48b90b2ac045e28e1698c0bea9441af9409fc844dc79496"}, - {file = "rpds_py-0.10.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:255f1a10ae39b52122cce26ce0781f7a616f502feecce9e616976f6a87992d6b"}, - {file = "rpds_py-0.10.3-cp38-none-win32.whl", hash = "sha256:a019a344312d0b1f429c00d49c3be62fa273d4a1094e1b224f403716b6d03be1"}, - {file = "rpds_py-0.10.3-cp38-none-win_amd64.whl", hash = "sha256:efb9ece97e696bb56e31166a9dd7919f8f0c6b31967b454718c6509f29ef6fee"}, - {file = "rpds_py-0.10.3-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:570cc326e78ff23dec7f41487aa9c3dffd02e5ee9ab43a8f6ccc3df8f9327623"}, - {file = "rpds_py-0.10.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cff7351c251c7546407827b6a37bcef6416304fc54d12d44dbfecbb717064717"}, - {file = "rpds_py-0.10.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:177914f81f66c86c012311f8c7f46887ec375cfcfd2a2f28233a3053ac93a569"}, - {file = "rpds_py-0.10.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:448a66b8266de0b581246ca7cd6a73b8d98d15100fb7165974535fa3b577340e"}, - {file = "rpds_py-0.10.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", 
hash = "sha256:3bbac1953c17252f9cc675bb19372444aadf0179b5df575ac4b56faaec9f6294"}, - {file = "rpds_py-0.10.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9dd9d9d9e898b9d30683bdd2b6c1849449158647d1049a125879cb397ee9cd12"}, - {file = "rpds_py-0.10.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8c71ea77536149e36c4c784f6d420ffd20bea041e3ba21ed021cb40ce58e2c9"}, - {file = "rpds_py-0.10.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:16a472300bc6c83fe4c2072cc22b3972f90d718d56f241adabc7ae509f53f154"}, - {file = "rpds_py-0.10.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b9255e7165083de7c1d605e818025e8860636348f34a79d84ec533546064f07e"}, - {file = "rpds_py-0.10.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:53d7a3cd46cdc1689296348cb05ffd4f4280035770aee0c8ead3bbd4d6529acc"}, - {file = "rpds_py-0.10.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:22da15b902f9f8e267020d1c8bcfc4831ca646fecb60254f7bc71763569f56b1"}, - {file = "rpds_py-0.10.3-cp39-none-win32.whl", hash = "sha256:850c272e0e0d1a5c5d73b1b7871b0a7c2446b304cec55ccdb3eaac0d792bb065"}, - {file = "rpds_py-0.10.3-cp39-none-win_amd64.whl", hash = "sha256:de61e424062173b4f70eec07e12469edde7e17fa180019a2a0d75c13a5c5dc57"}, - {file = "rpds_py-0.10.3-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:af247fd4f12cca4129c1b82090244ea5a9d5bb089e9a82feb5a2f7c6a9fe181d"}, - {file = "rpds_py-0.10.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:3ad59efe24a4d54c2742929001f2d02803aafc15d6d781c21379e3f7f66ec842"}, - {file = "rpds_py-0.10.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:642ed0a209ced4be3a46f8cb094f2d76f1f479e2a1ceca6de6346a096cd3409d"}, - {file = "rpds_py-0.10.3-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:37d0c59548ae56fae01c14998918d04ee0d5d3277363c10208eef8c4e2b68ed6"}, - {file = "rpds_py-0.10.3-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aad6ed9e70ddfb34d849b761fb243be58c735be6a9265b9060d6ddb77751e3e8"}, - {file = "rpds_py-0.10.3-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8f94fdd756ba1f79f988855d948ae0bad9ddf44df296770d9a58c774cfbcca72"}, - {file = "rpds_py-0.10.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77076bdc8776a2b029e1e6ffbe6d7056e35f56f5e80d9dc0bad26ad4a024a762"}, - {file = "rpds_py-0.10.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:87d9b206b1bd7a0523375dc2020a6ce88bca5330682ae2fe25e86fd5d45cea9c"}, - {file = "rpds_py-0.10.3-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:8efaeb08ede95066da3a3e3c420fcc0a21693fcd0c4396d0585b019613d28515"}, - {file = "rpds_py-0.10.3-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:a4d9bfda3f84fc563868fe25ca160c8ff0e69bc4443c5647f960d59400ce6557"}, - {file = "rpds_py-0.10.3-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:d27aa6bbc1f33be920bb7adbb95581452cdf23005d5611b29a12bb6a3468cc95"}, - {file = "rpds_py-0.10.3-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:ed8313809571a5463fd7db43aaca68ecb43ca7a58f5b23b6e6c6c5d02bdc7882"}, - {file = "rpds_py-0.10.3-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:e10e6a1ed2b8661201e79dff5531f8ad4cdd83548a0f81c95cf79b3184b20c33"}, - {file = "rpds_py-0.10.3-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:015de2ce2af1586ff5dc873e804434185199a15f7d96920ce67e50604592cae9"}, - {file = "rpds_py-0.10.3-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ae87137951bb3dc08c7d8bfb8988d8c119f3230731b08a71146e84aaa919a7a9"}, - {file = "rpds_py-0.10.3-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0bb4f48bd0dd18eebe826395e6a48b7331291078a879295bae4e5d053be50d4c"}, - {file = "rpds_py-0.10.3-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:09362f86ec201288d5687d1dc476b07bf39c08478cde837cb710b302864e7ec9"}, - {file = "rpds_py-0.10.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:821392559d37759caa67d622d0d2994c7a3f2fb29274948ac799d496d92bca73"}, - {file = "rpds_py-0.10.3-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7170cbde4070dc3c77dec82abf86f3b210633d4f89550fa0ad2d4b549a05572a"}, - {file = "rpds_py-0.10.3-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:5de11c041486681ce854c814844f4ce3282b6ea1656faae19208ebe09d31c5b8"}, - {file = "rpds_py-0.10.3-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:4ed172d0c79f156c1b954e99c03bc2e3033c17efce8dd1a7c781bc4d5793dfac"}, - {file = "rpds_py-0.10.3-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:11fdd1192240dda8d6c5d18a06146e9045cb7e3ba7c06de6973000ff035df7c6"}, - {file = "rpds_py-0.10.3-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:f602881d80ee4228a2355c68da6b296a296cd22bbb91e5418d54577bbf17fa7c"}, - {file = "rpds_py-0.10.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:691d50c99a937709ac4c4cd570d959a006bd6a6d970a484c84cc99543d4a5bbb"}, - {file = "rpds_py-0.10.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:24cd91a03543a0f8d09cb18d1cb27df80a84b5553d2bd94cba5979ef6af5c6e7"}, - {file = "rpds_py-0.10.3-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fc2200e79d75b5238c8d69f6a30f8284290c777039d331e7340b6c17cad24a5a"}, - {file = "rpds_py-0.10.3-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea65b59882d5fa8c74a23f8960db579e5e341534934f43f3b18ec1839b893e41"}, - {file = "rpds_py-0.10.3-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:829e91f3a8574888b73e7a3feb3b1af698e717513597e23136ff4eba0bc8387a"}, - {file = "rpds_py-0.10.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eab75a8569a095f2ad470b342f2751d9902f7944704f0571c8af46bede438475"}, - {file = "rpds_py-0.10.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:061c3ff1f51ecec256e916cf71cc01f9975af8fb3af9b94d3c0cc8702cfea637"}, - {file = "rpds_py-0.10.3-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:39d05e65f23a0fe897b6ac395f2a8d48c56ac0f583f5d663e0afec1da89b95da"}, - {file = "rpds_py-0.10.3-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:4eca20917a06d2fca7628ef3c8b94a8c358f6b43f1a621c9815243462dcccf97"}, - {file = "rpds_py-0.10.3-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:e8d0f0eca087630d58b8c662085529781fd5dc80f0a54eda42d5c9029f812599"}, - {file = "rpds_py-0.10.3.tar.gz", hash = "sha256:fcc1ebb7561a3e24a6588f7c6ded15d80aec22c66a070c757559b57b17ffd1cb"}, + {file = "rpds_py-0.18.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:5b4e7d8d6c9b2e8ee2d55c90b59c707ca59bc30058269b3db7b1f8df5763557e"}, + {file = "rpds_py-0.18.0-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:c463ed05f9dfb9baebef68048aed8dcdc94411e4bf3d33a39ba97e271624f8f7"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01e36a39af54a30f28b73096dd39b6802eddd04c90dbe161c1b8dbe22353189f"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d62dec4976954a23d7f91f2f4530852b0c7608116c257833922a896101336c51"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dd18772815d5f008fa03d2b9a681ae38d5ae9f0e599f7dda233c439fcaa00d40"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:923d39efa3cfb7279a0327e337a7958bff00cc447fd07a25cddb0a1cc9a6d2da"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39514da80f971362f9267c600b6d459bfbbc549cffc2cef8e47474fddc9b45b1"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a34d557a42aa28bd5c48a023c570219ba2593bcbbb8dc1b98d8cf5d529ab1434"}, + {file = "rpds_py-0.18.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:93df1de2f7f7239dc9cc5a4a12408ee1598725036bd2dedadc14d94525192fc3"}, + {file = "rpds_py-0.18.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:34b18ba135c687f4dac449aa5157d36e2cbb7c03cbea4ddbd88604e076aa836e"}, + {file = "rpds_py-0.18.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c0b5dcf9193625afd8ecc92312d6ed78781c46ecbf39af9ad4681fc9f464af88"}, + {file = "rpds_py-0.18.0-cp310-none-win32.whl", hash = "sha256:c4325ff0442a12113a6379af66978c3fe562f846763287ef66bdc1d57925d337"}, + {file = "rpds_py-0.18.0-cp310-none-win_amd64.whl", hash = "sha256:7223a2a5fe0d217e60a60cdae28d6949140dde9c3bcc714063c5b463065e3d66"}, + {file = "rpds_py-0.18.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:3a96e0c6a41dcdba3a0a581bbf6c44bb863f27c541547fb4b9711fd8cf0ffad4"}, + {file = "rpds_py-0.18.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30f43887bbae0d49113cbaab729a112251a940e9b274536613097ab8b4899cf6"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fcb25daa9219b4cf3a0ab24b0eb9a5cc8949ed4dc72acb8fa16b7e1681aa3c58"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d68c93e381010662ab873fea609bf6c0f428b6d0bb00f2c6939782e0818d37bf"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b34b7aa8b261c1dbf7720b5d6f01f38243e9b9daf7e6b8bc1fd4657000062f2c"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2e6d75ab12b0bbab7215e5d40f1e5b738aa539598db27ef83b2ec46747df90e1"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b8612cd233543a3781bc659c731b9d607de65890085098986dfd573fc2befe5"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:aec493917dd45e3c69d00a8874e7cbed844efd935595ef78a0f25f14312e33c6"}, + {file = "rpds_py-0.18.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:661d25cbffaf8cc42e971dd570d87cb29a665f49f4abe1f9e76be9a5182c4688"}, + {file = "rpds_py-0.18.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1df3659d26f539ac74fb3b0c481cdf9d725386e3552c6fa2974f4d33d78e544b"}, + {file = "rpds_py-0.18.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a1ce3ba137ed54f83e56fb983a5859a27d43a40188ba798993812fed73c70836"}, + 
{file = "rpds_py-0.18.0-cp311-none-win32.whl", hash = "sha256:69e64831e22a6b377772e7fb337533c365085b31619005802a79242fee620bc1"}, + {file = "rpds_py-0.18.0-cp311-none-win_amd64.whl", hash = "sha256:998e33ad22dc7ec7e030b3df701c43630b5bc0d8fbc2267653577e3fec279afa"}, + {file = "rpds_py-0.18.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:7f2facbd386dd60cbbf1a794181e6aa0bd429bd78bfdf775436020172e2a23f0"}, + {file = "rpds_py-0.18.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1d9a5be316c15ffb2b3c405c4ff14448c36b4435be062a7f578ccd8b01f0c4d8"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd5bf1af8efe569654bbef5a3e0a56eca45f87cfcffab31dd8dde70da5982475"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5417558f6887e9b6b65b4527232553c139b57ec42c64570569b155262ac0754f"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:56a737287efecafc16f6d067c2ea0117abadcd078d58721f967952db329a3e5c"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8f03bccbd8586e9dd37219bce4d4e0d3ab492e6b3b533e973fa08a112cb2ffc9"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4457a94da0d5c53dc4b3e4de1158bdab077db23c53232f37a3cb7afdb053a4e3"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0ab39c1ba9023914297dd88ec3b3b3c3f33671baeb6acf82ad7ce883f6e8e157"}, + {file = "rpds_py-0.18.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9d54553c1136b50fd12cc17e5b11ad07374c316df307e4cfd6441bea5fb68496"}, + {file = "rpds_py-0.18.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0af039631b6de0397ab2ba16eaf2872e9f8fca391b44d3d8cac317860a700a3f"}, + {file = "rpds_py-0.18.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:84ffab12db93b5f6bad84c712c92060a2d321b35c3c9960b43d08d0f639d60d7"}, + {file = "rpds_py-0.18.0-cp312-none-win32.whl", hash = "sha256:685537e07897f173abcf67258bee3c05c374fa6fff89d4c7e42fb391b0605e98"}, + {file = "rpds_py-0.18.0-cp312-none-win_amd64.whl", hash = "sha256:e003b002ec72c8d5a3e3da2989c7d6065b47d9eaa70cd8808b5384fbb970f4ec"}, + {file = "rpds_py-0.18.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:08f9ad53c3f31dfb4baa00da22f1e862900f45908383c062c27628754af2e88e"}, + {file = "rpds_py-0.18.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c0013fe6b46aa496a6749c77e00a3eb07952832ad6166bd481c74bda0dcb6d58"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e32a92116d4f2a80b629778280103d2a510a5b3f6314ceccd6e38006b5e92dcb"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e541ec6f2ec456934fd279a3120f856cd0aedd209fc3852eca563f81738f6861"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bed88b9a458e354014d662d47e7a5baafd7ff81c780fd91584a10d6ec842cb73"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2644e47de560eb7bd55c20fc59f6daa04682655c58d08185a9b95c1970fa1e07"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e8916ae4c720529e18afa0b879473049e95949bf97042e938530e072fde061d"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:465a3eb5659338cf2a9243e50ad9b2296fa15061736d6e26240e713522b6235c"}, + {file = "rpds_py-0.18.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ea7d4a99f3b38c37eac212dbd6ec42b7a5ec51e2c74b5d3223e43c811609e65f"}, + {file = "rpds_py-0.18.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:67071a6171e92b6da534b8ae326505f7c18022c6f19072a81dcf40db2638767c"}, + {file = "rpds_py-0.18.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:41ef53e7c58aa4ef281da975f62c258950f54b76ec8e45941e93a3d1d8580594"}, + {file = "rpds_py-0.18.0-cp38-none-win32.whl", hash = "sha256:fdea4952db2793c4ad0bdccd27c1d8fdd1423a92f04598bc39425bcc2b8ee46e"}, + {file = "rpds_py-0.18.0-cp38-none-win_amd64.whl", hash = "sha256:7cd863afe7336c62ec78d7d1349a2f34c007a3cc6c2369d667c65aeec412a5b1"}, + {file = "rpds_py-0.18.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:5307def11a35f5ae4581a0b658b0af8178c65c530e94893345bebf41cc139d33"}, + {file = "rpds_py-0.18.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:77f195baa60a54ef9d2de16fbbfd3ff8b04edc0c0140a761b56c267ac11aa467"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39f5441553f1c2aed4de4377178ad8ff8f9d733723d6c66d983d75341de265ab"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9a00312dea9310d4cb7dbd7787e722d2e86a95c2db92fbd7d0155f97127bcb40"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8f2fc11e8fe034ee3c34d316d0ad8808f45bc3b9ce5857ff29d513f3ff2923a1"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:586f8204935b9ec884500498ccc91aa869fc652c40c093bd9e1471fbcc25c022"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ddc2f4dfd396c7bfa18e6ce371cba60e4cf9d2e5cdb71376aa2da264605b60b9"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5ddcba87675b6d509139d1b521e0c8250e967e63b5909a7e8f8944d0f90ff36f"}, + {file = "rpds_py-0.18.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7bd339195d84439cbe5771546fe8a4e8a7a045417d8f9de9a368c434e42a721e"}, + {file = "rpds_py-0.18.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:d7c36232a90d4755b720fbd76739d8891732b18cf240a9c645d75f00639a9024"}, + {file = "rpds_py-0.18.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6b0817e34942b2ca527b0e9298373e7cc75f429e8da2055607f4931fded23e20"}, + {file = "rpds_py-0.18.0-cp39-none-win32.whl", hash = "sha256:99f70b740dc04d09e6b2699b675874367885217a2e9f782bdf5395632ac663b7"}, + {file = "rpds_py-0.18.0-cp39-none-win_amd64.whl", hash = "sha256:6ef687afab047554a2d366e112dd187b62d261d49eb79b77e386f94644363294"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ad36cfb355e24f1bd37cac88c112cd7730873f20fb0bdaf8ba59eedf8216079f"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:36b3ee798c58ace201289024b52788161e1ea133e4ac93fba7d49da5fec0ef9e"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8a2f084546cc59ea99fda8e070be2fd140c3092dc11524a71aa8f0f3d5a55ca"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e4461d0f003a0aa9be2bdd1b798a041f177189c1a0f7619fe8c95ad08d9a45d7"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:8db715ebe3bb7d86d77ac1826f7d67ec11a70dbd2376b7cc214199360517b641"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:793968759cd0d96cac1e367afd70c235867831983f876a53389ad869b043c948"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66e6a3af5a75363d2c9a48b07cb27c4ea542938b1a2e93b15a503cdfa8490795"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6ef0befbb5d79cf32d0266f5cff01545602344eda89480e1dd88aca964260b18"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:1d4acf42190d449d5e89654d5c1ed3a4f17925eec71f05e2a41414689cda02d1"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:a5f446dd5055667aabaee78487f2b5ab72e244f9bc0b2ffebfeec79051679984"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:9dbbeb27f4e70bfd9eec1be5477517365afe05a9b2c441a0b21929ee61048124"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:22806714311a69fd0af9b35b7be97c18a0fc2826e6827dbb3a8c94eac6cf7eeb"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:b34ae4636dfc4e76a438ab826a0d1eed2589ca7d9a1b2d5bb546978ac6485461"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c8370641f1a7f0e0669ddccca22f1da893cef7628396431eb445d46d893e5cd"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c8362467a0fdeccd47935f22c256bec5e6abe543bf0d66e3d3d57a8fb5731863"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:11a8c85ef4a07a7638180bf04fe189d12757c696eb41f310d2426895356dcf05"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b316144e85316da2723f9d8dc75bada12fa58489a527091fa1d5a612643d1a0e"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf1ea2e34868f6fbf070e1af291c8180480310173de0b0c43fc38a02929fc0e3"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e546e768d08ad55b20b11dbb78a745151acbd938f8f00d0cfbabe8b0199b9880"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:4901165d170a5fde6f589acb90a6b33629ad1ec976d4529e769c6f3d885e3e80"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:618a3d6cae6ef8ec88bb76dd80b83cfe415ad4f1d942ca2a903bf6b6ff97a2da"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:ed4eb745efbff0a8e9587d22a84be94a5eb7d2d99c02dacf7bd0911713ed14dd"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:6c81e5f372cd0dc5dc4809553d34f832f60a46034a5f187756d9b90586c2c307"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:43fbac5f22e25bee1d482c97474f930a353542855f05c1161fd804c9dc74a09d"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d7faa6f14017c0b1e69f5e2c357b998731ea75a442ab3841c0dbbbfe902d2c4"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:08231ac30a842bd04daabc4d71fddd7e6d26189406d5a69535638e4dcb88fe76"}, + {file = 
"rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:044a3e61a7c2dafacae99d1e722cc2d4c05280790ec5a05031b3876809d89a5c"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3f26b5bd1079acdb0c7a5645e350fe54d16b17bfc5e71f371c449383d3342e17"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:482103aed1dfe2f3b71a58eff35ba105289b8d862551ea576bd15479aba01f66"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1374f4129f9bcca53a1bba0bb86bf78325a0374577cf7e9e4cd046b1e6f20e24"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:635dc434ff724b178cb192c70016cc0ad25a275228f749ee0daf0eddbc8183b1"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:bc362ee4e314870a70f4ae88772d72d877246537d9f8cb8f7eacf10884862432"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:4832d7d380477521a8c1644bbab6588dfedea5e30a7d967b5fb75977c45fd77f"}, + {file = "rpds_py-0.18.0.tar.gz", hash = "sha256:42821446ee7a76f5d9f71f9e33a4fb2ffd724bb3e7f93386150b61a43115788d"}, ] [[package]] name = "rsa" version = "4.9" description = "Pure-Python RSA implementation" -optional = false +optional = true python-versions = ">=3.6,<4" files = [ {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, @@ -3570,40 +3756,40 @@ pyasn1 = ">=0.1.3" [[package]] name = "s3fs" -version = "2023.9.1" +version = "2023.12.2" description = "Convenient Filesystem interface over S3" optional = true python-versions = ">= 3.8" files = [ - {file = "s3fs-2023.9.1-py3-none-any.whl", hash = "sha256:3bd1f9f33e4ad090d150301c3b386061cb7085fc8bda3a9ec9198dccca765d6c"}, - {file = "s3fs-2023.9.1.tar.gz", hash = "sha256:42e1821ed94c1607c848853d1d715ebcd25c13380b6f510c2cb498c7e5b3e674"}, + {file = "s3fs-2023.12.2-py3-none-any.whl", hash = "sha256:0d5a99039665f30b2dbee5495de3b299a022d51b3195a9440f5df47c2621b777"}, + {file = "s3fs-2023.12.2.tar.gz", hash = "sha256:b5ec07062481bbb45cb061b31984c7188d106e292c27033039e024e4ba5740dc"}, ] [package.dependencies] -aiobotocore = ">=2.5.4,<2.6.0" +aiobotocore = ">=2.5.4,<3.0.0" aiohttp = "<4.0.0a0 || >4.0.0a0,<4.0.0a1 || >4.0.0a1" -fsspec = "2023.9.1" +fsspec = "2023.12.2" [package.extras] -awscli = ["aiobotocore[awscli] (>=2.5.4,<2.6.0)"] -boto3 = ["aiobotocore[boto3] (>=2.5.4,<2.6.0)"] +awscli = ["aiobotocore[awscli] (>=2.5.4,<3.0.0)"] +boto3 = ["aiobotocore[boto3] (>=2.5.4,<3.0.0)"] [[package]] name = "s3transfer" -version = "0.6.2" +version = "0.10.0" description = "An Amazon S3 Transfer Manager" optional = false -python-versions = ">= 3.7" +python-versions = ">= 3.8" files = [ - {file = "s3transfer-0.6.2-py3-none-any.whl", hash = "sha256:b014be3a8a2aab98cfe1abc7229cc5a9a0cf05eb9c1f2b86b230fd8df3f78084"}, - {file = "s3transfer-0.6.2.tar.gz", hash = "sha256:cab66d3380cca3e70939ef2255d01cd8aece6a4907a9528740f668c4b0611861"}, + {file = "s3transfer-0.10.0-py3-none-any.whl", hash = "sha256:3cdb40f5cfa6966e812209d0994f2a4709b561c88e90cf00c2696d2df4e56b2e"}, + {file = "s3transfer-0.10.0.tar.gz", hash = "sha256:d0c8bbf672d5eebbe4e57945e23b972d963f07d82f661cabf678a5c88831595b"}, ] [package.dependencies] -botocore = ">=1.12.36,<2.0a.0" +botocore = ">=1.33.2,<2.0a.0" [package.extras] -crt = ["botocore[crt] (>=1.20.29,<2.0a.0)"] +crt = ["botocore[crt] (>=1.33.2,<2.0a.0)"] 
[[package]] name = "sarif-om" @@ -3622,18 +3808,18 @@ pbr = "*" [[package]] name = "setuptools" -version = "68.2.2" +version = "69.1.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-68.2.2-py3-none-any.whl", hash = "sha256:b454a35605876da60632df1a60f736524eb73cc47bbc9f3f1ef1b644de74fd2a"}, - {file = "setuptools-68.2.2.tar.gz", hash = "sha256:4ac1475276d2f1c48684874089fefcd83bd7162ddaafb81fac866ba0db282a87"}, + {file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"}, + {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] @@ -3660,70 +3846,70 @@ files = [ [[package]] name = "sqlalchemy" -version = "2.0.23" +version = "2.0.27" description = "Database Abstraction Library" optional = true python-versions = ">=3.7" files = [ - {file = "SQLAlchemy-2.0.23-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:638c2c0b6b4661a4fd264f6fb804eccd392745c5887f9317feb64bb7cb03b3ea"}, - {file = "SQLAlchemy-2.0.23-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e3b5036aa326dc2df50cba3c958e29b291a80f604b1afa4c8ce73e78e1c9f01d"}, - {file = "SQLAlchemy-2.0.23-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:787af80107fb691934a01889ca8f82a44adedbf5ef3d6ad7d0f0b9ac557e0c34"}, - {file = "SQLAlchemy-2.0.23-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c14eba45983d2f48f7546bb32b47937ee2cafae353646295f0e99f35b14286ab"}, - {file = "SQLAlchemy-2.0.23-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0666031df46b9badba9bed00092a1ffa3aa063a5e68fa244acd9f08070e936d3"}, - 
{file = "SQLAlchemy-2.0.23-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:89a01238fcb9a8af118eaad3ffcc5dedaacbd429dc6fdc43fe430d3a941ff965"}, - {file = "SQLAlchemy-2.0.23-cp310-cp310-win32.whl", hash = "sha256:cabafc7837b6cec61c0e1e5c6d14ef250b675fa9c3060ed8a7e38653bd732ff8"}, - {file = "SQLAlchemy-2.0.23-cp310-cp310-win_amd64.whl", hash = "sha256:87a3d6b53c39cd173990de2f5f4b83431d534a74f0e2f88bd16eabb5667e65c6"}, - {file = "SQLAlchemy-2.0.23-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d5578e6863eeb998980c212a39106ea139bdc0b3f73291b96e27c929c90cd8e1"}, - {file = "SQLAlchemy-2.0.23-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:62d9e964870ea5ade4bc870ac4004c456efe75fb50404c03c5fd61f8bc669a72"}, - {file = "SQLAlchemy-2.0.23-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c80c38bd2ea35b97cbf7c21aeb129dcbebbf344ee01a7141016ab7b851464f8e"}, - {file = "SQLAlchemy-2.0.23-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75eefe09e98043cff2fb8af9796e20747ae870c903dc61d41b0c2e55128f958d"}, - {file = "SQLAlchemy-2.0.23-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd45a5b6c68357578263d74daab6ff9439517f87da63442d244f9f23df56138d"}, - {file = "SQLAlchemy-2.0.23-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a86cb7063e2c9fb8e774f77fbf8475516d270a3e989da55fa05d08089d77f8c4"}, - {file = "SQLAlchemy-2.0.23-cp311-cp311-win32.whl", hash = "sha256:b41f5d65b54cdf4934ecede2f41b9c60c9f785620416e8e6c48349ab18643855"}, - {file = "SQLAlchemy-2.0.23-cp311-cp311-win_amd64.whl", hash = "sha256:9ca922f305d67605668e93991aaf2c12239c78207bca3b891cd51a4515c72e22"}, - {file = "SQLAlchemy-2.0.23-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d0f7fb0c7527c41fa6fcae2be537ac137f636a41b4c5a4c58914541e2f436b45"}, - {file = "SQLAlchemy-2.0.23-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7c424983ab447dab126c39d3ce3be5bee95700783204a72549c3dceffe0fc8f4"}, - {file = "SQLAlchemy-2.0.23-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f508ba8f89e0a5ecdfd3761f82dda2a3d7b678a626967608f4273e0dba8f07ac"}, - {file = "SQLAlchemy-2.0.23-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6463aa765cf02b9247e38b35853923edbf2f6fd1963df88706bc1d02410a5577"}, - {file = "SQLAlchemy-2.0.23-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e599a51acf3cc4d31d1a0cf248d8f8d863b6386d2b6782c5074427ebb7803bda"}, - {file = "SQLAlchemy-2.0.23-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fd54601ef9cc455a0c61e5245f690c8a3ad67ddb03d3b91c361d076def0b4c60"}, - {file = "SQLAlchemy-2.0.23-cp312-cp312-win32.whl", hash = "sha256:42d0b0290a8fb0165ea2c2781ae66e95cca6e27a2fbe1016ff8db3112ac1e846"}, - {file = "SQLAlchemy-2.0.23-cp312-cp312-win_amd64.whl", hash = "sha256:227135ef1e48165f37590b8bfc44ed7ff4c074bf04dc8d6f8e7f1c14a94aa6ca"}, - {file = "SQLAlchemy-2.0.23-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:14aebfe28b99f24f8a4c1346c48bc3d63705b1f919a24c27471136d2f219f02d"}, - {file = "SQLAlchemy-2.0.23-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e983fa42164577d073778d06d2cc5d020322425a509a08119bdcee70ad856bf"}, - {file = "SQLAlchemy-2.0.23-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e0dc9031baa46ad0dd5a269cb7a92a73284d1309228be1d5935dac8fb3cae24"}, - {file = "SQLAlchemy-2.0.23-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5f94aeb99f43729960638e7468d4688f6efccb837a858b34574e01143cf11f89"}, - {file = 
"SQLAlchemy-2.0.23-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:63bfc3acc970776036f6d1d0e65faa7473be9f3135d37a463c5eba5efcdb24c8"}, - {file = "SQLAlchemy-2.0.23-cp37-cp37m-win32.whl", hash = "sha256:f48ed89dd11c3c586f45e9eec1e437b355b3b6f6884ea4a4c3111a3358fd0c18"}, - {file = "SQLAlchemy-2.0.23-cp37-cp37m-win_amd64.whl", hash = "sha256:1e018aba8363adb0599e745af245306cb8c46b9ad0a6fc0a86745b6ff7d940fc"}, - {file = "SQLAlchemy-2.0.23-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:64ac935a90bc479fee77f9463f298943b0e60005fe5de2aa654d9cdef46c54df"}, - {file = "SQLAlchemy-2.0.23-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c4722f3bc3c1c2fcc3702dbe0016ba31148dd6efcd2a2fd33c1b4897c6a19693"}, - {file = "SQLAlchemy-2.0.23-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4af79c06825e2836de21439cb2a6ce22b2ca129bad74f359bddd173f39582bf5"}, - {file = "SQLAlchemy-2.0.23-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:683ef58ca8eea4747737a1c35c11372ffeb84578d3aab8f3e10b1d13d66f2bc4"}, - {file = "SQLAlchemy-2.0.23-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d4041ad05b35f1f4da481f6b811b4af2f29e83af253bf37c3c4582b2c68934ab"}, - {file = "SQLAlchemy-2.0.23-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aeb397de65a0a62f14c257f36a726945a7f7bb60253462e8602d9b97b5cbe204"}, - {file = "SQLAlchemy-2.0.23-cp38-cp38-win32.whl", hash = "sha256:42ede90148b73fe4ab4a089f3126b2cfae8cfefc955c8174d697bb46210c8306"}, - {file = "SQLAlchemy-2.0.23-cp38-cp38-win_amd64.whl", hash = "sha256:964971b52daab357d2c0875825e36584d58f536e920f2968df8d581054eada4b"}, - {file = "SQLAlchemy-2.0.23-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:616fe7bcff0a05098f64b4478b78ec2dfa03225c23734d83d6c169eb41a93e55"}, - {file = "SQLAlchemy-2.0.23-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0e680527245895aba86afbd5bef6c316831c02aa988d1aad83c47ffe92655e74"}, - {file = "SQLAlchemy-2.0.23-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9585b646ffb048c0250acc7dad92536591ffe35dba624bb8fd9b471e25212a35"}, - {file = "SQLAlchemy-2.0.23-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4895a63e2c271ffc7a81ea424b94060f7b3b03b4ea0cd58ab5bb676ed02f4221"}, - {file = "SQLAlchemy-2.0.23-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:cc1d21576f958c42d9aec68eba5c1a7d715e5fc07825a629015fe8e3b0657fb0"}, - {file = "SQLAlchemy-2.0.23-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:967c0b71156f793e6662dd839da54f884631755275ed71f1539c95bbada9aaab"}, - {file = "SQLAlchemy-2.0.23-cp39-cp39-win32.whl", hash = "sha256:0a8c6aa506893e25a04233bc721c6b6cf844bafd7250535abb56cb6cc1368884"}, - {file = "SQLAlchemy-2.0.23-cp39-cp39-win_amd64.whl", hash = "sha256:f3420d00d2cb42432c1d0e44540ae83185ccbbc67a6054dcc8ab5387add6620b"}, - {file = "SQLAlchemy-2.0.23-py3-none-any.whl", hash = "sha256:31952bbc527d633b9479f5f81e8b9dfada00b91d6baba021a869095f1a97006d"}, - {file = "SQLAlchemy-2.0.23.tar.gz", hash = "sha256:c1bda93cbbe4aa2aa0aa8655c5aeda505cd219ff3e8da91d1d329e143e4aff69"}, + {file = "SQLAlchemy-2.0.27-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d04e579e911562f1055d26dab1868d3e0bb905db3bccf664ee8ad109f035618a"}, + {file = "SQLAlchemy-2.0.27-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fa67d821c1fd268a5a87922ef4940442513b4e6c377553506b9db3b83beebbd8"}, + {file = "SQLAlchemy-2.0.27-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c7a596d0be71b7baa037f4ac10d5e057d276f65a9a611c46970f012752ebf2d"}, 
+ {file = "SQLAlchemy-2.0.27-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:954d9735ee9c3fa74874c830d089a815b7b48df6f6b6e357a74130e478dbd951"}, + {file = "SQLAlchemy-2.0.27-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5cd20f58c29bbf2680039ff9f569fa6d21453fbd2fa84dbdb4092f006424c2e6"}, + {file = "SQLAlchemy-2.0.27-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:03f448ffb731b48323bda68bcc93152f751436ad6037f18a42b7e16af9e91c07"}, + {file = "SQLAlchemy-2.0.27-cp310-cp310-win32.whl", hash = "sha256:d997c5938a08b5e172c30583ba6b8aad657ed9901fc24caf3a7152eeccb2f1b4"}, + {file = "SQLAlchemy-2.0.27-cp310-cp310-win_amd64.whl", hash = "sha256:eb15ef40b833f5b2f19eeae65d65e191f039e71790dd565c2af2a3783f72262f"}, + {file = "SQLAlchemy-2.0.27-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6c5bad7c60a392850d2f0fee8f355953abaec878c483dd7c3836e0089f046bf6"}, + {file = "SQLAlchemy-2.0.27-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a3012ab65ea42de1be81fff5fb28d6db893ef978950afc8130ba707179b4284a"}, + {file = "SQLAlchemy-2.0.27-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dbcd77c4d94b23e0753c5ed8deba8c69f331d4fd83f68bfc9db58bc8983f49cd"}, + {file = "SQLAlchemy-2.0.27-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d177b7e82f6dd5e1aebd24d9c3297c70ce09cd1d5d37b43e53f39514379c029c"}, + {file = "SQLAlchemy-2.0.27-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:680b9a36029b30cf063698755d277885d4a0eab70a2c7c6e71aab601323cba45"}, + {file = "SQLAlchemy-2.0.27-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1306102f6d9e625cebaca3d4c9c8f10588735ef877f0360b5cdb4fdfd3fd7131"}, + {file = "SQLAlchemy-2.0.27-cp311-cp311-win32.whl", hash = "sha256:5b78aa9f4f68212248aaf8943d84c0ff0f74efc65a661c2fc68b82d498311fd5"}, + {file = "SQLAlchemy-2.0.27-cp311-cp311-win_amd64.whl", hash = "sha256:15e19a84b84528f52a68143439d0c7a3a69befcd4f50b8ef9b7b69d2628ae7c4"}, + {file = "SQLAlchemy-2.0.27-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:0de1263aac858f288a80b2071990f02082c51d88335a1db0d589237a3435fe71"}, + {file = "SQLAlchemy-2.0.27-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce850db091bf7d2a1f2fdb615220b968aeff3849007b1204bf6e3e50a57b3d32"}, + {file = "SQLAlchemy-2.0.27-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8dfc936870507da96aebb43e664ae3a71a7b96278382bcfe84d277b88e379b18"}, + {file = "SQLAlchemy-2.0.27-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4fbe6a766301f2e8a4519f4500fe74ef0a8509a59e07a4085458f26228cd7cc"}, + {file = "SQLAlchemy-2.0.27-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4535c49d961fe9a77392e3a630a626af5baa967172d42732b7a43496c8b28876"}, + {file = "SQLAlchemy-2.0.27-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:0fb3bffc0ced37e5aa4ac2416f56d6d858f46d4da70c09bb731a246e70bff4d5"}, + {file = "SQLAlchemy-2.0.27-cp312-cp312-win32.whl", hash = "sha256:7f470327d06400a0aa7926b375b8e8c3c31d335e0884f509fe272b3c700a7254"}, + {file = "SQLAlchemy-2.0.27-cp312-cp312-win_amd64.whl", hash = "sha256:f9374e270e2553653d710ece397df67db9d19c60d2647bcd35bfc616f1622dcd"}, + {file = "SQLAlchemy-2.0.27-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e97cf143d74a7a5a0f143aa34039b4fecf11343eed66538610debc438685db4a"}, + {file = "SQLAlchemy-2.0.27-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7b5a3e2120982b8b6bd1d5d99e3025339f7fb8b8267551c679afb39e9c7c7f1"}, + {file = 
"SQLAlchemy-2.0.27-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e36aa62b765cf9f43a003233a8c2d7ffdeb55bc62eaa0a0380475b228663a38f"}, + {file = "SQLAlchemy-2.0.27-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5ada0438f5b74c3952d916c199367c29ee4d6858edff18eab783b3978d0db16d"}, + {file = "SQLAlchemy-2.0.27-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b1d9d1bfd96eef3c3faedb73f486c89e44e64e40e5bfec304ee163de01cf996f"}, + {file = "SQLAlchemy-2.0.27-cp37-cp37m-win32.whl", hash = "sha256:ca891af9f3289d24a490a5fde664ea04fe2f4984cd97e26de7442a4251bd4b7c"}, + {file = "SQLAlchemy-2.0.27-cp37-cp37m-win_amd64.whl", hash = "sha256:fd8aafda7cdff03b905d4426b714601c0978725a19efc39f5f207b86d188ba01"}, + {file = "SQLAlchemy-2.0.27-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ec1f5a328464daf7a1e4e385e4f5652dd9b1d12405075ccba1df842f7774b4fc"}, + {file = "SQLAlchemy-2.0.27-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ad862295ad3f644e3c2c0d8b10a988e1600d3123ecb48702d2c0f26771f1c396"}, + {file = "SQLAlchemy-2.0.27-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48217be1de7d29a5600b5c513f3f7664b21d32e596d69582be0a94e36b8309cb"}, + {file = "SQLAlchemy-2.0.27-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e56afce6431450442f3ab5973156289bd5ec33dd618941283847c9fd5ff06bf"}, + {file = "SQLAlchemy-2.0.27-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:611068511b5531304137bcd7fe8117c985d1b828eb86043bd944cebb7fae3910"}, + {file = "SQLAlchemy-2.0.27-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b86abba762ecfeea359112b2bb4490802b340850bbee1948f785141a5e020de8"}, + {file = "SQLAlchemy-2.0.27-cp38-cp38-win32.whl", hash = "sha256:30d81cc1192dc693d49d5671cd40cdec596b885b0ce3b72f323888ab1c3863d5"}, + {file = "SQLAlchemy-2.0.27-cp38-cp38-win_amd64.whl", hash = "sha256:120af1e49d614d2525ac247f6123841589b029c318b9afbfc9e2b70e22e1827d"}, + {file = "SQLAlchemy-2.0.27-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d07ee7793f2aeb9b80ec8ceb96bc8cc08a2aec8a1b152da1955d64e4825fcbac"}, + {file = "SQLAlchemy-2.0.27-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cb0845e934647232b6ff5150df37ceffd0b67b754b9fdbb095233deebcddbd4a"}, + {file = "SQLAlchemy-2.0.27-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fc19ae2e07a067663dd24fca55f8ed06a288384f0e6e3910420bf4b1270cc51"}, + {file = "SQLAlchemy-2.0.27-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b90053be91973a6fb6020a6e44382c97739736a5a9d74e08cc29b196639eb979"}, + {file = "SQLAlchemy-2.0.27-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2f5c9dfb0b9ab5e3a8a00249534bdd838d943ec4cfb9abe176a6c33408430230"}, + {file = "SQLAlchemy-2.0.27-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:33e8bde8fff203de50399b9039c4e14e42d4d227759155c21f8da4a47fc8053c"}, + {file = "SQLAlchemy-2.0.27-cp39-cp39-win32.whl", hash = "sha256:d873c21b356bfaf1589b89090a4011e6532582b3a8ea568a00e0c3aab09399dd"}, + {file = "SQLAlchemy-2.0.27-cp39-cp39-win_amd64.whl", hash = "sha256:ff2f1b7c963961d41403b650842dc2039175b906ab2093635d8319bef0b7d620"}, + {file = "SQLAlchemy-2.0.27-py3-none-any.whl", hash = "sha256:1ab4e0448018d01b142c916cc7119ca573803a4745cfe341b8f95657812700ac"}, + {file = "SQLAlchemy-2.0.27.tar.gz", hash = "sha256:86a6ed69a71fe6b88bf9331594fa390a2adda4a49b5c06f98e47bf0d392534f8"}, ] [package.dependencies] greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or 
platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""} -typing-extensions = ">=4.2.0" +typing-extensions = ">=4.6.0" [package.extras] aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] -aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing-extensions (!=3.10.0.1)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] asyncio = ["greenlet (!=0.4.17)"] asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"] @@ -3733,7 +3919,7 @@ mssql-pyodbc = ["pyodbc"] mypy = ["mypy (>=0.910)"] mysql = ["mysqlclient (>=1.4.0)"] mysql-connector = ["mysql-connector-python"] -oracle = ["cx-oracle (>=8)"] +oracle = ["cx_oracle (>=8)"] oracle-oracledb = ["oracledb (>=1.0.1)"] postgresql = ["psycopg2 (>=2.7)"] postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] @@ -3743,25 +3929,7 @@ postgresql-psycopg2binary = ["psycopg2-binary"] postgresql-psycopg2cffi = ["psycopg2cffi"] postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] pymysql = ["pymysql"] -sqlcipher = ["sqlcipher3-binary"] - -[[package]] -name = "sshpubkeys" -version = "3.3.1" -description = "SSH public key parser" -optional = false -python-versions = ">=3" -files = [ - {file = "sshpubkeys-3.3.1-py2.py3-none-any.whl", hash = "sha256:946f76b8fe86704b0e7c56a00d80294e39bc2305999844f079a217885060b1ac"}, - {file = "sshpubkeys-3.3.1.tar.gz", hash = "sha256:3020ed4f8c846849299370fbe98ff4157b0ccc1accec105e07cfa9ae4bb55064"}, -] - -[package.dependencies] -cryptography = ">=2.1.4" -ecdsa = ">=0.13" - -[package.extras] -dev = ["twine", "wheel", "yapf"] +sqlcipher = ["sqlcipher3_binary"] [[package]] name = "strictyaml" @@ -3791,6 +3959,20 @@ files = [ [package.dependencies] mpmath = ">=0.19" +[[package]] +name = "tenacity" +version = "8.2.3" +description = "Retry code until it succeeds" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tenacity-8.2.3-py3-none-any.whl", hash = "sha256:ce510e327a630c9e1beaf17d42e6ffacc88185044ad85cf74c0a8887c6a0f88c"}, + {file = "tenacity-8.2.3.tar.gz", hash = "sha256:5398ef0d78e63f40007c1fb4c0bff96e1911394d2fa8d194f77619c05ff6cc8a"}, +] + +[package.extras] +doc = ["reno", "sphinx", "tornado (>=4.5)"] + [[package]] name = "thrift" version = "0.16.0" @@ -3821,16 +4003,25 @@ files = [ ] [[package]] -name = "types-pyyaml" -version = "6.0.12.11" -description = "Typing stubs for PyYAML" -optional = false -python-versions = "*" +name = "tqdm" +version = "4.66.2" +description = "Fast, Extensible Progress Meter" +optional = true +python-versions = ">=3.7" files = [ - {file = "types-PyYAML-6.0.12.11.tar.gz", hash = "sha256:7d340b19ca28cddfdba438ee638cd4084bde213e501a3978738543e27094775b"}, - {file = "types_PyYAML-6.0.12.11-py3-none-any.whl", hash = "sha256:a461508f3096d1d5810ec5ab95d7eeecb651f3a15b71959999988942063bf01d"}, + {file = "tqdm-4.66.2-py3-none-any.whl", hash = "sha256:1ee4f8a893eb9bef51c6e35730cebf234d5d0b6bd112b0271e10ed7c24a02bd9"}, + {file = "tqdm-4.66.2.tar.gz", hash = "sha256:6cd52cdf0fef0e0f543299cfc96fec90d7b8a7e88745f411ec33eb44d5ed3531"}, ] +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] +notebook = ["ipywidgets (>=6)"] +slack = ["slack-sdk"] +telegram = ["requests"] + [[package]] name = 
"typing-extensions" version = "4.9.0" @@ -3844,13 +4035,13 @@ files = [ [[package]] name = "tzdata" -version = "2023.3" +version = "2024.1" description = "Provider of IANA time zone data" optional = true python-versions = ">=2" files = [ - {file = "tzdata-2023.3-py2.py3-none-any.whl", hash = "sha256:7e65763eef3120314099b6939b5546db7adce1e7d6f2e179e3df563c70511eda"}, - {file = "tzdata-2023.3.tar.gz", hash = "sha256:11ef1e08e54acb0d4f95bdb1be05da659673de4acbd21bf9c69e94cc5e907a3a"}, + {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, + {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, ] [[package]] @@ -3869,21 +4060,38 @@ brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotl secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] +[[package]] +name = "urllib3" +version = "2.0.7" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.7" +files = [ + {file = "urllib3-2.0.7-py3-none-any.whl", hash = "sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e"}, + {file = "urllib3-2.0.7.tar.gz", hash = "sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + [[package]] name = "virtualenv" -version = "20.24.5" +version = "20.25.0" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.24.5-py3-none-any.whl", hash = "sha256:b80039f280f4919c77b30f1c23294ae357c4c8701042086e3fc005963e4e537b"}, - {file = "virtualenv-20.24.5.tar.gz", hash = "sha256:e8361967f6da6fbdf1426483bfe9fca8287c242ac0bc30429905721cefbff752"}, + {file = "virtualenv-20.25.0-py3-none-any.whl", hash = "sha256:4238949c5ffe6876362d9c0180fc6c3a824a7b12b80604eeb8085f2ed7460de3"}, + {file = "virtualenv-20.25.0.tar.gz", hash = "sha256:bf51c0d9c7dd63ea8e44086fa1e4fb1093a31e963b86959257378aef020e1f1b"}, ] [package.dependencies] distlib = ">=0.3.7,<1" filelock = ">=3.12.2,<4" -platformdirs = ">=3.9.1,<4" +platformdirs = ">=3.9.1,<5" [package.extras] docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] @@ -3908,86 +4116,81 @@ watchdog = ["watchdog (>=2.3)"] [[package]] name = "wrapt" -version = "1.15.0" +version = "1.16.0" description = "Module for decorators, wrappers and monkey patching." 
optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" -files = [ - {file = "wrapt-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:ca1cccf838cd28d5a0883b342474c630ac48cac5df0ee6eacc9c7290f76b11c1"}, - {file = "wrapt-1.15.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:e826aadda3cae59295b95343db8f3d965fb31059da7de01ee8d1c40a60398b29"}, - {file = "wrapt-1.15.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5fc8e02f5984a55d2c653f5fea93531e9836abbd84342c1d1e17abc4a15084c2"}, - {file = "wrapt-1.15.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:96e25c8603a155559231c19c0349245eeb4ac0096fe3c1d0be5c47e075bd4f46"}, - {file = "wrapt-1.15.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:40737a081d7497efea35ab9304b829b857f21558acfc7b3272f908d33b0d9d4c"}, - {file = "wrapt-1.15.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:f87ec75864c37c4c6cb908d282e1969e79763e0d9becdfe9fe5473b7bb1e5f09"}, - {file = "wrapt-1.15.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:1286eb30261894e4c70d124d44b7fd07825340869945c79d05bda53a40caa079"}, - {file = "wrapt-1.15.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:493d389a2b63c88ad56cdc35d0fa5752daac56ca755805b1b0c530f785767d5e"}, - {file = "wrapt-1.15.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:58d7a75d731e8c63614222bcb21dd992b4ab01a399f1f09dd82af17bbfc2368a"}, - {file = "wrapt-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:21f6d9a0d5b3a207cdf7acf8e58d7d13d463e639f0c7e01d82cdb671e6cb7923"}, - {file = "wrapt-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ce42618f67741d4697684e501ef02f29e758a123aa2d669e2d964ff734ee00ee"}, - {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41d07d029dd4157ae27beab04d22b8e261eddfc6ecd64ff7000b10dc8b3a5727"}, - {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54accd4b8bc202966bafafd16e69da9d5640ff92389d33d28555c5fd4f25ccb7"}, - {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fbfbca668dd15b744418265a9607baa970c347eefd0db6a518aaf0cfbd153c0"}, - {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:76e9c727a874b4856d11a32fb0b389afc61ce8aaf281ada613713ddeadd1cfec"}, - {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e20076a211cd6f9b44a6be58f7eeafa7ab5720eb796975d0c03f05b47d89eb90"}, - {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a74d56552ddbde46c246b5b89199cb3fd182f9c346c784e1a93e4dc3f5ec9975"}, - {file = "wrapt-1.15.0-cp310-cp310-win32.whl", hash = "sha256:26458da5653aa5b3d8dc8b24192f574a58984c749401f98fff994d41d3f08da1"}, - {file = "wrapt-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:75760a47c06b5974aa5e01949bf7e66d2af4d08cb8c1d6516af5e39595397f5e"}, - {file = "wrapt-1.15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ba1711cda2d30634a7e452fc79eabcadaffedf241ff206db2ee93dd2c89a60e7"}, - {file = "wrapt-1.15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:56374914b132c702aa9aa9959c550004b8847148f95e1b824772d453ac204a72"}, - {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a89ce3fd220ff144bd9d54da333ec0de0399b52c9ac3d2ce34b569cf1a5748fb"}, - {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:3bbe623731d03b186b3d6b0d6f51865bf598587c38d6f7b0be2e27414f7f214e"}, - {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3abbe948c3cbde2689370a262a8d04e32ec2dd4f27103669a45c6929bcdbfe7c"}, - {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b67b819628e3b748fd3c2192c15fb951f549d0f47c0449af0764d7647302fda3"}, - {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7eebcdbe3677e58dd4c0e03b4f2cfa346ed4049687d839adad68cc38bb559c92"}, - {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:74934ebd71950e3db69960a7da29204f89624dde411afbfb3b4858c1409b1e98"}, - {file = "wrapt-1.15.0-cp311-cp311-win32.whl", hash = "sha256:bd84395aab8e4d36263cd1b9308cd504f6cf713b7d6d3ce25ea55670baec5416"}, - {file = "wrapt-1.15.0-cp311-cp311-win_amd64.whl", hash = "sha256:a487f72a25904e2b4bbc0817ce7a8de94363bd7e79890510174da9d901c38705"}, - {file = "wrapt-1.15.0-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:4ff0d20f2e670800d3ed2b220d40984162089a6e2c9646fdb09b85e6f9a8fc29"}, - {file = "wrapt-1.15.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9ed6aa0726b9b60911f4aed8ec5b8dd7bf3491476015819f56473ffaef8959bd"}, - {file = "wrapt-1.15.0-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:896689fddba4f23ef7c718279e42f8834041a21342d95e56922e1c10c0cc7afb"}, - {file = "wrapt-1.15.0-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:75669d77bb2c071333417617a235324a1618dba66f82a750362eccbe5b61d248"}, - {file = "wrapt-1.15.0-cp35-cp35m-win32.whl", hash = "sha256:fbec11614dba0424ca72f4e8ba3c420dba07b4a7c206c8c8e4e73f2e98f4c559"}, - {file = "wrapt-1.15.0-cp35-cp35m-win_amd64.whl", hash = "sha256:fd69666217b62fa5d7c6aa88e507493a34dec4fa20c5bd925e4bc12fce586639"}, - {file = "wrapt-1.15.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b0724f05c396b0a4c36a3226c31648385deb6a65d8992644c12a4963c70326ba"}, - {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bbeccb1aa40ab88cd29e6c7d8585582c99548f55f9b2581dfc5ba68c59a85752"}, - {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38adf7198f8f154502883242f9fe7333ab05a5b02de7d83aa2d88ea621f13364"}, - {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:578383d740457fa790fdf85e6d346fda1416a40549fe8db08e5e9bd281c6a475"}, - {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:a4cbb9ff5795cd66f0066bdf5947f170f5d63a9274f99bdbca02fd973adcf2a8"}, - {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:af5bd9ccb188f6a5fdda9f1f09d9f4c86cc8a539bd48a0bfdc97723970348418"}, - {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b56d5519e470d3f2fe4aa7585f0632b060d532d0696c5bdfb5e8319e1d0f69a2"}, - {file = "wrapt-1.15.0-cp36-cp36m-win32.whl", hash = "sha256:77d4c1b881076c3ba173484dfa53d3582c1c8ff1f914c6461ab70c8428b796c1"}, - {file = "wrapt-1.15.0-cp36-cp36m-win_amd64.whl", hash = "sha256:077ff0d1f9d9e4ce6476c1a924a3332452c1406e59d90a2cf24aeb29eeac9420"}, - {file = "wrapt-1.15.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5c5aa28df055697d7c37d2099a7bc09f559d5053c3349b1ad0c39000e611d317"}, - {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a8564f283394634a7a7054b7983e47dbf39c07712d7b177b37e03f2467a024e"}, - {file 
= "wrapt-1.15.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780c82a41dc493b62fc5884fb1d3a3b81106642c5c5c78d6a0d4cbe96d62ba7e"}, - {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e169e957c33576f47e21864cf3fc9ff47c223a4ebca8960079b8bd36cb014fd0"}, - {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b02f21c1e2074943312d03d243ac4388319f2456576b2c6023041c4d57cd7019"}, - {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f2e69b3ed24544b0d3dbe2c5c0ba5153ce50dcebb576fdc4696d52aa22db6034"}, - {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d787272ed958a05b2c86311d3a4135d3c2aeea4fc655705f074130aa57d71653"}, - {file = "wrapt-1.15.0-cp37-cp37m-win32.whl", hash = "sha256:02fce1852f755f44f95af51f69d22e45080102e9d00258053b79367d07af39c0"}, - {file = "wrapt-1.15.0-cp37-cp37m-win_amd64.whl", hash = "sha256:abd52a09d03adf9c763d706df707c343293d5d106aea53483e0ec8d9e310ad5e"}, - {file = "wrapt-1.15.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cdb4f085756c96a3af04e6eca7f08b1345e94b53af8921b25c72f096e704e145"}, - {file = "wrapt-1.15.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:230ae493696a371f1dbffaad3dafbb742a4d27a0afd2b1aecebe52b740167e7f"}, - {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63424c681923b9f3bfbc5e3205aafe790904053d42ddcc08542181a30a7a51bd"}, - {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6bcbfc99f55655c3d93feb7ef3800bd5bbe963a755687cbf1f490a71fb7794b"}, - {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c99f4309f5145b93eca6e35ac1a988f0dc0a7ccf9ccdcd78d3c0adf57224e62f"}, - {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b130fe77361d6771ecf5a219d8e0817d61b236b7d8b37cc045172e574ed219e6"}, - {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:96177eb5645b1c6985f5c11d03fc2dbda9ad24ec0f3a46dcce91445747e15094"}, - {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5fe3e099cf07d0fb5a1e23d399e5d4d1ca3e6dfcbe5c8570ccff3e9208274f7"}, - {file = "wrapt-1.15.0-cp38-cp38-win32.whl", hash = "sha256:abd8f36c99512755b8456047b7be10372fca271bf1467a1caa88db991e7c421b"}, - {file = "wrapt-1.15.0-cp38-cp38-win_amd64.whl", hash = "sha256:b06fa97478a5f478fb05e1980980a7cdf2712015493b44d0c87606c1513ed5b1"}, - {file = "wrapt-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2e51de54d4fb8fb50d6ee8327f9828306a959ae394d3e01a1ba8b2f937747d86"}, - {file = "wrapt-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0970ddb69bba00670e58955f8019bec4a42d1785db3faa043c33d81de2bf843c"}, - {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76407ab327158c510f44ded207e2f76b657303e17cb7a572ffe2f5a8a48aa04d"}, - {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd525e0e52a5ff16653a3fc9e3dd827981917d34996600bbc34c05d048ca35cc"}, - {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d37ac69edc5614b90516807de32d08cb8e7b12260a285ee330955604ed9dd29"}, - {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_aarch64.whl", 
hash = "sha256:078e2a1a86544e644a68422f881c48b84fef6d18f8c7a957ffd3f2e0a74a0d4a"}, - {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2cf56d0e237280baed46f0b5316661da892565ff58309d4d2ed7dba763d984b8"}, - {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7dc0713bf81287a00516ef43137273b23ee414fe41a3c14be10dd95ed98a2df9"}, - {file = "wrapt-1.15.0-cp39-cp39-win32.whl", hash = "sha256:46ed616d5fb42f98630ed70c3529541408166c22cdfd4540b88d5f21006b0eff"}, - {file = "wrapt-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:eef4d64c650f33347c1f9266fa5ae001440b232ad9b98f1f43dfe7a79435c0a6"}, - {file = "wrapt-1.15.0-py3-none-any.whl", hash = "sha256:64b1df0f83706b4ef4cfb4fb0e4c2669100fd7ecacfb59e091fad300d4e04640"}, - {file = "wrapt-1.15.0.tar.gz", hash = "sha256:d06730c6aed78cee4126234cf2d071e01b44b915e725a6cb439a879ec9754a3a"}, +python-versions = ">=3.6" +files = [ + {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, + {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, + {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, + {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = 
"sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, + {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, + {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, + {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, + {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, + {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, + {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, + {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, + {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, + {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, + {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, + {file = 
"wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, + {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, + {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, + {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, + {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, ] [[package]] @@ -4003,85 +4206,101 @@ files = [ [[package]] name = "yarl" -version = "1.9.2" +version = "1.9.4" description = "Yet another URL library" optional = true python-versions = ">=3.7" files = [ - {file = "yarl-1.9.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c2ad583743d16ddbdf6bb14b5cd76bf43b0d0006e918809d5d4ddf7bde8dd82"}, - {file = "yarl-1.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:82aa6264b36c50acfb2424ad5ca537a2060ab6de158a5bd2a72a032cc75b9eb8"}, - {file = "yarl-1.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c0c77533b5ed4bcc38e943178ccae29b9bcf48ffd1063f5821192f23a1bd27b9"}, - {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee4afac41415d52d53a9833ebae7e32b344be72835bbb589018c9e938045a560"}, - {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9bf345c3a4f5ba7f766430f97f9cc1320786f19584acc7086491f45524a551ac"}, - {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a96c19c52ff442a808c105901d0bdfd2e28575b3d5f82e2f5fd67e20dc5f4ea"}, - {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:891c0e3ec5ec881541f6c5113d8df0315ce5440e244a716b95f2525b7b9f3608"}, - {file = "yarl-1.9.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c3a53ba34a636a256d767c086ceb111358876e1fb6b50dfc4d3f4951d40133d5"}, - {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:566185e8ebc0898b11f8026447eacd02e46226716229cea8db37496c8cdd26e0"}, - {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:2b0738fb871812722a0ac2154be1f049c6223b9f6f22eec352996b69775b36d4"}, - {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:32f1d071b3f362c80f1a7d322bfd7b2d11e33d2adf395cc1dd4df36c9c243095"}, - {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:e9fdc7ac0d42bc3ea78818557fab03af6181e076a2944f43c38684b4b6bed8e3"}, - {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:56ff08ab5df8429901ebdc5d15941b59f6253393cb5da07b4170beefcf1b2528"}, - {file = "yarl-1.9.2-cp310-cp310-win32.whl", hash = "sha256:8ea48e0a2f931064469bdabca50c2f578b565fc446f302a79ba6cc0ee7f384d3"}, - {file = "yarl-1.9.2-cp310-cp310-win_amd64.whl", hash = 
"sha256:50f33040f3836e912ed16d212f6cc1efb3231a8a60526a407aeb66c1c1956dde"}, - {file = "yarl-1.9.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:646d663eb2232d7909e6601f1a9107e66f9791f290a1b3dc7057818fe44fc2b6"}, - {file = "yarl-1.9.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:aff634b15beff8902d1f918012fc2a42e0dbae6f469fce134c8a0dc51ca423bb"}, - {file = "yarl-1.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a83503934c6273806aed765035716216cc9ab4e0364f7f066227e1aaea90b8d0"}, - {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b25322201585c69abc7b0e89e72790469f7dad90d26754717f3310bfe30331c2"}, - {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:22a94666751778629f1ec4280b08eb11815783c63f52092a5953faf73be24191"}, - {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ec53a0ea2a80c5cd1ab397925f94bff59222aa3cf9c6da938ce05c9ec20428d"}, - {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:159d81f22d7a43e6eabc36d7194cb53f2f15f498dbbfa8edc8a3239350f59fe7"}, - {file = "yarl-1.9.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:832b7e711027c114d79dffb92576acd1bd2decc467dec60e1cac96912602d0e6"}, - {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:95d2ecefbcf4e744ea952d073c6922e72ee650ffc79028eb1e320e732898d7e8"}, - {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d4e2c6d555e77b37288eaf45b8f60f0737c9efa3452c6c44626a5455aeb250b9"}, - {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:783185c75c12a017cc345015ea359cc801c3b29a2966c2655cd12b233bf5a2be"}, - {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:b8cc1863402472f16c600e3e93d542b7e7542a540f95c30afd472e8e549fc3f7"}, - {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:822b30a0f22e588b32d3120f6d41e4ed021806418b4c9f0bc3048b8c8cb3f92a"}, - {file = "yarl-1.9.2-cp311-cp311-win32.whl", hash = "sha256:a60347f234c2212a9f0361955007fcf4033a75bf600a33c88a0a8e91af77c0e8"}, - {file = "yarl-1.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:be6b3fdec5c62f2a67cb3f8c6dbf56bbf3f61c0f046f84645cd1ca73532ea051"}, - {file = "yarl-1.9.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:38a3928ae37558bc1b559f67410df446d1fbfa87318b124bf5032c31e3447b74"}, - {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac9bb4c5ce3975aeac288cfcb5061ce60e0d14d92209e780c93954076c7c4367"}, - {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3da8a678ca8b96c8606bbb8bfacd99a12ad5dd288bc6f7979baddd62f71c63ef"}, - {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:13414591ff516e04fcdee8dc051c13fd3db13b673c7a4cb1350e6b2ad9639ad3"}, - {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf74d08542c3a9ea97bb8f343d4fcbd4d8f91bba5ec9d5d7f792dbe727f88938"}, - {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e7221580dc1db478464cfeef9b03b95c5852cc22894e418562997df0d074ccc"}, - {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:494053246b119b041960ddcd20fd76224149cfea8ed8777b687358727911dd33"}, - {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_i686.whl", hash 
= "sha256:52a25809fcbecfc63ac9ba0c0fb586f90837f5425edfd1ec9f3372b119585e45"}, - {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:e65610c5792870d45d7b68c677681376fcf9cc1c289f23e8e8b39c1485384185"}, - {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:1b1bba902cba32cdec51fca038fd53f8beee88b77efc373968d1ed021024cc04"}, - {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:662e6016409828ee910f5d9602a2729a8a57d74b163c89a837de3fea050c7582"}, - {file = "yarl-1.9.2-cp37-cp37m-win32.whl", hash = "sha256:f364d3480bffd3aa566e886587eaca7c8c04d74f6e8933f3f2c996b7f09bee1b"}, - {file = "yarl-1.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6a5883464143ab3ae9ba68daae8e7c5c95b969462bbe42e2464d60e7e2698368"}, - {file = "yarl-1.9.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5610f80cf43b6202e2c33ba3ec2ee0a2884f8f423c8f4f62906731d876ef4fac"}, - {file = "yarl-1.9.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b9a4e67ad7b646cd6f0938c7ebfd60e481b7410f574c560e455e938d2da8e0f4"}, - {file = "yarl-1.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:83fcc480d7549ccebe9415d96d9263e2d4226798c37ebd18c930fce43dfb9574"}, - {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fcd436ea16fee7d4207c045b1e340020e58a2597301cfbcfdbe5abd2356c2fb"}, - {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84e0b1599334b1e1478db01b756e55937d4614f8654311eb26012091be109d59"}, - {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3458a24e4ea3fd8930e934c129b676c27452e4ebda80fbe47b56d8c6c7a63a9e"}, - {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:838162460b3a08987546e881a2bfa573960bb559dfa739e7800ceeec92e64417"}, - {file = "yarl-1.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4e2d08f07a3d7d3e12549052eb5ad3eab1c349c53ac51c209a0e5991bbada78"}, - {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:de119f56f3c5f0e2fb4dee508531a32b069a5f2c6e827b272d1e0ff5ac040333"}, - {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:149ddea5abf329752ea5051b61bd6c1d979e13fbf122d3a1f9f0c8be6cb6f63c"}, - {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:674ca19cbee4a82c9f54e0d1eee28116e63bc6fd1e96c43031d11cbab8b2afd5"}, - {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:9b3152f2f5677b997ae6c804b73da05a39daa6a9e85a512e0e6823d81cdad7cc"}, - {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5415d5a4b080dc9612b1b63cba008db84e908b95848369aa1da3686ae27b6d2b"}, - {file = "yarl-1.9.2-cp38-cp38-win32.whl", hash = "sha256:f7a3d8146575e08c29ed1cd287068e6d02f1c7bdff8970db96683b9591b86ee7"}, - {file = "yarl-1.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:63c48f6cef34e6319a74c727376e95626f84ea091f92c0250a98e53e62c77c72"}, - {file = "yarl-1.9.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:75df5ef94c3fdc393c6b19d80e6ef1ecc9ae2f4263c09cacb178d871c02a5ba9"}, - {file = "yarl-1.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c027a6e96ef77d401d8d5a5c8d6bc478e8042f1e448272e8d9752cb0aff8b5c8"}, - {file = "yarl-1.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3b078dbe227f79be488ffcfc7a9edb3409d018e0952cf13f15fd6512847f3f7"}, - {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:59723a029760079b7d991a401386390c4be5bfec1e7dd83e25a6a0881859e716"}, - {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b03917871bf859a81ccb180c9a2e6c1e04d2f6a51d953e6a5cdd70c93d4e5a2a"}, - {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c1012fa63eb6c032f3ce5d2171c267992ae0c00b9e164efe4d73db818465fac3"}, - {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a74dcbfe780e62f4b5a062714576f16c2f3493a0394e555ab141bf0d746bb955"}, - {file = "yarl-1.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8c56986609b057b4839968ba901944af91b8e92f1725d1a2d77cbac6972b9ed1"}, - {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2c315df3293cd521033533d242d15eab26583360b58f7ee5d9565f15fee1bef4"}, - {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:b7232f8dfbd225d57340e441d8caf8652a6acd06b389ea2d3222b8bc89cbfca6"}, - {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:53338749febd28935d55b41bf0bcc79d634881195a39f6b2f767870b72514caf"}, - {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:066c163aec9d3d073dc9ffe5dd3ad05069bcb03fcaab8d221290ba99f9f69ee3"}, - {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8288d7cd28f8119b07dd49b7230d6b4562f9b61ee9a4ab02221060d21136be80"}, - {file = "yarl-1.9.2-cp39-cp39-win32.whl", hash = "sha256:b124e2a6d223b65ba8768d5706d103280914d61f5cae3afbc50fc3dfcc016623"}, - {file = "yarl-1.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:61016e7d582bc46a5378ffdd02cd0314fb8ba52f40f9cf4d9a5e7dbef88dee18"}, - {file = "yarl-1.9.2.tar.gz", hash = "sha256:04ab9d4b9f587c06d801c2abfe9317b77cdf996c65a90d5e84ecc45010823571"}, + {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e"}, + {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2"}, + {file = "yarl-1.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c38c9ddb6103ceae4e4498f9c08fac9b590c5c71b0370f98714768e22ac6fa66"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9e09c9d74f4566e905a0b8fa668c58109f7624db96a2171f21747abc7524234"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8477c1ee4bd47c57d49621a062121c3023609f7a13b8a46953eb6c9716ca392"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5ff2c858f5f6a42c2a8e751100f237c5e869cbde669a724f2062d4c4ef93551"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:357495293086c5b6d34ca9616a43d329317feab7917518bc97a08f9e55648455"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54525ae423d7b7a8ee81ba189f131054defdb122cde31ff17477951464c1691c"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:801e9264d19643548651b9db361ce3287176671fb0117f96b5ac0ee1c3530d53"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e516dc8baf7b380e6c1c26792610230f37147bb754d6426462ab115a02944385"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = 
"sha256:7d5aaac37d19b2904bb9dfe12cdb08c8443e7ba7d2852894ad448d4b8f442863"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:54beabb809ffcacbd9d28ac57b0db46e42a6e341a030293fb3185c409e626b8b"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bac8d525a8dbc2a1507ec731d2867025d11ceadcb4dd421423a5d42c56818541"}, + {file = "yarl-1.9.4-cp310-cp310-win32.whl", hash = "sha256:7855426dfbddac81896b6e533ebefc0af2f132d4a47340cee6d22cac7190022d"}, + {file = "yarl-1.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:848cd2a1df56ddbffeb375535fb62c9d1645dde33ca4d51341378b3f5954429b"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:35a2b9396879ce32754bd457d31a51ff0a9d426fd9e0e3c33394bf4b9036b099"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c7d56b293cc071e82532f70adcbd8b61909eec973ae9d2d1f9b233f3d943f2c"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8a1c6c0be645c745a081c192e747c5de06e944a0d21245f4cf7c05e457c36e0"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b3c1ffe10069f655ea2d731808e76e0f452fc6c749bea04781daf18e6039525"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:549d19c84c55d11687ddbd47eeb348a89df9cb30e1993f1b128f4685cd0ebbf8"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7409f968456111140c1c95301cadf071bd30a81cbd7ab829169fb9e3d72eae9"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e23a6d84d9d1738dbc6e38167776107e63307dfc8ad108e580548d1f2c587f42"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8b889777de69897406c9fb0b76cdf2fd0f31267861ae7501d93003d55f54fbe"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:03caa9507d3d3c83bca08650678e25364e1843b484f19986a527630ca376ecce"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e9035df8d0880b2f1c7f5031f33f69e071dfe72ee9310cfc76f7b605958ceb9"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:c0ec0ed476f77db9fb29bca17f0a8fcc7bc97ad4c6c1d8959c507decb22e8572"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:ee04010f26d5102399bd17f8df8bc38dc7ccd7701dc77f4a68c5b8d733406958"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:49a180c2e0743d5d6e0b4d1a9e5f633c62eca3f8a86ba5dd3c471060e352ca98"}, + {file = "yarl-1.9.4-cp311-cp311-win32.whl", hash = "sha256:81eb57278deb6098a5b62e88ad8281b2ba09f2f1147c4767522353eaa6260b31"}, + {file = "yarl-1.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d1d2532b340b692880261c15aee4dc94dd22ca5d61b9db9a8a361953d36410b1"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0d2454f0aef65ea81037759be5ca9947539667eecebca092733b2eb43c965a81"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:44d8ffbb9c06e5a7f529f38f53eda23e50d1ed33c6c869e01481d3fafa6b8142"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aaaea1e536f98754a6e5c56091baa1b6ce2f2700cc4a00b0d49eca8dea471074"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3777ce5536d17989c91696db1d459574e9a9bd37660ea7ee4d3344579bb6f129"}, + {file = 
"yarl-1.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fc5fc1eeb029757349ad26bbc5880557389a03fa6ada41703db5e068881e5f2"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea65804b5dc88dacd4a40279af0cdadcfe74b3e5b4c897aa0d81cf86927fee78"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa102d6d280a5455ad6a0f9e6d769989638718e938a6a0a2ff3f4a7ff8c62cc4"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09efe4615ada057ba2d30df871d2f668af661e971dfeedf0c159927d48bbeff0"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:008d3e808d03ef28542372d01057fd09168419cdc8f848efe2804f894ae03e51"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6f5cb257bc2ec58f437da2b37a8cd48f666db96d47b8a3115c29f316313654ff"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:992f18e0ea248ee03b5a6e8b3b4738850ae7dbb172cc41c966462801cbf62cf7"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0e9d124c191d5b881060a9e5060627694c3bdd1fe24c5eecc8d5d7d0eb6faabc"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3986b6f41ad22988e53d5778f91855dc0399b043fc8946d4f2e68af22ee9ff10"}, + {file = "yarl-1.9.4-cp312-cp312-win32.whl", hash = "sha256:4b21516d181cd77ebd06ce160ef8cc2a5e9ad35fb1c5930882baff5ac865eee7"}, + {file = "yarl-1.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:a9bd00dc3bc395a662900f33f74feb3e757429e545d831eef5bb280252631984"}, + {file = "yarl-1.9.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:63b20738b5aac74e239622d2fe30df4fca4942a86e31bf47a81a0e94c14df94f"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d7f7de27b8944f1fee2c26a88b4dabc2409d2fea7a9ed3df79b67277644e17"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c74018551e31269d56fab81a728f683667e7c28c04e807ba08f8c9e3bba32f14"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca06675212f94e7a610e85ca36948bb8fc023e458dd6c63ef71abfd482481aa5"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aef935237d60a51a62b86249839b51345f47564208c6ee615ed2a40878dccdd"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b134fd795e2322b7684155b7855cc99409d10b2e408056db2b93b51a52accc7"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d25039a474c4c72a5ad4b52495056f843a7ff07b632c1b92ea9043a3d9950f6e"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f7d6b36dd2e029b6bcb8a13cf19664c7b8e19ab3a58e0fefbb5b8461447ed5ec"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:957b4774373cf6f709359e5c8c4a0af9f6d7875db657adb0feaf8d6cb3c3964c"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d7eeb6d22331e2fd42fce928a81c697c9ee2d51400bd1a28803965883e13cead"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6a962e04b8f91f8c4e5917e518d17958e3bdee71fd1d8b88cdce74dd0ebbf434"}, + {file = "yarl-1.9.4-cp37-cp37m-win32.whl", hash = "sha256:f3bc6af6e2b8f92eced34ef6a96ffb248e863af20ef4fde9448cc8c9b858b749"}, + {file = 
"yarl-1.9.4-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4d7a90a92e528aadf4965d685c17dacff3df282db1121136c382dc0b6014d2"}, + {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ec61d826d80fc293ed46c9dd26995921e3a82146feacd952ef0757236fc137be"}, + {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8be9e837ea9113676e5754b43b940b50cce76d9ed7d2461df1af39a8ee674d9f"}, + {file = "yarl-1.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bef596fdaa8f26e3d66af846bbe77057237cb6e8efff8cd7cc8dff9a62278bbf"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d47552b6e52c3319fede1b60b3de120fe83bde9b7bddad11a69fb0af7db32f1"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84fc30f71689d7fc9168b92788abc977dc8cefa806909565fc2951d02f6b7d57"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4aa9741085f635934f3a2583e16fcf62ba835719a8b2b28fb2917bb0537c1dfa"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:206a55215e6d05dbc6c98ce598a59e6fbd0c493e2de4ea6cc2f4934d5a18d130"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07574b007ee20e5c375a8fe4a0789fad26db905f9813be0f9fef5a68080de559"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5a2e2433eb9344a163aced6a5f6c9222c0786e5a9e9cac2c89f0b28433f56e23"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6ad6d10ed9b67a382b45f29ea028f92d25bc0bc1daf6c5b801b90b5aa70fb9ec"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6fe79f998a4052d79e1c30eeb7d6c1c1056ad33300f682465e1b4e9b5a188b78"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a825ec844298c791fd28ed14ed1bffc56a98d15b8c58a20e0e08c1f5f2bea1be"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8619d6915b3b0b34420cf9b2bb6d81ef59d984cb0fde7544e9ece32b4b3043c3"}, + {file = "yarl-1.9.4-cp38-cp38-win32.whl", hash = "sha256:686a0c2f85f83463272ddffd4deb5e591c98aac1897d65e92319f729c320eece"}, + {file = "yarl-1.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:a00862fb23195b6b8322f7d781b0dc1d82cb3bcac346d1e38689370cc1cc398b"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:604f31d97fa493083ea21bd9b92c419012531c4e17ea6da0f65cacdcf5d0bd27"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8a854227cf581330ffa2c4824d96e52ee621dd571078a252c25e3a3b3d94a1b1"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ba6f52cbc7809cd8d74604cce9c14868306ae4aa0282016b641c661f981a6e91"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6327976c7c2f4ee6816eff196e25385ccc02cb81427952414a64811037bbc8b"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8397a3817d7dcdd14bb266283cd1d6fc7264a48c186b986f32e86d86d35fbac5"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0381b4ce23ff92f8170080c97678040fc5b08da85e9e292292aba67fdac6c34"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23d32a2594cb5d565d358a92e151315d1b2268bc10f4610d098f96b147370136"}, + {file = 
"yarl-1.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ddb2a5c08a4eaaba605340fdee8fc08e406c56617566d9643ad8bf6852778fc7"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:26a1dc6285e03f3cc9e839a2da83bcbf31dcb0d004c72d0730e755b33466c30e"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:18580f672e44ce1238b82f7fb87d727c4a131f3a9d33a5e0e82b793362bf18b4"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:29e0f83f37610f173eb7e7b5562dd71467993495e568e708d99e9d1944f561ec"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:1f23e4fe1e8794f74b6027d7cf19dc25f8b63af1483d91d595d4a07eca1fb26c"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db8e58b9d79200c76956cefd14d5c90af54416ff5353c5bfd7cbe58818e26ef0"}, + {file = "yarl-1.9.4-cp39-cp39-win32.whl", hash = "sha256:c7224cab95645c7ab53791022ae77a4509472613e839dab722a72abe5a684575"}, + {file = "yarl-1.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:824d6c50492add5da9374875ce72db7a0733b29c2394890aef23d533106e2b15"}, + {file = "yarl-1.9.4-py3-none-any.whl", hash = "sha256:928cecb0ef9d5a7946eb6ff58417ad2fe9375762382f1bf5c55e61645f2c43ad"}, + {file = "yarl-1.9.4.tar.gz", hash = "sha256:566db86717cf8080b99b58b083b773a908ae40f06681e87e589a976faf8246bf"}, ] [package.dependencies] @@ -4166,6 +4385,7 @@ cffi = ["cffi (>=1.11)"] [extras] adlfs = ["adlfs"] +daft = ["getdaft"] duckdb = ["duckdb", "pyarrow"] dynamodb = ["boto3"] gcsfs = ["gcsfs"] @@ -4177,9 +4397,10 @@ ray = ["pandas", "pyarrow", "ray"] s3fs = ["s3fs"] snappy = ["python-snappy"] sql-postgres = ["psycopg2-binary", "sqlalchemy"] +sql-sqlite = ["sqlalchemy"] zstandard = ["zstandard"] [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "0e2f758b34577e0d1294391c316cb2166108c2d27b24163ef54c7a36cd7fc542" +content-hash = "e56883bd2961705dffa548c4a32f2d4215a646de149e12aa519df336ede48076" diff --git a/pyiceberg/avro/codecs/__init__.py b/pyiceberg/avro/codecs/__init__.py index 8010b9397e..22e2f71cf8 100644 --- a/pyiceberg/avro/codecs/__init__.py +++ b/pyiceberg/avro/codecs/__init__.py @@ -23,6 +23,7 @@ so don't confuse it with the Python's "codecs", which is a package mainly for converting character sets (https://docs.python.org/3/library/codecs.html). """ + from __future__ import annotations from typing import Dict, Optional, Type diff --git a/pyiceberg/avro/codecs/codec.py b/pyiceberg/avro/codecs/codec.py index 1c04f0db3e..ab97ab6990 100644 --- a/pyiceberg/avro/codecs/codec.py +++ b/pyiceberg/avro/codecs/codec.py @@ -24,10 +24,8 @@ class Codec(ABC): @staticmethod @abstractmethod - def compress(data: bytes) -> tuple[bytes, int]: - ... + def compress(data: bytes) -> tuple[bytes, int]: ... @staticmethod @abstractmethod - def decompress(data: bytes) -> bytes: - ... + def decompress(data: bytes) -> bytes: ... 
diff --git a/pyiceberg/avro/decoder_fast.pyi b/pyiceberg/avro/decoder_fast.pyi index cf45ce5066..cc367c4764 100644 --- a/pyiceberg/avro/decoder_fast.pyi +++ b/pyiceberg/avro/decoder_fast.pyi @@ -20,37 +20,54 @@ from pyiceberg.avro.decoder import BinaryDecoder class CythonBinaryDecoder(BinaryDecoder): def __init__(self, input_contents: bytes) -> None: pass + def tell(self) -> int: pass + def read(self, n: int) -> bytes: pass + def read_boolean(self) -> bool: pass + def read_int(self) -> int: pass + def read_ints(self, count: int) -> tuple[int, ...]: pass + def read_int_bytes_dict(self, count: int, dest: dict[int, bytes]) -> None: pass + def read_bytes(self) -> bytes: pass + def read_float(self) -> float: pass + def read_double(self) -> float: pass + def read_utf8(self) -> str: pass + def skip(self, n: int) -> None: pass + def skip_int(self) -> None: pass + def skip_boolean(self) -> None: pass + def skip_float(self) -> None: pass + def skip_double(self) -> None: pass + def skip_bytes(self) -> None: pass + def skip_utf8(self) -> None: pass diff --git a/pyiceberg/avro/decoder_fast.pyx b/pyiceberg/avro/decoder_fast.pyx index 182fd0e92e..52caec3308 100644 --- a/pyiceberg/avro/decoder_fast.pyx +++ b/pyiceberg/avro/decoder_fast.pyx @@ -32,9 +32,7 @@ unsigned_long_long_array_template = cython.declare(array.array, array.array('Q', @cython.final cdef class CythonBinaryDecoder: - """Implement a BinaryDecoder that reads from an in-memory buffer. - - """ + """Implement a BinaryDecoder that reads from an in-memory buffer.""" # This the data that is duplicated when the decoder is created. cdef unsigned char *_data diff --git a/pyiceberg/avro/file.py b/pyiceberg/avro/file.py index 4985c6fb60..d0da7651b7 100644 --- a/pyiceberg/avro/file.py +++ b/pyiceberg/avro/file.py @@ -16,6 +16,7 @@ # under the License. # pylint: disable=W0621 """Avro reader for reading Avro files.""" + from __future__ import annotations import io @@ -34,7 +35,8 @@ TypeVar, ) -from pyiceberg.avro.codecs import KNOWN_CODECS, Codec +from pyiceberg.avro.codecs import KNOWN_CODECS +from pyiceberg.avro.codecs.codec import Codec from pyiceberg.avro.decoder import BinaryDecoder, new_decoder from pyiceberg.avro.encoder import BinaryEncoder from pyiceberg.avro.reader import Reader @@ -193,8 +195,7 @@ def _read_block(self) -> int: raise ValueError(f"Expected sync bytes {self.header.sync!r}, but got {sync_marker!r}") block_records = self.decoder.read_int() - block_bytes_len = self.decoder.read_int() - block_bytes = self.decoder.read(block_bytes_len) + block_bytes = self.decoder.read_bytes() if codec := self.header.compression_codec(): block_bytes = codec.decompress(block_bytes) diff --git a/pyiceberg/avro/reader.py b/pyiceberg/avro/reader.py index 2b87e4b06f..988bd42ba4 100644 --- a/pyiceberg/avro/reader.py +++ b/pyiceberg/avro/reader.py @@ -23,6 +23,7 @@ The reader tree can be changed in such a way that the read schema is different, while respecting the read schema. """ + from __future__ import annotations from abc import abstractmethod @@ -85,12 +86,10 @@ def _skip_map_array(decoder: BinaryDecoder, skip_entry: Callable[[], None]) -> N class Reader(Singleton): @abstractmethod - def read(self, decoder: BinaryDecoder) -> Any: - ... + def read(self, decoder: BinaryDecoder) -> Any: ... @abstractmethod - def skip(self, decoder: BinaryDecoder) -> None: - ... + def skip(self, decoder: BinaryDecoder) -> None: ... 
def __repr__(self) -> str: """Return the string representation of the Reader class.""" diff --git a/pyiceberg/avro/resolver.py b/pyiceberg/avro/resolver.py index faf4dd0501..2a53f4869b 100644 --- a/pyiceberg/avro/resolver.py +++ b/pyiceberg/avro/resolver.py @@ -232,9 +232,7 @@ def resolve_reader( Raises: NotImplementedError: If attempting to resolve an unrecognized object type. """ - return visit_with_partner( - file_schema, read_schema, ReadSchemaResolver(read_types, read_enums), SchemaPartnerAccessor() - ) # type: ignore + return visit_with_partner(file_schema, read_schema, ReadSchemaResolver(read_types, read_enums), SchemaPartnerAccessor()) # type: ignore class EnumReader(Reader): diff --git a/pyiceberg/avro/writer.py b/pyiceberg/avro/writer.py index 4e3d3d476a..b53230d3c7 100644 --- a/pyiceberg/avro/writer.py +++ b/pyiceberg/avro/writer.py @@ -20,6 +20,7 @@ Constructing a writer tree from the schema makes it easy to decouple the writing implementation from the schema. """ + from __future__ import annotations from abc import abstractmethod @@ -43,8 +44,7 @@ @dataclass(frozen=True) class Writer(Singleton): @abstractmethod - def write(self, encoder: BinaryEncoder, val: Any) -> Any: - ... + def write(self, encoder: BinaryEncoder, val: Any) -> Any: ... def __repr__(self) -> str: """Return string representation of this object.""" diff --git a/pyiceberg/catalog/__init__.py b/pyiceberg/catalog/__init__.py index 993be87ddd..4f7b5a3292 100644 --- a/pyiceberg/catalog/__init__.py +++ b/pyiceberg/catalog/__init__.py @@ -18,11 +18,13 @@ from __future__ import annotations import logging +import re import uuid from abc import ABC, abstractmethod from dataclasses import dataclass from enum import Enum from typing import ( + TYPE_CHECKING, Callable, Dict, List, @@ -34,7 +36,7 @@ cast, ) -from pyiceberg.exceptions import NoSuchNamespaceError, NoSuchTableError, NotInstalledError +from pyiceberg.exceptions import NoSuchNamespaceError, NoSuchTableError, NotInstalledError, TableAlreadyExistsError from pyiceberg.io import FileIO, load_file_io from pyiceberg.manifest import ManifestFile from pyiceberg.partitioning import UNPARTITIONED_PARTITION_SPEC, PartitionSpec @@ -44,8 +46,8 @@ CommitTableRequest, CommitTableResponse, Table, - TableMetadata, ) +from pyiceberg.table.metadata import TableMetadata from pyiceberg.table.sorting import UNSORTED_SORT_ORDER, SortOrder from pyiceberg.typedef import ( EMPTY_DICT, @@ -55,6 +57,9 @@ ) from pyiceberg.utils.config import Config, merge_config +if TYPE_CHECKING: + import pyarrow as pa + logger = logging.getLogger(__name__) _ENV_CONFIG = Config() @@ -74,6 +79,17 @@ LOCATION = "location" EXTERNAL_TABLE = "EXTERNAL_TABLE" +TABLE_METADATA_FILE_NAME_REGEX = re.compile( + r""" + (\d+) # version number + - # separator + ([\w-]{36}) # UUID (36 characters, including hyphens) + (?:\.\w+)? 
# optional codec name + \.metadata\.json # file extension + """, + re.X, +) + class CatalogType(Enum): REST = "rest" @@ -122,7 +138,9 @@ def load_sql(name: str, conf: Properties) -> Catalog: return SqlCatalog(name, **conf) except ImportError as exc: - raise NotInstalledError("SQLAlchemy support not installed: pip install 'pyiceberg[sql-postgres]'") from exc + raise NotInstalledError( + "SQLAlchemy support not installed: pip install 'pyiceberg[sql-postgres]' or pip install 'pyiceberg[sql-sqlite]'" + ) from exc AVAILABLE_CATALOGS: dict[CatalogType, Callable[[str, Properties], Catalog]] = { @@ -274,7 +292,7 @@ def _load_file_io(self, properties: Properties = EMPTY_DICT, location: Optional[ def create_table( self, identifier: Union[str, Identifier], - schema: Schema, + schema: Union[Schema, "pa.Schema"], location: Optional[str] = None, partition_spec: PartitionSpec = UNPARTITIONED_PARTITION_SPEC, sort_order: SortOrder = UNSORTED_SORT_ORDER, @@ -297,6 +315,34 @@ def create_table( TableAlreadyExistsError: If a table with the name already exists. """ + def create_table_if_not_exists( + self, + identifier: Union[str, Identifier], + schema: Union[Schema, "pa.Schema"], + location: Optional[str] = None, + partition_spec: PartitionSpec = UNPARTITIONED_PARTITION_SPEC, + sort_order: SortOrder = UNSORTED_SORT_ORDER, + properties: Properties = EMPTY_DICT, + ) -> Table: + """Create a table if it does not exist. + + Args: + identifier (str | Identifier): Table identifier. + schema (Schema): Table's schema. + location (str | None): Location for the table. Optional Argument. + partition_spec (PartitionSpec): PartitionSpec for the table. + sort_order (SortOrder): SortOrder for the table. + properties (Properties): Table properties that can be a string based dictionary. + + Returns: + Table: the created table instance if the table does not exist, else the existing + table instance. + """ + try: + return self.create_table(identifier, schema, location, partition_spec, sort_order, properties) + except TableAlreadyExistsError: + return self.load_table(identifier) + @abstractmethod def load_table(self, identifier: Union[str, Identifier]) -> Table: """Load the table's metadata and returns the table instance. @@ -367,6 +413,8 @@ def _commit_table(self, table_request: CommitTableRequest) -> CommitTableRespons Raises: NoSuchTableError: If a table with the given identifier does not exist. + CommitFailedException: Requirement not met, or a conflict with a concurrent commit. + CommitStateUnknownException: Failed due to an internal exception on the side of the catalog. 
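# Editor's note: an illustrative sketch, not part of the diff, showing the
# new create_table_if_not_exists() helper defined above. The catalog name,
# namespace, and schema are hypothetical, and the namespace is assumed to
# already exist.
from pyiceberg.catalog import load_catalog
from pyiceberg.schema import Schema
from pyiceberg.types import LongType, NestedField

catalog = load_catalog("default")
schema = Schema(NestedField(field_id=1, name="id", field_type=LongType(), required=True))
# The first call creates the table; repeated calls return the existing
# instance instead of raising TableAlreadyExistsError.
table = catalog.create_table_if_not_exists("docs_example.bids", schema=schema)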
""" @abstractmethod @@ -498,6 +546,22 @@ def _check_for_overlap(removals: Optional[Set[str]], updates: Properties) -> Non if overlap: raise ValueError(f"Updates and deletes have an overlap: {overlap}") + @staticmethod + def _convert_schema_if_needed(schema: Union[Schema, "pa.Schema"]) -> Schema: + if isinstance(schema, Schema): + return schema + try: + import pyarrow as pa + + from pyiceberg.io.pyarrow import _ConvertToIcebergWithoutIDs, visit_pyarrow + + if isinstance(schema, pa.Schema): + schema: Schema = visit_pyarrow(schema, _ConvertToIcebergWithoutIDs()) # type: ignore + return schema + except ModuleNotFoundError: + pass + raise ValueError(f"{type(schema)=}, but it must be pyiceberg.schema.Schema or pyarrow.Schema") + def _resolve_table_location(self, location: Optional[str], database_name: str, table_name: str) -> str: if not location: return self._get_default_warehouse_location(database_name, table_name) @@ -587,8 +651,38 @@ def _write_metadata(metadata: TableMetadata, io: FileIO, metadata_path: str) -> ToOutputFile.table_metadata(metadata, io.new_output(metadata_path)) @staticmethod - def _get_metadata_location(location: str) -> str: - return f"{location}/metadata/00000-{uuid.uuid4()}.metadata.json" + def _get_metadata_location(location: str, new_version: int = 0) -> str: + if new_version < 0: + raise ValueError(f"Table metadata version: `{new_version}` must be a non-negative integer") + version_str = f"{new_version:05d}" + return f"{location}/metadata/{version_str}-{uuid.uuid4()}.metadata.json" + + @staticmethod + def _parse_metadata_version(metadata_location: str) -> int: + """Parse the version from the metadata location. + + The version is the first part of the file name, before the first dash. + For example, the version of the metadata file + `s3://bucket/db/tb/metadata/00001-6c97e413-d51b-4538-ac70-12fe2a85cb83.metadata.json` + is 1. + If the path does not comply with the pattern, the version is defaulted to be -1, ensuring + that the next metadata file is treated as having version 0. + + Args: + metadata_location (str): The location of the metadata file. + + Returns: + int: The version of the metadata file. 
-1 if the file name does not have a valid version string. + """ + file_name = metadata_location.split("/")[-1] + if file_name_match := TABLE_METADATA_FILE_NAME_REGEX.fullmatch(file_name): + try: + uuid.UUID(file_name_match.group(2)) + except ValueError: + return -1 + return int(file_name_match.group(1)) + else: + return -1 def _get_updated_props_and_update_summary( self, current_properties: Properties, removals: Optional[Set[str]], updates: Properties diff --git a/pyiceberg/catalog/dynamodb.py b/pyiceberg/catalog/dynamodb.py index 3eee95da3c..266dd6353d 100644 --- a/pyiceberg/catalog/dynamodb.py +++ b/pyiceberg/catalog/dynamodb.py @@ -17,6 +17,7 @@ import uuid from time import time from typing import ( + TYPE_CHECKING, Any, Dict, List, @@ -33,8 +34,6 @@ PREVIOUS_METADATA_LOCATION, TABLE_TYPE, Catalog, - Identifier, - Properties, PropertiesUpdateSummary, ) from pyiceberg.exceptions import ( @@ -55,7 +54,10 @@ from pyiceberg.table import CommitTableRequest, CommitTableResponse, Table from pyiceberg.table.metadata import new_table_metadata from pyiceberg.table.sorting import UNSORTED_SORT_ORDER, SortOrder -from pyiceberg.typedef import EMPTY_DICT +from pyiceberg.typedef import EMPTY_DICT, Identifier, Properties + +if TYPE_CHECKING: + import pyarrow as pa DYNAMODB_CLIENT = "dynamodb" @@ -80,7 +82,15 @@ class DynamoDbCatalog(Catalog): def __init__(self, name: str, **properties: str): super().__init__(name, **properties) - self.dynamodb = boto3.client(DYNAMODB_CLIENT) + session = boto3.Session( + profile_name=properties.get("profile_name"), + region_name=properties.get("region_name"), + botocore_session=properties.get("botocore_session"), + aws_access_key_id=properties.get("aws_access_key_id"), + aws_secret_access_key=properties.get("aws_secret_access_key"), + aws_session_token=properties.get("aws_session_token"), + ) + self.dynamodb = session.client(DYNAMODB_CLIENT) self.dynamodb_table_name = self.properties.get(DYNAMODB_TABLE_NAME, DYNAMODB_TABLE_NAME_DEFAULT) self._ensure_catalog_table_exists_or_create() @@ -119,7 +129,7 @@ def _dynamodb_table_exists(self) -> bool: def create_table( self, identifier: Union[str, Identifier], - schema: Schema, + schema: Union[Schema, "pa.Schema"], location: Optional[str] = None, partition_spec: PartitionSpec = UNPARTITIONED_PARTITION_SPEC, sort_order: SortOrder = UNSORTED_SORT_ORDER, @@ -144,6 +154,8 @@ def create_table( ValueError: If the identifier is invalid, or no path is given to store metadata. """ + schema: Schema = self._convert_schema_if_needed(schema) # type: ignore + database_name, table_name = self.identifier_to_database_and_table(identifier) location = self._resolve_table_location(location, database_name, table_name) @@ -194,6 +206,7 @@ def _commit_table(self, table_request: CommitTableRequest) -> CommitTableRespons Raises: NoSuchTableError: If a table with the given identifier does not exist. + CommitFailedException: Requirement not met, or a conflict with a concurrent commit. 
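# Editor's note: an illustrative sketch, not part of the diff, of the
# version round-trip used by the commit paths later in this change: parse
# the version out of the current metadata location, then name the next
# metadata file with version + 1 (simplified regex, same shape as
# TABLE_METADATA_FILE_NAME_REGEX above).
import re
import uuid

PATTERN = re.compile(r"(\d+)-([\w-]{36})(?:\.\w+)?\.metadata\.json")

def parse_version(metadata_location: str) -> int:
    match = PATTERN.fullmatch(metadata_location.split("/")[-1])
    return int(match.group(1)) if match else -1

loc = "s3://bucket/db/tb/metadata/00001-6c97e413-d51b-4538-ac70-12fe2a85cb83.metadata.json"
assert parse_version(loc) == 1
assert parse_version("s3://bucket/db/tb/metadata/bogus.json") == -1  # falls back to -1
next_name = f"{parse_version(loc) + 1:05d}-{uuid.uuid4()}.metadata.json"  # "00002-..."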
""" raise NotImplementedError diff --git a/pyiceberg/catalog/glue.py b/pyiceberg/catalog/glue.py index 723de2f335..adec150d84 100644 --- a/pyiceberg/catalog/glue.py +++ b/pyiceberg/catalog/glue.py @@ -17,7 +17,9 @@ from typing import ( + TYPE_CHECKING, Any, + Dict, List, Optional, Set, @@ -28,6 +30,7 @@ import boto3 from mypy_boto3_glue.client import GlueClient from mypy_boto3_glue.type_defs import ( + ColumnTypeDef, DatabaseInputTypeDef, DatabaseTypeDef, StorageDescriptorTypeDef, @@ -40,13 +43,13 @@ ICEBERG, LOCATION, METADATA_LOCATION, + PREVIOUS_METADATA_LOCATION, TABLE_TYPE, Catalog, - Identifier, - Properties, PropertiesUpdateSummary, ) from pyiceberg.exceptions import ( + CommitFailedException, NamespaceAlreadyExistsError, NamespaceNotEmptyError, NoSuchIcebergTableError, @@ -57,23 +60,158 @@ ) from pyiceberg.io import load_file_io from pyiceberg.partitioning import UNPARTITIONED_PARTITION_SPEC, PartitionSpec -from pyiceberg.schema import Schema +from pyiceberg.schema import Schema, SchemaVisitor, visit from pyiceberg.serializers import FromInputFile -from pyiceberg.table import CommitTableRequest, CommitTableResponse, Table -from pyiceberg.table.metadata import new_table_metadata +from pyiceberg.table import CommitTableRequest, CommitTableResponse, Table, update_table_metadata +from pyiceberg.table.metadata import TableMetadata, new_table_metadata from pyiceberg.table.sorting import UNSORTED_SORT_ORDER, SortOrder -from pyiceberg.typedef import EMPTY_DICT - - -def _construct_parameters(metadata_location: str) -> Properties: - return {TABLE_TYPE: ICEBERG.upper(), METADATA_LOCATION: metadata_location} - +from pyiceberg.typedef import EMPTY_DICT, Identifier, Properties +from pyiceberg.types import ( + BinaryType, + BooleanType, + DateType, + DecimalType, + DoubleType, + FixedType, + FloatType, + IntegerType, + ListType, + LongType, + MapType, + NestedField, + PrimitiveType, + StringType, + StructType, + TimestampType, + TimestamptzType, + TimeType, + UUIDType, +) -def _construct_create_table_input(table_name: str, metadata_location: str, properties: Properties) -> TableInputTypeDef: +if TYPE_CHECKING: + import pyarrow as pa + + +# There is a unique Glue metastore in each AWS account and each AWS region. By default, GlueCatalog chooses the Glue +# metastore to use based on the user's default AWS client credential and region setup. You can specify the Glue catalog +# ID through glue.id catalog property to point to a Glue catalog in a different AWS account. The Glue catalog ID is your +# numeric AWS account ID. +GLUE_ID = "glue.id" + +# If Glue should skip archiving an old table version when creating a new version in a commit. By +# default, Glue archives all old table versions after an UpdateTable call, but Glue has a default +# max number of archived table versions (can be increased). So for streaming use case with lots +# of commits, it is recommended to set this value to true. 
+GLUE_SKIP_ARCHIVE = "glue.skip-archive" +GLUE_SKIP_ARCHIVE_DEFAULT = True + +ICEBERG_FIELD_ID = "iceberg.field.id" +ICEBERG_FIELD_OPTIONAL = "iceberg.field.optional" +ICEBERG_FIELD_CURRENT = "iceberg.field.current" + + +def _construct_parameters( + metadata_location: str, glue_table: Optional[TableTypeDef] = None, prev_metadata_location: Optional[str] = None +) -> Properties: + new_parameters = glue_table.get("Parameters", {}) if glue_table else {} + new_parameters.update({TABLE_TYPE: ICEBERG.upper(), METADATA_LOCATION: metadata_location}) + if prev_metadata_location: + new_parameters[PREVIOUS_METADATA_LOCATION] = prev_metadata_location + return new_parameters + + +GLUE_PRIMITIVE_TYPES = { + BooleanType: "boolean", + IntegerType: "int", + LongType: "bigint", + FloatType: "float", + DoubleType: "double", + DateType: "date", + TimeType: "string", + StringType: "string", + UUIDType: "string", + TimestampType: "timestamp", + TimestamptzType: "timestamp", + FixedType: "binary", + BinaryType: "binary", +} + + +class _IcebergSchemaToGlueType(SchemaVisitor[str]): + def schema(self, schema: Schema, struct_result: str) -> str: + return struct_result + + def struct(self, struct: StructType, field_results: List[str]) -> str: + return f"struct<{','.join(field_results)}>" + + def field(self, field: NestedField, field_result: str) -> str: + return f"{field.name}:{field_result}" + + def list(self, list_type: ListType, element_result: str) -> str: + return f"array<{element_result}>" + + def map(self, map_type: MapType, key_result: str, value_result: str) -> str: + return f"map<{key_result},{value_result}>" + + def primitive(self, primitive: PrimitiveType) -> str: + if isinstance(primitive, DecimalType): + return f"decimal({primitive.precision},{primitive.scale})" + if (primitive_type := type(primitive)) not in GLUE_PRIMITIVE_TYPES: + return str(primitive_type.root) + return GLUE_PRIMITIVE_TYPES[primitive_type] + + +def _to_columns(metadata: TableMetadata) -> List[ColumnTypeDef]: + results: Dict[str, ColumnTypeDef] = {} + + def _append_to_results(field: NestedField, is_current: bool) -> None: + if field.name in results: + return + + results[field.name] = cast( + ColumnTypeDef, + { + "Name": field.name, + "Type": visit(field.field_type, _IcebergSchemaToGlueType()), + "Parameters": { + ICEBERG_FIELD_ID: str(field.field_id), + ICEBERG_FIELD_OPTIONAL: str(field.optional).lower(), + ICEBERG_FIELD_CURRENT: str(is_current).lower(), + }, + }, + ) + if field.doc: + results[field.name]["Comment"] = field.doc + + if current_schema := metadata.schema_by_id(metadata.current_schema_id): + for field in current_schema.columns: + _append_to_results(field, True) + + for schema in metadata.schemas: + if schema.schema_id == metadata.current_schema_id: + continue + for field in schema.columns: + _append_to_results(field, False) + + return list(results.values()) + + +def _construct_table_input( + table_name: str, + metadata_location: str, + properties: Properties, + metadata: TableMetadata, + glue_table: Optional[TableTypeDef] = None, + prev_metadata_location: Optional[str] = None, +) -> TableInputTypeDef: table_input: TableInputTypeDef = { "Name": table_name, "TableType": EXTERNAL_TABLE, - "Parameters": _construct_parameters(metadata_location), + "Parameters": _construct_parameters(metadata_location, glue_table, prev_metadata_location), + "StorageDescriptor": { + "Columns": _to_columns(metadata), + "Location": metadata.location, + }, } if "Description" in properties: @@ -119,6 +257,22 @@ def 
_construct_database_input(database_name: str, properties: Properties) -> Dat return database_input +def _register_glue_catalog_id_with_glue_client(glue: GlueClient, glue_catalog_id: str) -> None: + """ + Register the Glue Catalog ID (AWS Account ID) as a parameter on all Glue client methods. + + It's more ergonomic to do this than to pass the CatalogId as a parameter to every client call since it's an optional + parameter and boto3 does not support 'None' values for missing parameters. + """ + event_system = glue.meta.events + + def add_glue_catalog_id(params: Dict[str, str], **kwargs: Any) -> None: + if "CatalogId" not in params: + params["CatalogId"] = glue_catalog_id + + event_system.register("provide-client-params.glue", add_glue_catalog_id) + + class GlueCatalog(Catalog): def __init__(self, name: str, **properties: Any): super().__init__(name, **properties) @@ -133,6 +287,9 @@ def __init__(self, name: str, **properties: Any): ) self.glue: GlueClient = session.client("glue") + if glue_catalog_id := properties.get(GLUE_ID): + _register_glue_catalog_id_with_glue_client(self.glue, glue_catalog_id) + def _convert_glue_to_iceberg(self, glue_table: TableTypeDef) -> Table: properties: Properties = glue_table["Parameters"] @@ -177,10 +334,32 @@ def _create_glue_table(self, database_name: str, table_name: str, table_input: T except self.glue.exceptions.EntityNotFoundException as e: raise NoSuchNamespaceError(f"Database {database_name} does not exist") from e + def _update_glue_table(self, database_name: str, table_name: str, table_input: TableInputTypeDef, version_id: str) -> None: + try: + self.glue.update_table( + DatabaseName=database_name, + TableInput=table_input, + SkipArchive=self.properties.get(GLUE_SKIP_ARCHIVE, GLUE_SKIP_ARCHIVE_DEFAULT), + VersionId=version_id, + ) + except self.glue.exceptions.EntityNotFoundException as e: + raise NoSuchTableError(f"Table does not exist: {database_name}.{table_name} (Glue table version {version_id})") from e + except self.glue.exceptions.ConcurrentModificationException as e: + raise CommitFailedException( + f"Cannot commit {database_name}.{table_name} because Glue detected concurrent update to table version {version_id}" + ) from e + + def _get_glue_table(self, database_name: str, table_name: str) -> TableTypeDef: + try: + load_table_response = self.glue.get_table(DatabaseName=database_name, Name=table_name) + return load_table_response["Table"] + except self.glue.exceptions.EntityNotFoundException as e: + raise NoSuchTableError(f"Table does not exist: {database_name}.{table_name}") from e + def create_table( self, identifier: Union[str, Identifier], - schema: Schema, + schema: Union[Schema, "pa.Schema"], location: Optional[str] = None, partition_spec: PartitionSpec = UNPARTITIONED_PARTITION_SPEC, sort_order: SortOrder = UNSORTED_SORT_ORDER, @@ -205,6 +384,8 @@ def create_table( ValueError: If the identifier is invalid, or no path is given to store metadata. 
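# (Editor's note: illustrative output, not part of the diff. Under the
#  _IcebergSchemaToGlueType visitor above, Iceberg types flatten into
#  Hive-style strings for the Glue column list -- a long becomes
#  "bigint", a list of strings "array<string>", a decimal(19, 4)
#  "decimal(19,4)", and a nested struct "struct<x:double,y:double>" --
#  while each column carries iceberg.field.id / iceberg.field.optional /
#  iceberg.field.current parameters for correlating back to the schema.)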
""" + schema: Schema = self._convert_schema_if_needed(schema) # type: ignore + database_name, table_name = self.identifier_to_database_and_table(identifier) location = self._resolve_table_location(location, database_name, table_name) @@ -215,7 +396,7 @@ def create_table( io = load_file_io(properties=self.properties, location=metadata_location) self._write_metadata(metadata, io, metadata_location) - table_input = _construct_create_table_input(table_name, metadata_location, properties) + table_input = _construct_table_input(table_name, metadata_location, properties, metadata) database_name, table_name = self.identifier_to_database_and_table(identifier) self._create_glue_table(database_name=database_name, table_name=table_name, table_input=table_input) @@ -247,8 +428,53 @@ def _commit_table(self, table_request: CommitTableRequest) -> CommitTableRespons Raises: NoSuchTableError: If a table with the given identifier does not exist. + CommitFailedException: Requirement not met, or a conflict with a concurrent commit. """ - raise NotImplementedError + identifier_tuple = self.identifier_to_tuple_without_catalog( + tuple(table_request.identifier.namespace.root + [table_request.identifier.name]) + ) + database_name, table_name = self.identifier_to_database_and_table(identifier_tuple) + + current_glue_table = self._get_glue_table(database_name=database_name, table_name=table_name) + glue_table_version_id = current_glue_table.get("VersionId") + if not glue_table_version_id: + raise CommitFailedException(f"Cannot commit {database_name}.{table_name} because Glue table version id is missing") + current_table = self._convert_glue_to_iceberg(glue_table=current_glue_table) + base_metadata = current_table.metadata + + # Validate the update requirements + for requirement in table_request.requirements: + requirement.validate(base_metadata) + + updated_metadata = update_table_metadata(base_metadata, table_request.updates) + if updated_metadata == base_metadata: + # no changes, do nothing + return CommitTableResponse(metadata=base_metadata, metadata_location=current_table.metadata_location) + + # write new metadata + new_metadata_version = self._parse_metadata_version(current_table.metadata_location) + 1 + new_metadata_location = self._get_metadata_location(current_table.metadata.location, new_metadata_version) + self._write_metadata(updated_metadata, current_table.io, new_metadata_location) + + update_table_input = _construct_table_input( + table_name=table_name, + metadata_location=new_metadata_location, + properties=current_table.properties, + metadata=updated_metadata, + glue_table=current_glue_table, + prev_metadata_location=current_table.metadata_location, + ) + + # Pass `version_id` to implement optimistic locking: it ensures updates are rejected if concurrent + # modifications occur. See more details at https://iceberg.apache.org/docs/latest/aws/#optimistic-locking + self._update_glue_table( + database_name=database_name, + table_name=table_name, + table_input=update_table_input, + version_id=glue_table_version_id, + ) + + return CommitTableResponse(metadata=updated_metadata, metadata_location=new_metadata_location) def load_table(self, identifier: Union[str, Identifier]) -> Table: """Load the table's metadata and returns the table instance. 
@@ -267,12 +493,8 @@ def load_table(self, identifier: Union[str, Identifier]) -> Table: """ identifier_tuple = self.identifier_to_tuple_without_catalog(identifier) database_name, table_name = self.identifier_to_database_and_table(identifier_tuple, NoSuchTableError) - try: - load_table_response = self.glue.get_table(DatabaseName=database_name, Name=table_name) - except self.glue.exceptions.EntityNotFoundException as e: - raise NoSuchTableError(f"Table does not exist: {database_name}.{table_name}") from e - return self._convert_glue_to_iceberg(load_table_response["Table"]) + return self._convert_glue_to_iceberg(self._get_glue_table(database_name=database_name, table_name=table_name)) def drop_table(self, identifier: Union[str, Identifier]) -> None: """Drop a table. @@ -407,7 +629,6 @@ def list_tables(self, namespace: Union[str, Identifier]) -> List[Identifier]: table_list: List[TableTypeDef] = [] next_token: Optional[str] = None try: - table_list_response = self.glue.get_tables(DatabaseName=database_name) while True: table_list_response = ( self.glue.get_tables(DatabaseName=database_name) @@ -434,7 +655,6 @@ def list_namespaces(self, namespace: Union[str, Identifier] = ()) -> List[Identi return [] database_list: List[DatabaseTypeDef] = [] - databases_response = self.glue.get_databases() next_token: Optional[str] = None while True: diff --git a/pyiceberg/catalog/hive.py b/pyiceberg/catalog/hive.py index ffc9c5333c..18bbcfe084 100644 --- a/pyiceberg/catalog/hive.py +++ b/pyiceberg/catalog/hive.py @@ -15,9 +15,11 @@ # specific language governing permissions and limitations # under the License. import getpass +import socket import time from types import TracebackType from typing import ( + TYPE_CHECKING, Any, Dict, List, @@ -33,10 +35,17 @@ AlreadyExistsException, FieldSchema, InvalidOperationException, + LockComponent, + LockLevel, + LockRequest, + LockResponse, + LockState, + LockType, MetaException, NoSuchObjectException, SerDeInfo, StorageDescriptor, + UnlockRequest, ) from hive_metastore.ttypes import Database as HiveDatabase from hive_metastore.ttypes import Table as HiveTable @@ -50,11 +59,10 @@ METADATA_LOCATION, TABLE_TYPE, Catalog, - Identifier, - Properties, PropertiesUpdateSummary, ) from pyiceberg.exceptions import ( + CommitFailedException, NamespaceAlreadyExistsError, NamespaceNotEmptyError, NoSuchIcebergTableError, @@ -66,10 +74,10 @@ from pyiceberg.partitioning import UNPARTITIONED_PARTITION_SPEC, PartitionSpec from pyiceberg.schema import Schema, SchemaVisitor, visit from pyiceberg.serializers import FromInputFile -from pyiceberg.table import CommitTableRequest, CommitTableResponse, Table +from pyiceberg.table import CommitTableRequest, CommitTableResponse, Table, TableProperties, update_table_metadata from pyiceberg.table.metadata import new_table_metadata from pyiceberg.table.sorting import UNSORTED_SORT_ORDER, SortOrder -from pyiceberg.typedef import EMPTY_DICT +from pyiceberg.typedef import EMPTY_DICT, Identifier, Properties from pyiceberg.types import ( BinaryType, BooleanType, @@ -91,6 +99,10 @@ UUIDType, ) +if TYPE_CHECKING: + import pyarrow as pa + + # Replace by visitor hive_types = { BooleanType: "boolean", @@ -116,17 +128,21 @@ class _HiveClient: _transport: TTransport _client: Client + _ugi: Optional[List[str]] - def __init__(self, uri: str): + def __init__(self, uri: str, ugi: Optional[str] = None): url_parts = urlparse(uri) transport = TSocket.TSocket(url_parts.hostname, url_parts.port) self._transport = TTransport.TBufferedTransport(transport) protocol = 
TBinaryProtocol.TBinaryProtocol(transport) self._client = Client(protocol) + self._ugi = ugi.split(':') if ugi else None def __enter__(self) -> Client: self._transport.open() + if self._ugi: + self._client.set_ugi(*self._ugi) return self._client def __exit__( @@ -150,6 +166,7 @@ def _construct_hive_storage_descriptor(schema: Schema, location: Optional[str]) PROP_TABLE_TYPE = "table_type" PROP_METADATA_LOCATION = "metadata_location" PROP_PREVIOUS_METADATA_LOCATION = "previous_metadata_location" +DEFAULT_PROPERTIES = {TableProperties.PARQUET_COMPRESSION: TableProperties.PARQUET_COMPRESSION_DEFAULT} def _construct_parameters(metadata_location: str, previous_metadata_location: Optional[str] = None) -> Dict[str, Any]: @@ -218,7 +235,7 @@ class HiveCatalog(Catalog): def __init__(self, name: str, **properties: str): super().__init__(name, **properties) - self._client = _HiveClient(properties["uri"]) + self._client = _HiveClient(properties["uri"], properties.get("ugi")) def _convert_hive_into_iceberg(self, table: HiveTable, io: FileIO) -> Table: properties: Dict[str, str] = table.parameters @@ -249,7 +266,7 @@ def _convert_hive_into_iceberg(self, table: HiveTable, io: FileIO) -> Table: def create_table( self, identifier: Union[str, Identifier], - schema: Schema, + schema: Union[Schema, "pa.Schema"], location: Optional[str] = None, partition_spec: PartitionSpec = UNPARTITIONED_PARTITION_SPEC, sort_order: SortOrder = UNSORTED_SORT_ORDER, @@ -272,6 +289,9 @@ def create_table( AlreadyExistsError: If a table with the name already exists. ValueError: If the identifier is invalid. """ + schema: Schema = self._convert_schema_if_needed(schema) # type: ignore + + properties = {**DEFAULT_PROPERTIES, **properties} database_name, table_name = self.identifier_to_database_and_table(identifier) current_time_millis = int(time.time() * 1000) @@ -279,7 +299,11 @@ def create_table( metadata_location = self._get_metadata_location(location=location) metadata = new_table_metadata( - location=location, schema=schema, partition_spec=partition_spec, sort_order=sort_order, properties=properties + location=location, + schema=schema, + partition_spec=partition_spec, + sort_order=sort_order, + properties=properties, ) io = load_file_io({**self.properties, **properties}, location=location) self._write_metadata(metadata, io, metadata_location) @@ -318,6 +342,15 @@ def register_table(self, identifier: Union[str, Identifier], metadata_location: """ raise NotImplementedError + def _create_lock_request(self, database_name: str, table_name: str) -> LockRequest: + lock_component: LockComponent = LockComponent( + level=LockLevel.TABLE, type=LockType.EXCLUSIVE, dbname=database_name, tablename=table_name, isTransactional=True + ) + + lock_request: LockRequest = LockRequest(component=[lock_component], user=getpass.getuser(), hostname=socket.gethostname()) + + return lock_request + def _commit_table(self, table_request: CommitTableRequest) -> CommitTableResponse: """Update the table. @@ -329,8 +362,47 @@ def _commit_table(self, table_request: CommitTableRequest) -> CommitTableRespons Raises: NoSuchTableError: If a table with the given identifier does not exist. + CommitFailedException: Requirement not met, or a conflict with a concurrent commit. 
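# Editor's note: an illustrative sketch, not part of the diff. The new
# "ugi" property threads a user (or "user:group") string through to the
# metastore's set_ugi() call on connect; the URI and user below are
# hypothetical.
from pyiceberg.catalog import load_catalog

catalog = load_catalog(
    "hive",
    **{
        "uri": "thrift://localhost:9083",
        "ugi": "spark",  # forwarded as set_ugi("spark") when the client opens
    },
)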
""" - raise NotImplementedError + identifier_tuple = self.identifier_to_tuple_without_catalog( + tuple(table_request.identifier.namespace.root + [table_request.identifier.name]) + ) + current_table = self.load_table(identifier_tuple) + database_name, table_name = self.identifier_to_database_and_table(identifier_tuple, NoSuchTableError) + base_metadata = current_table.metadata + for requirement in table_request.requirements: + requirement.validate(base_metadata) + + updated_metadata = update_table_metadata(base_metadata, table_request.updates) + if updated_metadata == base_metadata: + # no changes, do nothing + return CommitTableResponse(metadata=base_metadata, metadata_location=current_table.metadata_location) + + # write new metadata + new_metadata_version = self._parse_metadata_version(current_table.metadata_location) + 1 + new_metadata_location = self._get_metadata_location(current_table.metadata.location, new_metadata_version) + self._write_metadata(updated_metadata, current_table.io, new_metadata_location) + + # commit to hive + # https://github.com/apache/hive/blob/master/standalone-metastore/metastore-common/src/main/thrift/hive_metastore.thrift#L1232 + with self._client as open_client: + lock: LockResponse = open_client.lock(self._create_lock_request(database_name, table_name)) + + try: + if lock.state != LockState.ACQUIRED: + raise CommitFailedException(f"Failed to acquire lock for {table_request.identifier}, state: {lock.state}") + + tbl = open_client.get_table(dbname=database_name, tbl_name=table_name) + tbl.parameters = _construct_parameters( + metadata_location=new_metadata_location, previous_metadata_location=current_table.metadata_location + ) + open_client.alter_table(dbname=database_name, tbl_name=table_name, new_tbl=tbl) + except NoSuchObjectException as e: + raise NoSuchTableError(f"Table does not exist: {table_name}") from e + finally: + open_client.unlock(UnlockRequest(lockid=lock.lockid)) + + return CommitTableResponse(metadata=updated_metadata, metadata_location=new_metadata_location) def load_table(self, identifier: Union[str, Identifier]) -> Table: """Load the table's metadata and return the table instance. diff --git a/pyiceberg/catalog/noop.py b/pyiceberg/catalog/noop.py index 083f851d1c..e294390e61 100644 --- a/pyiceberg/catalog/noop.py +++ b/pyiceberg/catalog/noop.py @@ -15,6 +15,7 @@ # specific language governing permissions and limitations # under the License. from typing import ( + TYPE_CHECKING, List, Optional, Set, @@ -27,18 +28,20 @@ from pyiceberg.table import ( CommitTableRequest, CommitTableResponse, - SortOrder, Table, ) -from pyiceberg.table.sorting import UNSORTED_SORT_ORDER +from pyiceberg.table.sorting import UNSORTED_SORT_ORDER, SortOrder from pyiceberg.typedef import EMPTY_DICT, Identifier, Properties +if TYPE_CHECKING: + import pyarrow as pa + class NoopCatalog(Catalog): def create_table( self, identifier: Union[str, Identifier], - schema: Schema, + schema: Union[Schema, "pa.Schema"], location: Optional[str] = None, partition_spec: PartitionSpec = UNPARTITIONED_PARTITION_SPEC, sort_order: SortOrder = UNSORTED_SORT_ORDER, diff --git a/pyiceberg/catalog/rest.py b/pyiceberg/catalog/rest.py index 3dbb5b72a9..f67fe2c1fd 100644 --- a/pyiceberg/catalog/rest.py +++ b/pyiceberg/catalog/rest.py @@ -16,6 +16,7 @@ # under the License. 
from json import JSONDecodeError from typing import ( + TYPE_CHECKING, Any, Dict, List, @@ -27,8 +28,9 @@ Union, ) -from pydantic import Field, ValidationError +from pydantic import Field, ValidationError, field_validator from requests import HTTPError, Session +from tenacity import RetryCallState, retry, retry_if_exception_type, stop_after_attempt from pyiceberg import __version__ from pyiceberg.catalog import ( @@ -36,8 +38,6 @@ URI, WAREHOUSE_LOCATION, Catalog, - Identifier, - Properties, PropertiesUpdateSummary, ) from pyiceberg.exceptions import ( @@ -56,17 +56,21 @@ TableAlreadyExistsError, UnauthorizedError, ) -from pyiceberg.partitioning import UNPARTITIONED_PARTITION_SPEC, PartitionSpec -from pyiceberg.schema import Schema +from pyiceberg.partitioning import UNPARTITIONED_PARTITION_SPEC, PartitionSpec, assign_fresh_partition_spec_ids +from pyiceberg.schema import Schema, assign_fresh_schema_ids from pyiceberg.table import ( CommitTableRequest, CommitTableResponse, Table, TableIdentifier, - TableMetadata, ) -from pyiceberg.table.sorting import UNSORTED_SORT_ORDER, SortOrder -from pyiceberg.typedef import EMPTY_DICT, UTF8, IcebergBaseModel +from pyiceberg.table.metadata import TableMetadata +from pyiceberg.table.sorting import UNSORTED_SORT_ORDER, SortOrder, assign_fresh_sort_order_ids +from pyiceberg.typedef import EMPTY_DICT, UTF8, IcebergBaseModel, Identifier, Properties +from pyiceberg.types import transform_dict_value_to_str + +if TYPE_CHECKING: + import pyarrow as pa ICEBERG_REST_SPEC_VERSION = "0.14.1" @@ -99,6 +103,8 @@ class Endpoints: CREDENTIAL = "credential" GRANT_TYPE = "grant_type" SCOPE = "scope" +AUDIENCE = "audience" +RESOURCE = "resource" TOKEN_EXCHANGE = "urn:ietf:params:oauth:grant-type:token-exchange" SEMICOLON = ":" KEY = "key" @@ -109,8 +115,23 @@ class Endpoints: SIGV4 = "rest.sigv4-enabled" SIGV4_REGION = "rest.signing-region" SIGV4_SERVICE = "rest.signing-name" +AUTH_URL = "rest.authorization-url" +HEADER_PREFIX = "header." 
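# Editor's note: an illustrative sketch, not part of the diff. With the
# constants above, a custom token endpoint and extra HTTP headers can be
# supplied as plain catalog properties; the URIs and header values here
# are hypothetical.
from pyiceberg.catalog.rest import RestCatalog

catalog = RestCatalog(
    "rest",
    **{
        "uri": "https://catalog.example.com",
        "rest.authorization-url": "https://auth.example.com/oauth/token",
        "header.X-Tenant": "analytics",  # every "header." key becomes an HTTP header
    },
)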
+ +NAMESPACE_SEPARATOR = b"\x1f".decode(UTF8) + + +def _retry_hook(retry_state: RetryCallState) -> None: + rest_catalog: RestCatalog = retry_state.args[0] + rest_catalog._refresh_token() # pylint: disable=protected-access + -NAMESPACE_SEPARATOR = b"\x1F".decode(UTF8) +_RETRY_ARGS = { + "retry": retry_if_exception_type(AuthorizationExpiredError), + "stop": stop_after_attempt(2), + "before_sleep": _retry_hook, + "reraise": True, +} class TableResponse(IcebergBaseModel): @@ -126,7 +147,12 @@ class CreateTableRequest(IcebergBaseModel): partition_spec: Optional[PartitionSpec] = Field(alias="partition-spec") write_order: Optional[SortOrder] = Field(alias="write-order") stage_create: bool = Field(alias="stage-create", default=False) - properties: Properties = Field(default_factory=dict) + properties: Dict[str, str] = Field(default_factory=dict) + + # validators + @field_validator('properties', mode='before') + def transform_properties_dict_value_to_str(cls, properties: Properties) -> Dict[str, str]: + return transform_dict_value_to_str(properties) class RegisterTableRequest(IcebergBaseModel): @@ -137,8 +163,10 @@ class RegisterTableRequest(IcebergBaseModel): class TokenResponse(IcebergBaseModel): access_token: str = Field() token_type: str = Field() - expires_in: int = Field() - issued_token_type: str = Field() + expires_in: Optional[int] = Field(default=None) + issued_token_type: Optional[str] = Field(default=None) + refresh_token: Optional[str] = Field(default=None) + scope: Optional[str] = Field(default=None) class ConfigResponse(IcebergBaseModel): @@ -212,26 +240,18 @@ def _create_session(self) -> Session: # Sets the client side and server side SSL cert verification, if provided as properties. if ssl_config := self.properties.get(SSL): - if ssl_ca_bundle := ssl_config.get(CA_BUNDLE): # type: ignore + if ssl_ca_bundle := ssl_config.get(CA_BUNDLE): session.verify = ssl_ca_bundle - if ssl_client := ssl_config.get(CLIENT): # type: ignore + if ssl_client := ssl_config.get(CLIENT): if all(k in ssl_client for k in (CERT, KEY)): session.cert = (ssl_client[CERT], ssl_client[KEY]) elif ssl_client_cert := ssl_client.get(CERT): session.cert = ssl_client_cert - # If we have credentials, but not a token, we want to fetch a token - if TOKEN not in self.properties and CREDENTIAL in self.properties: - self.properties[TOKEN] = self._fetch_access_token(session, self.properties[CREDENTIAL]) - - # Set Auth token for subsequent calls in the session - if token := self.properties.get(TOKEN): - session.headers[AUTHORIZATION_HEADER] = f"{BEARER_PREFIX} {token}" + self._refresh_token(session, self.properties.get(TOKEN)) # Set HTTP headers - session.headers["Content-type"] = "application/json" - session.headers["X-Client-Version"] = ICEBERG_REST_SPEC_VERSION - session.headers["User-Agent"] = f"PyIceberg/{__version__}" + self._config_headers(session) # Configure SigV4 Request Signing if str(self.properties.get(SIGV4, False)).lower() == "true": @@ -265,15 +285,36 @@ def url(self, endpoint: str, prefixed: bool = True, **kwargs: Any) -> str: return url + endpoint.format(**kwargs) + @property + def auth_url(self) -> str: + if url := self.properties.get(AUTH_URL): + return url + else: + return self.url(Endpoints.get_token, prefixed=False) + + def _extract_optional_oauth_params(self) -> Dict[str, str]: + optional_oauth_param = {SCOPE: self.properties.get(SCOPE) or CATALOG_SCOPE} + set_of_optional_params = {AUDIENCE, RESOURCE} + for param in set_of_optional_params: + if param_value := self.properties.get(param): + 
optional_oauth_param[param] = param_value + + return optional_oauth_param + def _fetch_access_token(self, session: Session, credential: str) -> str: if SEMICOLON in credential: client_id, client_secret = credential.split(SEMICOLON) else: client_id, client_secret = None, credential - data = {GRANT_TYPE: CLIENT_CREDENTIALS, CLIENT_ID: client_id, CLIENT_SECRET: client_secret, SCOPE: CATALOG_SCOPE} - url = self.url(Endpoints.get_token, prefixed=False) - # Uses application/x-www-form-urlencoded by default - response = session.post(url=url, data=data) + + data = {GRANT_TYPE: CLIENT_CREDENTIALS, CLIENT_ID: client_id, CLIENT_SECRET: client_secret} + + optional_oauth_params = self._extract_optional_oauth_params() + data.update(optional_oauth_params) + + response = session.post( + url=self.auth_url, data=data, headers={**session.headers, "Content-type": "application/x-www-form-urlencoded"} + ) try: response.raise_for_status() except HTTPError as exc: @@ -427,22 +468,50 @@ def _response_to_table(self, identifier_tuple: Tuple[str, ...], table_response: catalog=self, ) + def _refresh_token(self, session: Optional[Session] = None, initial_token: Optional[str] = None) -> None: + session = session or self._session + if initial_token is not None: + self.properties[TOKEN] = initial_token + elif CREDENTIAL in self.properties: + self.properties[TOKEN] = self._fetch_access_token(session, self.properties[CREDENTIAL]) + + # Set Auth token for subsequent calls in the session + if token := self.properties.get(TOKEN): + session.headers[AUTHORIZATION_HEADER] = f"{BEARER_PREFIX} {token}" + + def _config_headers(self, session: Session) -> None: + session.headers["Content-type"] = "application/json" + session.headers["X-Client-Version"] = ICEBERG_REST_SPEC_VERSION + session.headers["User-Agent"] = f"PyIceberg/{__version__}" + session.headers["X-Iceberg-Access-Delegation"] = "vended-credentials" + header_properties = self._extract_headers_from_properties() + session.headers.update(header_properties) + + def _extract_headers_from_properties(self) -> Dict[str, str]: + return {key[len(HEADER_PREFIX) :]: value for key, value in self.properties.items() if key.startswith(HEADER_PREFIX)} + + @retry(**_RETRY_ARGS) def create_table( self, identifier: Union[str, Identifier], - schema: Schema, + schema: Union[Schema, "pa.Schema"], location: Optional[str] = None, partition_spec: PartitionSpec = UNPARTITIONED_PARTITION_SPEC, sort_order: SortOrder = UNSORTED_SORT_ORDER, properties: Properties = EMPTY_DICT, ) -> Table: + iceberg_schema = self._convert_schema_if_needed(schema) + fresh_schema = assign_fresh_schema_ids(iceberg_schema) + fresh_partition_spec = assign_fresh_partition_spec_ids(partition_spec, iceberg_schema, fresh_schema) + fresh_sort_order = assign_fresh_sort_order_ids(sort_order, iceberg_schema, fresh_schema) + namespace_and_table = self._split_identifier_for_path(identifier) request = CreateTableRequest( name=namespace_and_table["table"], location=location, - table_schema=schema, - partition_spec=partition_spec, - write_order=sort_order, + table_schema=fresh_schema, + partition_spec=fresh_partition_spec, + write_order=fresh_sort_order, properties=properties, ) serialized_json = request.model_dump_json().encode(UTF8) @@ -458,6 +527,7 @@ def create_table( table_response = TableResponse(**response.json()) return self._response_to_table(self.identifier_to_tuple(identifier), table_response) + @retry(**_RETRY_ARGS) def register_table(self, identifier: Union[str, Identifier], metadata_location: str) -> Table: """Register a new 
table using existing metadata. @@ -489,6 +559,7 @@ def register_table(self, identifier: Union[str, Identifier], metadata_location: table_response = TableResponse(**response.json()) return self._response_to_table(self.identifier_to_tuple(identifier), table_response) + @retry(**_RETRY_ARGS) def list_tables(self, namespace: Union[str, Identifier]) -> List[Identifier]: namespace_tuple = self._check_valid_namespace_identifier(namespace) namespace_concat = NAMESPACE_SEPARATOR.join(namespace_tuple) @@ -499,6 +570,7 @@ def list_tables(self, namespace: Union[str, Identifier]) -> List[Identifier]: self._handle_non_200_response(exc, {404: NoSuchNamespaceError}) return [(*table.namespace, table.name) for table in ListTablesResponse(**response.json()).identifiers] + @retry(**_RETRY_ARGS) def load_table(self, identifier: Union[str, Identifier]) -> Table: identifier_tuple = self.identifier_to_tuple_without_catalog(identifier) response = self._session.get( @@ -512,6 +584,7 @@ def load_table(self, identifier: Union[str, Identifier]) -> Table: table_response = TableResponse(**response.json()) return self._response_to_table(identifier_tuple, table_response) + @retry(**_RETRY_ARGS) def drop_table(self, identifier: Union[str, Identifier], purge_requested: bool = False) -> None: identifier_tuple = self.identifier_to_tuple_without_catalog(identifier) response = self._session.delete( @@ -524,9 +597,11 @@ def drop_table(self, identifier: Union[str, Identifier], purge_requested: bool = except HTTPError as exc: self._handle_non_200_response(exc, {404: NoSuchTableError}) + @retry(**_RETRY_ARGS) def purge_table(self, identifier: Union[str, Identifier]) -> None: self.drop_table(identifier=identifier, purge_requested=True) + @retry(**_RETRY_ARGS) def rename_table(self, from_identifier: Union[str, Identifier], to_identifier: Union[str, Identifier]) -> Table: from_identifier_tuple = self.identifier_to_tuple_without_catalog(from_identifier) payload = { @@ -541,6 +616,7 @@ def rename_table(self, from_identifier: Union[str, Identifier], to_identifier: U return self.load_table(to_identifier) + @retry(**_RETRY_ARGS) def _commit_table(self, table_request: CommitTableRequest) -> CommitTableResponse: """Update the table. @@ -552,6 +628,8 @@ def _commit_table(self, table_request: CommitTableRequest) -> CommitTableRespons Raises: NoSuchTableError: If a table with the given identifier does not exist. + CommitFailedException: Requirement not met, or a conflict with a concurrent commit. + CommitStateUnknownException: Failed due to an internal exception on the side of the catalog. 
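# Editor's note: an illustrative sketch, not part of the diff, of the
# tenacity pattern behind the @retry(**_RETRY_ARGS) decorators above: on
# AuthorizationExpiredError the before_sleep hook refreshes the token and
# the call is attempted at most twice. The exception and token store
# below are stand-ins.
from tenacity import retry, retry_if_exception_type, stop_after_attempt

class _Expired(Exception): ...

token = {"value": "stale"}

def _refresh(retry_state) -> None:  # plays the role of _retry_hook
    token["value"] = "fresh"

@retry(retry=retry_if_exception_type(_Expired), stop=stop_after_attempt(2), before_sleep=_refresh, reraise=True)
def call_api() -> str:
    if token["value"] != "fresh":
        raise _Expired()
    return "ok"

assert call_api() == "ok"  # attempt 1 fails, the hook refreshes, attempt 2 succeeds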
""" response = self._session.post( self.url(Endpoints.update_table, prefixed=True, **self._split_identifier_for_path(table_request.identifier)), @@ -571,6 +649,7 @@ def _commit_table(self, table_request: CommitTableRequest) -> CommitTableRespons ) return CommitTableResponse(**response.json()) + @retry(**_RETRY_ARGS) def create_namespace(self, namespace: Union[str, Identifier], properties: Properties = EMPTY_DICT) -> None: namespace_tuple = self._check_valid_namespace_identifier(namespace) payload = {"namespace": namespace_tuple, "properties": properties} @@ -580,6 +659,7 @@ def create_namespace(self, namespace: Union[str, Identifier], properties: Proper except HTTPError as exc: self._handle_non_200_response(exc, {404: NoSuchNamespaceError, 409: NamespaceAlreadyExistsError}) + @retry(**_RETRY_ARGS) def drop_namespace(self, namespace: Union[str, Identifier]) -> None: namespace_tuple = self._check_valid_namespace_identifier(namespace) namespace = NAMESPACE_SEPARATOR.join(namespace_tuple) @@ -589,6 +669,7 @@ def drop_namespace(self, namespace: Union[str, Identifier]) -> None: except HTTPError as exc: self._handle_non_200_response(exc, {404: NoSuchNamespaceError}) + @retry(**_RETRY_ARGS) def list_namespaces(self, namespace: Union[str, Identifier] = ()) -> List[Identifier]: namespace_tuple = self.identifier_to_tuple(namespace) response = self._session.get( @@ -606,6 +687,7 @@ def list_namespaces(self, namespace: Union[str, Identifier] = ()) -> List[Identi namespaces = ListNamespaceResponse(**response.json()) return [namespace_tuple + child_namespace for child_namespace in namespaces.namespaces] + @retry(**_RETRY_ARGS) def load_namespace_properties(self, namespace: Union[str, Identifier]) -> Properties: namespace_tuple = self._check_valid_namespace_identifier(namespace) namespace = NAMESPACE_SEPARATOR.join(namespace_tuple) @@ -617,6 +699,7 @@ def load_namespace_properties(self, namespace: Union[str, Identifier]) -> Proper return NamespaceResponse(**response.json()).properties + @retry(**_RETRY_ARGS) def update_namespace_properties( self, namespace: Union[str, Identifier], removals: Optional[Set[str]] = None, updates: Properties = EMPTY_DICT ) -> PropertiesUpdateSummary: diff --git a/pyiceberg/catalog/sql.py b/pyiceberg/catalog/sql.py index 77ece56163..d44d4996b6 100644 --- a/pyiceberg/catalog/sql.py +++ b/pyiceberg/catalog/sql.py @@ -16,6 +16,7 @@ # under the License. 
from typing import ( + TYPE_CHECKING, List, Optional, Set, @@ -31,7 +32,7 @@ union, update, ) -from sqlalchemy.exc import IntegrityError, OperationalError +from sqlalchemy.exc import IntegrityError, NoResultFound, OperationalError, ProgrammingError from sqlalchemy.orm import ( DeclarativeBase, Mapped, @@ -43,11 +44,10 @@ from pyiceberg.catalog import ( METADATA_LOCATION, Catalog, - Identifier, - Properties, PropertiesUpdateSummary, ) from pyiceberg.exceptions import ( + CommitFailedException, NamespaceAlreadyExistsError, NamespaceNotEmptyError, NoSuchNamespaceError, @@ -59,10 +59,13 @@ from pyiceberg.partitioning import UNPARTITIONED_PARTITION_SPEC, PartitionSpec from pyiceberg.schema import Schema from pyiceberg.serializers import FromInputFile -from pyiceberg.table import CommitTableRequest, CommitTableResponse, Table +from pyiceberg.table import CommitTableRequest, CommitTableResponse, Table, update_table_metadata from pyiceberg.table.metadata import new_table_metadata from pyiceberg.table.sorting import UNSORTED_SORT_ORDER, SortOrder -from pyiceberg.typedef import EMPTY_DICT +from pyiceberg.typedef import EMPTY_DICT, Identifier, Properties + +if TYPE_CHECKING: + import pyarrow as pa class SqlCatalogBaseTable(MappedAsDataclass, DeclarativeBase): @@ -96,7 +99,8 @@ def __init__(self, name: str, **properties: str): if not (uri_prop := self.properties.get("uri")): raise NoSuchPropertyException("SQL connection URI is required") - self.engine = create_engine(uri_prop, echo=True) + echo = str(self.properties.get("echo", False)).lower() == "true"  # property values are strings; bool() of any non-empty string is truthy + self.engine = create_engine(uri_prop, echo=echo) self._ensure_tables_exist() @@ -106,7 +110,10 @@ def _ensure_tables_exist(self) -> None: stmt = select(1).select_from(table) try: session.scalar(stmt) - except OperationalError: + except ( + OperationalError, + ProgrammingError, + ): # sqlalchemy returns OperationalError in case of sqlite and ProgrammingError with postgres. self.create_tables() return @@ -139,7 +146,7 @@ def _convert_orm_to_iceberg(self, orm_table: IcebergTables) -> Table: def create_table( self, identifier: Union[str, Identifier], - schema: Schema, + schema: Union[Schema, "pa.Schema"], location: Optional[str] = None, partition_spec: PartitionSpec = UNPARTITIONED_PARTITION_SPEC, sort_order: SortOrder = UNSORTED_SORT_ORDER, @@ -164,6 +171,8 @@ def create_table( ValueError: If the identifier is invalid, or no path is given to store metadata. 
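# Editor's note: an illustrative sketch, not part of the diff, combining
# two changes above: SqlCatalog backed by SQLite, and create_table() fed
# a pyarrow schema (converted via _convert_schema_if_needed). Paths and
# names are hypothetical.
import pyarrow as pa

from pyiceberg.catalog.sql import SqlCatalog

catalog = SqlCatalog(
    "default",
    **{
        "uri": "sqlite:////tmp/pyiceberg/catalog.db",
        "warehouse": "file:///tmp/pyiceberg/warehouse",
        "echo": "true",  # surfaces SQLAlchemy statement logging while debugging
    },
)
catalog.create_namespace("docs_example")
table = catalog.create_table(
    "docs_example.taxi",
    schema=pa.schema([pa.field("id", pa.int64()), pa.field("fare", pa.float64())]),
)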
""" + schema: Schema = self._convert_schema_if_needed(schema) # type: ignore + database_name, table_name = self.identifier_to_database_and_table(identifier) if not self._namespace_exists(database_name): raise NoSuchNamespaceError(f"Namespace does not exist: {database_name}") @@ -268,16 +277,32 @@ def drop_table(self, identifier: Union[str, Identifier]) -> None: identifier_tuple = self.identifier_to_tuple_without_catalog(identifier) database_name, table_name = self.identifier_to_database_and_table(identifier_tuple, NoSuchTableError) with Session(self.engine) as session: - res = session.execute( - delete(IcebergTables).where( - IcebergTables.catalog_name == self.name, - IcebergTables.table_namespace == database_name, - IcebergTables.table_name == table_name, + if self.engine.dialect.supports_sane_rowcount: + res = session.execute( + delete(IcebergTables).where( + IcebergTables.catalog_name == self.name, + IcebergTables.table_namespace == database_name, + IcebergTables.table_name == table_name, + ) ) - ) + if res.rowcount < 1: + raise NoSuchTableError(f"Table does not exist: {database_name}.{table_name}") + else: + try: + tbl = ( + session.query(IcebergTables) + .with_for_update(of=IcebergTables) + .filter( + IcebergTables.catalog_name == self.name, + IcebergTables.table_namespace == database_name, + IcebergTables.table_name == table_name, + ) + .one() + ) + session.delete(tbl) + except NoResultFound as e: + raise NoSuchTableError(f"Table does not exist: {database_name}.{table_name}") from e session.commit() - if res.rowcount < 1: - raise NoSuchTableError(f"Table does not exist: {database_name}.{table_name}") def rename_table(self, from_identifier: Union[str, Identifier], to_identifier: Union[str, Identifier]) -> Table: """Rename a fully classified table name. 
@@ -301,18 +326,35 @@ def rename_table(self, from_identifier: Union[str, Identifier], to_identifier: U raise NoSuchNamespaceError(f"Namespace does not exist: {to_database_name}") with Session(self.engine) as session: try: - stmt = ( - update(IcebergTables) - .where( - IcebergTables.catalog_name == self.name, - IcebergTables.table_namespace == from_database_name, - IcebergTables.table_name == from_table_name, + if self.engine.dialect.supports_sane_rowcount: + stmt = ( + update(IcebergTables) + .where( + IcebergTables.catalog_name == self.name, + IcebergTables.table_namespace == from_database_name, + IcebergTables.table_name == from_table_name, + ) + .values(table_namespace=to_database_name, table_name=to_table_name) ) - .values(table_namespace=to_database_name, table_name=to_table_name) - ) - result = session.execute(stmt) - if result.rowcount < 1: - raise NoSuchTableError(f"Table does not exist: {from_table_name}") + result = session.execute(stmt) + if result.rowcount < 1: + raise NoSuchTableError(f"Table does not exist: {from_table_name}") + else: + try: + tbl = ( + session.query(IcebergTables) + .with_for_update(of=IcebergTables) + .filter( + IcebergTables.catalog_name == self.name, + IcebergTables.table_namespace == from_database_name, + IcebergTables.table_name == from_table_name, + ) + .one() + ) + tbl.table_namespace = to_database_name + tbl.table_name = to_table_name + except NoResultFound as e: + raise NoSuchTableError(f"Table does not exist: {from_table_name}") from e session.commit() except IntegrityError as e: raise TableAlreadyExistsError(f"Table {to_database_name}.{to_table_name} already exists") from e @@ -329,8 +371,62 @@ def _commit_table(self, table_request: CommitTableRequest) -> CommitTableRespons Raises: NoSuchTableError: If a table with the given identifier does not exist. + CommitFailedException: Requirement not met, or a conflict with a concurrent commit. 
""" - raise NotImplementedError + identifier_tuple = self.identifier_to_tuple_without_catalog( + tuple(table_request.identifier.namespace.root + [table_request.identifier.name]) + ) + current_table = self.load_table(identifier_tuple) + database_name, table_name = self.identifier_to_database_and_table(identifier_tuple, NoSuchTableError) + base_metadata = current_table.metadata + for requirement in table_request.requirements: + requirement.validate(base_metadata) + + updated_metadata = update_table_metadata(base_metadata, table_request.updates) + if updated_metadata == base_metadata: + # no changes, do nothing + return CommitTableResponse(metadata=base_metadata, metadata_location=current_table.metadata_location) + + # write new metadata + new_metadata_version = self._parse_metadata_version(current_table.metadata_location) + 1 + new_metadata_location = self._get_metadata_location(current_table.metadata.location, new_metadata_version) + self._write_metadata(updated_metadata, current_table.io, new_metadata_location) + + with Session(self.engine) as session: + if self.engine.dialect.supports_sane_rowcount: + stmt = ( + update(IcebergTables) + .where( + IcebergTables.catalog_name == self.name, + IcebergTables.table_namespace == database_name, + IcebergTables.table_name == table_name, + IcebergTables.metadata_location == current_table.metadata_location, + ) + .values(metadata_location=new_metadata_location, previous_metadata_location=current_table.metadata_location) + ) + result = session.execute(stmt) + if result.rowcount < 1: + raise CommitFailedException(f"Table has been updated by another process: {database_name}.{table_name}") + else: + try: + tbl = ( + session.query(IcebergTables) + .with_for_update(of=IcebergTables) + .filter( + IcebergTables.catalog_name == self.name, + IcebergTables.table_namespace == database_name, + IcebergTables.table_name == table_name, + IcebergTables.metadata_location == current_table.metadata_location, + ) + .one() + ) + tbl.metadata_location = new_metadata_location + tbl.previous_metadata_location = current_table.metadata_location + except NoResultFound as e: + raise CommitFailedException(f"Table has been updated by another process: {database_name}.{table_name}") from e + session.commit() + + return CommitTableResponse(metadata=updated_metadata, metadata_location=new_metadata_location) def _namespace_exists(self, identifier: Union[str, Identifier]) -> bool: namespace = self.identifier_to_database(identifier) @@ -469,7 +565,9 @@ def load_namespace_properties(self, namespace: Union[str, Identifier]) -> Proper Raises: NoSuchNamespaceError: If a namespace with the given name does not exist. 
""" - database_name = self.identifier_to_database(namespace, NoSuchNamespaceError) + database_name = self.identifier_to_database(namespace) + if not self._namespace_exists(database_name): + raise NoSuchNamespaceError(f"Database {database_name} does not exists") stmt = select(IcebergNamespaceProperties).where( IcebergNamespaceProperties.catalog_name == self.name, IcebergNamespaceProperties.namespace == database_name diff --git a/pyiceberg/cli/console.py b/pyiceberg/cli/console.py index 092910b5f6..0fbda10960 100644 --- a/pyiceberg/cli/console.py +++ b/pyiceberg/cli/console.py @@ -59,11 +59,22 @@ def wrapper(*args: Any, **kwargs: Any): # type: ignore @click.option("--catalog") @click.option("--verbose", type=click.BOOL) @click.option("--output", type=click.Choice(["text", "json"]), default="text") +@click.option("--ugi") @click.option("--uri") @click.option("--credential") @click.pass_context -def run(ctx: Context, catalog: Optional[str], verbose: bool, output: str, uri: Optional[str], credential: Optional[str]) -> None: +def run( + ctx: Context, + catalog: Optional[str], + verbose: bool, + output: str, + ugi: Optional[str], + uri: Optional[str], + credential: Optional[str], +) -> None: properties = {} + if ugi: + properties["ugi"] = ugi if uri: properties["uri"] = uri if credential: @@ -206,6 +217,23 @@ def version(ctx: Context) -> None: ctx.obj["output"].version(__version__) +@run.group() +def create() -> None: + """Operation to create a namespace.""" + + +@create.command() +@click.argument("identifier") +@click.pass_context +@catch_exception() +def namespace(ctx: Context, identifier: str) -> None: + """Create a namespace.""" + catalog, output = _catalog_and_output(ctx) + + catalog.create_namespace(identifier) + output.text(f"Created namespace: {identifier}") + + @run.group() def drop() -> None: """Operations to drop a namespace or table.""" @@ -223,11 +251,11 @@ def table(ctx: Context, identifier: str) -> None: # noqa: F811 output.text(f"Dropped table: {identifier}") -@drop.command() +@drop.command() # type: ignore @click.argument("identifier") @click.pass_context @catch_exception() -def namespace(ctx: Context, identifier: str) -> None: +def namespace(ctx: Context, identifier: str) -> None: # noqa: F811 """Drop a namespace.""" catalog, output = _catalog_and_output(ctx) diff --git a/pyiceberg/cli/output.py b/pyiceberg/cli/output.py index 8e0ad2deee..56b544c99f 100644 --- a/pyiceberg/cli/output.py +++ b/pyiceberg/cli/output.py @@ -31,7 +31,8 @@ from pyiceberg.partitioning import PartitionSpec from pyiceberg.schema import Schema -from pyiceberg.table import Table, TableMetadata +from pyiceberg.table import Table +from pyiceberg.table.metadata import TableMetadata from pyiceberg.table.refs import SnapshotRefType from pyiceberg.typedef import IcebergBaseModel, Identifier, Properties @@ -40,48 +41,37 @@ class Output(ABC): """Output interface for exporting.""" @abstractmethod - def exception(self, ex: Exception) -> None: - ... + def exception(self, ex: Exception) -> None: ... @abstractmethod - def identifiers(self, identifiers: List[Identifier]) -> None: - ... + def identifiers(self, identifiers: List[Identifier]) -> None: ... @abstractmethod - def describe_table(self, table: Table) -> None: - ... + def describe_table(self, table: Table) -> None: ... @abstractmethod - def files(self, table: Table, history: bool) -> None: - ... + def files(self, table: Table, history: bool) -> None: ... @abstractmethod - def describe_properties(self, properties: Properties) -> None: - ... 
+ def describe_properties(self, properties: Properties) -> None: ... @abstractmethod - def text(self, response: str) -> None: - ... + def text(self, response: str) -> None: ... @abstractmethod - def schema(self, schema: Schema) -> None: - ... + def schema(self, schema: Schema) -> None: ... @abstractmethod - def spec(self, spec: PartitionSpec) -> None: - ... + def spec(self, spec: PartitionSpec) -> None: ... @abstractmethod - def uuid(self, uuid: Optional[UUID]) -> None: - ... + def uuid(self, uuid: Optional[UUID]) -> None: ... @abstractmethod - def version(self, version: str) -> None: - ... + def version(self, version: str) -> None: ... @abstractmethod - def describe_refs(self, refs: List[Tuple[str, SnapshotRefType, Dict[str, str]]]) -> None: - ... + def describe_refs(self, refs: List[Tuple[str, SnapshotRefType, Dict[str, str]]]) -> None: ... class ConsoleOutput(Output): @@ -168,7 +158,7 @@ def describe_properties(self, properties: Properties) -> None: Console().print(output_table) def text(self, response: str) -> None: - Console().print(response) + Console(soft_wrap=True).print(response) def schema(self, schema: Schema) -> None: output_table = self._table @@ -252,10 +242,8 @@ def version(self, version: str) -> None: self._out({"version": version}) def describe_refs(self, refs: List[Tuple[str, SnapshotRefType, Dict[str, str]]]) -> None: - self._out( - [ - {"name": name, "type": type, detail_key: detail_val} - for name, type, detail in refs - for detail_key, detail_val in detail.items() - ] - ) + self._out([ + {"name": name, "type": type, detail_key: detail_val} + for name, type, detail in refs + for detail_key, detail_val in detail.items() + ]) diff --git a/pyiceberg/conversions.py b/pyiceberg/conversions.py index 83ff172993..2a03a4de35 100644 --- a/pyiceberg/conversions.py +++ b/pyiceberg/conversions.py @@ -27,6 +27,7 @@ implementation, a concrete function is registered for each generic conversion function. For PrimitiveType implementations that share the same conversion logic, registrations can be stacked. """ + import uuid from datetime import date, datetime, time from decimal import Decimal @@ -266,7 +267,7 @@ def _(primitive_type: DecimalType, value: Decimal) -> bytes: return decimal_to_bytes(value) -@singledispatch +@singledispatch # type: ignore def from_bytes(primitive_type: PrimitiveType, b: bytes) -> L: # type: ignore """Convert bytes to a built-in python value. diff --git a/pyiceberg/expressions/__init__.py b/pyiceberg/expressions/__init__.py index cb46a70401..5adf3a8a48 100644 --- a/pyiceberg/expressions/__init__.py +++ b/pyiceberg/expressions/__init__.py @@ -80,13 +80,11 @@ class Unbound(Generic[B], ABC): """Represents an unbound value expression.""" @abstractmethod - def bind(self, schema: Schema, case_sensitive: bool = True) -> B: - ... + def bind(self, schema: Schema, case_sensitive: bool = True) -> B: ... @property @abstractmethod - def as_bound(self) -> Type[Bound]: - ... + def as_bound(self) -> Type[Bound]: ... class BoundTerm(Term[L], Bound, ABC): @@ -142,8 +140,7 @@ class UnboundTerm(Term[Any], Unbound[BoundTerm[L]], ABC): """Represents an unbound term.""" @abstractmethod - def bind(self, schema: Schema, case_sensitive: bool = True) -> BoundTerm[L]: - ... + def bind(self, schema: Schema, case_sensitive: bool = True) -> BoundTerm[L]: ... class Reference(UnboundTerm[Any]): @@ -352,8 +349,7 @@ def __eq__(self, other: Any) -> bool: @property @abstractmethod - def as_unbound(self) -> Type[UnboundPredicate[Any]]: - ... 
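For context on the `conversions.py` hunk above: the `# type: ignore` on `from_bytes` only silences mypy on the `singledispatch` plus type-variable combination; runtime behavior is unchanged. A tiny round-trip using only the existing public helpers:

```python
from pyiceberg.conversions import from_bytes, to_bytes
from pyiceberg.types import LongType

buf = to_bytes(LongType(), 42)            # serialize to the binary bound format
assert from_bytes(LongType(), buf) == 42  # dispatches on the primitive type
```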
+ def as_unbound(self) -> Type[UnboundPredicate[Any]]: ... class UnboundPredicate(Generic[L], Unbound[BooleanExpression], BooleanExpression, ABC): @@ -367,13 +363,11 @@ def __eq__(self, other: Any) -> bool: return self.term == other.term if isinstance(other, self.__class__) else False @abstractmethod - def bind(self, schema: Schema, case_sensitive: bool = True) -> BooleanExpression: - ... + def bind(self, schema: Schema, case_sensitive: bool = True) -> BooleanExpression: ... @property @abstractmethod - def as_bound(self) -> Type[BoundPredicate[L]]: - ... + def as_bound(self) -> Type[BoundPredicate[L]]: ... class UnaryPredicate(UnboundPredicate[Any], ABC): @@ -387,8 +381,7 @@ def __repr__(self) -> str: @property @abstractmethod - def as_bound(self) -> Type[BoundUnaryPredicate[Any]]: - ... + def as_bound(self) -> Type[BoundUnaryPredicate[Any]]: ... class BoundUnaryPredicate(BoundPredicate[L], ABC): @@ -398,8 +391,7 @@ def __repr__(self) -> str: @property @abstractmethod - def as_unbound(self) -> Type[UnaryPredicate]: - ... + def as_unbound(self) -> Type[UnaryPredicate]: ... def __getnewargs__(self) -> Tuple[BoundTerm[L]]: """Pickle the BoundUnaryPredicate class.""" @@ -575,8 +567,7 @@ def __getnewargs__(self) -> Tuple[BoundTerm[L], Set[Literal[L]]]: @property @abstractmethod - def as_unbound(self) -> Type[SetPredicate[L]]: - ... + def as_unbound(self) -> Type[SetPredicate[L]]: ... class BoundIn(BoundSetPredicate[L]): @@ -705,8 +696,7 @@ def __repr__(self) -> str: @property @abstractmethod - def as_bound(self) -> Type[BoundLiteralPredicate[L]]: - ... + def as_bound(self) -> Type[BoundLiteralPredicate[L]]: ... class BoundLiteralPredicate(BoundPredicate[L], ABC): @@ -729,8 +719,7 @@ def __repr__(self) -> str: @property @abstractmethod - def as_unbound(self) -> Type[LiteralPredicate[L]]: - ... + def as_unbound(self) -> Type[LiteralPredicate[L]]: ... class BoundEqualTo(BoundLiteralPredicate[L]): diff --git a/pyiceberg/expressions/literals.py b/pyiceberg/expressions/literals.py index 6d65058897..bf76be36e4 100644 --- a/pyiceberg/expressions/literals.py +++ b/pyiceberg/expressions/literals.py @@ -78,8 +78,7 @@ def value(self) -> L: @singledispatchmethod @abstractmethod - def to(self, type_var: IcebergType) -> Literal[L]: - ... # pragma: no cover + def to(self, type_var: IcebergType) -> Literal[L]: ... 
# pragma: no cover def __repr__(self) -> str: """Return the string representation of the Literal class.""" diff --git a/pyiceberg/expressions/visitors.py b/pyiceberg/expressions/visitors.py index a13c1c5045..79bc995198 100644 --- a/pyiceberg/expressions/visitors.py +++ b/pyiceberg/expressions/visitors.py @@ -54,7 +54,6 @@ BoundStartsWith, BoundTerm, BoundUnaryPredicate, - L, Not, Or, UnboundPredicate, @@ -63,7 +62,7 @@ from pyiceberg.manifest import DataFile, ManifestFile, PartitionFieldSummary from pyiceberg.partitioning import PartitionSpec from pyiceberg.schema import Schema -from pyiceberg.typedef import EMPTY_DICT, StructProtocol +from pyiceberg.typedef import EMPTY_DICT, L, StructProtocol from pyiceberg.types import ( DoubleType, FloatType, @@ -892,8 +891,13 @@ def visit_unbound_predicate(self, predicate: UnboundPredicate[L]) -> BooleanExpr def visit_bound_predicate(self, predicate: BoundPredicate[L]) -> BooleanExpression: file_column_name = self.file_schema.find_column_name(predicate.term.ref().field.field_id) - if not file_column_name: - raise ValueError(f"Not found in file schema: {file_column_name}") + if file_column_name is None: + # In the case of schema evolution, the column might not be present + # in the file schema when reading older data + if isinstance(predicate, BoundIsNull): + return AlwaysTrue() + else: + return AlwaysFalse() if isinstance(predicate, BoundUnaryPredicate): return predicate.as_unbound(file_column_name) diff --git a/pyiceberg/io/__init__.py b/pyiceberg/io/__init__.py index dc94dc8a4e..f03048c8cd 100644 --- a/pyiceberg/io/__init__.py +++ b/pyiceberg/io/__init__.py @@ -22,6 +22,7 @@ for returning an InputFile instance, an OutputFile instance, and deleting a file given its location. """ + from __future__ import annotations import importlib @@ -78,20 +79,16 @@ class InputStream(Protocol): """ @abstractmethod - def read(self, size: int = 0) -> bytes: - ... + def read(self, size: int = 0) -> bytes: ... @abstractmethod - def seek(self, offset: int, whence: int = SEEK_SET) -> int: - ... + def seek(self, offset: int, whence: int = SEEK_SET) -> int: ... @abstractmethod - def tell(self) -> int: - ... + def tell(self) -> int: ... @abstractmethod - def close(self) -> None: - ... + def close(self) -> None: ... def __enter__(self) -> InputStream: """Provide setup when opening an InputStream using a 'with' statement.""" @@ -112,12 +109,10 @@ class OutputStream(Protocol): # pragma: no cover """ @abstractmethod - def write(self, b: bytes) -> int: - ... + def write(self, b: bytes) -> int: ... @abstractmethod - def close(self) -> None: - ... + def close(self) -> None: ... @abstractmethod def __enter__(self) -> OutputStream: @@ -280,7 +275,7 @@ def delete(self, location: Union[str, InputFile, OutputFile]) -> None: "s3a": [ARROW_FILE_IO, FSSPEC_FILE_IO], "s3n": [ARROW_FILE_IO, FSSPEC_FILE_IO], "gs": [ARROW_FILE_IO], - "file": [ARROW_FILE_IO], + "file": [ARROW_FILE_IO, FSSPEC_FILE_IO], "hdfs": [ARROW_FILE_IO], "abfs": [FSSPEC_FILE_IO], "abfss": [FSSPEC_FILE_IO], diff --git a/pyiceberg/io/fsspec.py b/pyiceberg/io/fsspec.py index 18b1f5b885..957cac66f2 100644 --- a/pyiceberg/io/fsspec.py +++ b/pyiceberg/io/fsspec.py @@ -15,6 +15,7 @@ # specific language governing permissions and limitations # under the License. 
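The `visitors.py` change above fixes filtering under schema evolution: a column added after a data file was written is absent from that file's schema, so every row of that file is effectively null for it. A sketch of the rule (illustrative helper name, not library code):

```python
from pyiceberg.expressions import AlwaysFalse, AlwaysTrue, BooleanExpression

def missing_column_fallback(is_null_predicate: bool) -> BooleanExpression:
    # IsNull trivially matches every row of an all-null column; any other
    # comparison can never match a null, so it matches no rows.
    return AlwaysTrue() if is_null_predicate else AlwaysFalse()
```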
"""FileIO implementation for reading and writing table files that uses fsspec compatible filesystems.""" + import errno import json import logging @@ -98,7 +99,7 @@ def s3v4_rest_signer(properties: Properties, request: AWSRequest, **_: Any) -> A def _file(_: Properties) -> LocalFileSystem: - return LocalFileSystem() + return LocalFileSystem(auto_mkdir=True) def _s3(properties: Properties) -> AbstractFileSystem: @@ -129,7 +130,7 @@ def _s3(properties: Properties) -> AbstractFileSystem: config_kwargs["proxies"] = {"http": proxy_uri, "https": proxy_uri} if connect_timeout := properties.get(S3_CONNECT_TIMEOUT): - config_kwargs["connect_timeout"] = connect_timeout + config_kwargs["connect_timeout"] = float(connect_timeout) fs = S3FileSystem(client_kwargs=client_kwargs, config_kwargs=config_kwargs) @@ -172,6 +173,7 @@ def _adlfs(properties: Properties) -> AbstractFileSystem: SCHEME_TO_FS = { + "": _file, "file": _file, "s3": _s3, "s3a": _s3, diff --git a/pyiceberg/io/pyarrow.py b/pyiceberg/io/pyarrow.py index a537cf7a30..f1e4d302ec 100644 --- a/pyiceberg/io/pyarrow.py +++ b/pyiceberg/io/pyarrow.py @@ -22,9 +22,12 @@ type to use. Theoretically, this allows the supported storage types to grow naturally with the pyarrow library. """ + from __future__ import annotations import concurrent.futures +import fnmatch +import itertools import logging import os import re @@ -33,7 +36,6 @@ from dataclasses import dataclass from enum import Enum from functools import lru_cache, singledispatch -from itertools import chain from typing import ( TYPE_CHECKING, Any, @@ -67,14 +69,14 @@ ) from sortedcontainers import SortedList -from pyiceberg.avro.resolver import ResolveError from pyiceberg.conversions import to_bytes +from pyiceberg.exceptions import ResolveError from pyiceberg.expressions import ( AlwaysTrue, BooleanExpression, BoundTerm, - Literal, ) +from pyiceberg.expressions.literals import Literal from pyiceberg.expressions.visitors import ( BoundBooleanExpressionVisitor, bind, @@ -104,7 +106,11 @@ OutputFile, OutputStream, ) -from pyiceberg.manifest import DataFile, FileFormat +from pyiceberg.manifest import ( + DataFile, + DataFileContent, + FileFormat, +) from pyiceberg.schema import ( PartnerAccessor, PreOrderSchemaVisitor, @@ -118,8 +124,11 @@ visit, visit_with_partner, ) +from pyiceberg.table import PropertyUtil, TableProperties, WriteTask +from pyiceberg.table.metadata import TableMetadata +from pyiceberg.table.name_mapping import NameMapping from pyiceberg.transforms import TruncateTransform -from pyiceberg.typedef import EMPTY_DICT, Properties +from pyiceberg.typedef import EMPTY_DICT, Properties, Record from pyiceberg.types import ( BinaryType, BooleanType, @@ -155,14 +164,24 @@ ONE_MEGABYTE = 1024 * 1024 BUFFER_SIZE = "buffer-size" ICEBERG_SCHEMA = b"iceberg.schema" -FIELD_ID = "field_id" +# The PARQUET: in front means that it is Parquet specific, in this case the field_id +PYARROW_PARQUET_FIELD_ID_KEY = b"PARQUET:field_id" +PYARROW_FIELD_DOC_KEY = b"doc" +LIST_ELEMENT_NAME = "element" +MAP_KEY_NAME = "key" +MAP_VALUE_NAME = "value" DOC = "doc" -PYARROW_FIELD_ID_KEYS = [b"PARQUET:field_id", b"field_id"] -PYARROW_FIELD_DOC_KEYS = [b"PARQUET:field_doc", b"field_doc", b"doc"] T = TypeVar("T") +class PyArrowLocalFileSystem(pyarrow.fs.LocalFileSystem): + def open_output_stream(self, path: str, *args: Any, **kwargs: Any) -> pyarrow.NativeFile: + # In LocalFileSystem, parent directories must be first created before opening an output stream + self.create_dir(os.path.dirname(path), recursive=True) + 
return super().open_output_stream(path, *args, **kwargs) + + class PyArrowFile(InputFile, OutputFile): """A combined InputFile and OutputFile implementation that uses a pyarrow filesystem to generate pyarrow.lib.NativeFile instances. @@ -320,7 +339,7 @@ def _initialize_fs(self, scheme: str, netloc: Optional[str] = None) -> FileSyste if scheme in {"s3", "s3a", "s3n"}: from pyarrow.fs import S3FileSystem - client_kwargs = { + client_kwargs: Dict[str, Any] = { "endpoint_override": self.properties.get(S3_ENDPOINT), "access_key": self.properties.get(S3_ACCESS_KEY_ID), "secret_key": self.properties.get(S3_SECRET_ACCESS_KEY), @@ -332,7 +351,7 @@ def _initialize_fs(self, scheme: str, netloc: Optional[str] = None) -> FileSyste client_kwargs["proxy_options"] = proxy_uri if connect_timeout := self.properties.get(S3_CONNECT_TIMEOUT): - client_kwargs["connect_timeout"] = connect_timeout + client_kwargs["connect_timeout"] = float(connect_timeout) return S3FileSystem(**client_kwargs) elif scheme == "hdfs": @@ -369,9 +388,7 @@ def _initialize_fs(self, scheme: str, netloc: Optional[str] = None) -> FileSyste return GcsFileSystem(**gcs_kwargs) elif scheme == "file": - from pyarrow.fs import LocalFileSystem - - return LocalFileSystem() + return PyArrowLocalFileSystem() else: raise ValueError(f"Unrecognized filesystem type in URI: {scheme}") @@ -460,7 +477,9 @@ def field(self, field: NestedField, field_result: pa.DataType) -> pa.Field: name=field.name, type=field_result, nullable=field.optional, - metadata={DOC: field.doc, FIELD_ID: str(field.field_id)} if field.doc else {FIELD_ID: str(field.field_id)}, + metadata={PYARROW_FIELD_DOC_KEY: field.doc, PYARROW_PARQUET_FIELD_ID_KEY: str(field.field_id)} + if field.doc + else {PYARROW_PARQUET_FIELD_ID_KEY: str(field.field_id)}, ) def list(self, list_type: ListType, element_result: pa.DataType) -> pa.DataType: @@ -515,7 +534,7 @@ def visit_uuid(self, _: UUIDType) -> pa.DataType: return pa.binary(16) def visit_binary(self, _: BinaryType) -> pa.DataType: - return pa.binary() + return pa.large_binary() def _convert_scalar(value: Any, iceberg_type: IcebergType) -> pa.scalar: @@ -620,15 +639,27 @@ def _combine_positional_deletes(positional_deletes: List[pa.ChunkedArray], rows: if len(positional_deletes) == 1: all_chunks = positional_deletes[0] else: - all_chunks = pa.chunked_array(chain(*[arr.chunks for arr in positional_deletes])) + all_chunks = pa.chunked_array(itertools.chain(*[arr.chunks for arr in positional_deletes])) return np.setdiff1d(np.arange(rows), all_chunks, assume_unique=False) -def pyarrow_to_schema(schema: pa.Schema) -> Schema: - visitor = _ConvertToIceberg() +def pyarrow_to_schema(schema: pa.Schema, name_mapping: Optional[NameMapping] = None) -> Schema: + has_ids = visit_pyarrow(schema, _HasIds()) + if has_ids: + visitor = _ConvertToIceberg() + elif name_mapping is not None: + visitor = _ConvertToIceberg(name_mapping=name_mapping) + else: + raise ValueError( + "Parquet file does not have field-ids and the Iceberg table does not have 'schema.name-mapping.default' defined" + ) return visit_pyarrow(schema, visitor) +def _pyarrow_to_schema_without_ids(schema: pa.Schema) -> Schema: + return visit_pyarrow(schema, _ConvertToIcebergWithoutIDs()) + + @singledispatch def visit_pyarrow(obj: Union[pa.DataType, pa.Schema], visitor: PyArrowSchemaVisitor[T]) -> T: """Apply a pyarrow schema visitor to any point within a schema. 
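To illustrate the new name-mapping fallback in `pyarrow_to_schema` above: an Arrow schema without `PARQUET:field_id` metadata now raises unless a `NameMapping` supplies the IDs. A hedged sketch; `parse_mapping_from_json` is assumed to be the pyiceberg helper for building a `NameMapping` from its JSON representation:

```python
import pyarrow as pa

from pyiceberg.io.pyarrow import pyarrow_to_schema
from pyiceberg.table.name_mapping import parse_mapping_from_json

mapping = parse_mapping_from_json('[{"field-id": 1, "names": ["id"]}]')
arrow_schema = pa.schema([pa.field("id", pa.int64(), nullable=False)])
iceberg_schema = pyarrow_to_schema(arrow_schema, name_mapping=mapping)  # "id" -> field 1
```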
@@ -646,50 +677,49 @@ def visit_pyarrow(obj: Union[pa.DataType, pa.Schema], visitor: PyArrowSchemaVisi @visit_pyarrow.register(pa.Schema) -def _(obj: pa.Schema, visitor: PyArrowSchemaVisitor[T]) -> Optional[T]: - struct_results: List[Optional[T]] = [] - for field in obj: - visitor.before_field(field) - struct_result = visit_pyarrow(field.type, visitor) - visitor.after_field(field) - struct_results.append(struct_result) - - return visitor.schema(obj, struct_results) +def _(obj: pa.Schema, visitor: PyArrowSchemaVisitor[T]) -> T: + return visitor.schema(obj, visit_pyarrow(pa.struct(obj), visitor)) @visit_pyarrow.register(pa.StructType) -def _(obj: pa.StructType, visitor: PyArrowSchemaVisitor[T]) -> Optional[T]: - struct_results: List[Optional[T]] = [] +def _(obj: pa.StructType, visitor: PyArrowSchemaVisitor[T]) -> T: + results = [] + for field in obj: visitor.before_field(field) - struct_result = visit_pyarrow(field.type, visitor) + result = visit_pyarrow(field.type, visitor) + results.append(visitor.field(field, result)) visitor.after_field(field) - struct_results.append(struct_result) - return visitor.struct(obj, struct_results) + return visitor.struct(obj, results) @visit_pyarrow.register(pa.ListType) -def _(obj: pa.ListType, visitor: PyArrowSchemaVisitor[T]) -> Optional[T]: - visitor.before_field(obj.value_field) - list_result = visit_pyarrow(obj.value_field.type, visitor) - visitor.after_field(obj.value_field) - return visitor.list(obj, list_result) +@visit_pyarrow.register(pa.FixedSizeListType) +@visit_pyarrow.register(pa.LargeListType) +def _(obj: Union[pa.ListType, pa.LargeListType, pa.FixedSizeListType], visitor: PyArrowSchemaVisitor[T]) -> T: + visitor.before_list_element(obj.value_field) + result = visit_pyarrow(obj.value_type, visitor) + visitor.after_list_element(obj.value_field) + + return visitor.list(obj, result) @visit_pyarrow.register(pa.MapType) -def _(obj: pa.MapType, visitor: PyArrowSchemaVisitor[T]) -> Optional[T]: - visitor.before_field(obj.key_field) - key_result = visit_pyarrow(obj.key_field.type, visitor) - visitor.after_field(obj.key_field) - visitor.before_field(obj.item_field) - value_result = visit_pyarrow(obj.item_field.type, visitor) - visitor.after_field(obj.item_field) +def _(obj: pa.MapType, visitor: PyArrowSchemaVisitor[T]) -> T: + visitor.before_map_key(obj.key_field) + key_result = visit_pyarrow(obj.key_type, visitor) + visitor.after_map_key(obj.key_field) + + visitor.before_map_value(obj.item_field) + value_result = visit_pyarrow(obj.item_type, visitor) + visitor.after_map_value(obj.item_field) + return visitor.map(obj, key_result, value_result) @visit_pyarrow.register(pa.DataType) -def _(obj: pa.DataType, visitor: PyArrowSchemaVisitor[T]) -> Optional[T]: +def _(obj: pa.DataType, visitor: PyArrowSchemaVisitor[T]) -> T: if pa.types.is_nested(obj): raise TypeError(f"Expected primitive type, got: {type(obj)}") return visitor.primitive(obj) @@ -702,83 +732,143 @@ def before_field(self, field: pa.Field) -> None: def after_field(self, field: pa.Field) -> None: """Override this method to perform an action immediately after visiting a field.""" + def before_list_element(self, element: pa.Field) -> None: + """Override this method to perform an action immediately before visiting an element within a ListType.""" + + def after_list_element(self, element: pa.Field) -> None: + """Override this method to perform an action immediately after visiting an element within a ListType.""" + + def before_map_key(self, key: pa.Field) -> None: + """Override this method to perform 
an action immediately before visiting a key within a MapType.""" + + def after_map_key(self, key: pa.Field) -> None: + """Override this method to perform an action immediately after visiting a key within a MapType.""" + + def before_map_value(self, value: pa.Field) -> None: + """Override this method to perform an action immediately before visiting a value within a MapType.""" + + def after_map_value(self, value: pa.Field) -> None: + """Override this method to perform an action immediately after visiting a value within a MapType.""" + @abstractmethod - def schema(self, schema: pa.Schema, field_results: List[Optional[T]]) -> Optional[T]: + def schema(self, schema: pa.Schema, struct_result: T) -> T: """Visit a schema.""" @abstractmethod - def struct(self, struct: pa.StructType, field_results: List[Optional[T]]) -> Optional[T]: + def struct(self, struct: pa.StructType, field_results: List[T]) -> T: """Visit a struct.""" @abstractmethod - def list(self, list_type: pa.ListType, element_result: Optional[T]) -> Optional[T]: + def field(self, field: pa.Field, field_result: T) -> T: + """Visit a field.""" + + @abstractmethod + def list(self, list_type: pa.ListType, element_result: T) -> T: """Visit a list.""" @abstractmethod - def map(self, map_type: pa.MapType, key_result: Optional[T], value_result: Optional[T]) -> Optional[T]: + def map(self, map_type: pa.MapType, key_result: T, value_result: T) -> T: """Visit a map.""" @abstractmethod - def primitive(self, primitive: pa.DataType) -> Optional[T]: + def primitive(self, primitive: pa.DataType) -> T: """Visit a primitive type.""" def _get_field_id(field: pa.Field) -> Optional[int]: - for pyarrow_field_id_key in PYARROW_FIELD_ID_KEYS: - if field_id_str := field.metadata.get(pyarrow_field_id_key): - return int(field_id_str.decode()) - return None + return ( + int(field_id_str.decode()) + if (field.metadata and (field_id_str := field.metadata.get(PYARROW_PARQUET_FIELD_ID_KEY))) + else None + ) + +class _HasIds(PyArrowSchemaVisitor[bool]): + def schema(self, schema: pa.Schema, struct_result: bool) -> bool: + return struct_result -def _get_field_doc(field: pa.Field) -> Optional[str]: - for pyarrow_doc_key in PYARROW_FIELD_DOC_KEYS: - if doc_str := field.metadata.get(pyarrow_doc_key): - return doc_str.decode() - return None + def struct(self, struct: pa.StructType, field_results: List[bool]) -> bool: + return all(field_results) + def field(self, field: pa.Field, field_result: bool) -> bool: + return all([_get_field_id(field) is not None, field_result]) -class _ConvertToIceberg(PyArrowSchemaVisitor[Union[IcebergType, Schema]]): - def _convert_fields(self, arrow_fields: Iterable[pa.Field], field_results: List[Optional[IcebergType]]) -> List[NestedField]: - fields = [] - for i, field in enumerate(arrow_fields): - field_id = _get_field_id(field) - field_doc = _get_field_doc(field) - field_type = field_results[i] - if field_type is not None and field_id is not None: - fields.append(NestedField(field_id, field.name, field_type, required=not field.nullable, doc=field_doc)) - return fields - - def schema(self, schema: pa.Schema, field_results: List[Optional[IcebergType]]) -> Schema: - return Schema(*self._convert_fields(schema, field_results)) - - def struct(self, struct: pa.StructType, field_results: List[Optional[IcebergType]]) -> IcebergType: - return StructType(*self._convert_fields(struct, field_results)) - - def list(self, list_type: pa.ListType, element_result: Optional[IcebergType]) -> Optional[IcebergType]: + def list(self, list_type: pa.ListType, 
element_result: bool) -> bool: element_field = list_type.value_field element_id = _get_field_id(element_field) - if element_result is not None and element_id is not None: - return ListType(element_id, element_result, element_required=not element_field.nullable) - return None + return element_result and element_id is not None - def map( - self, map_type: pa.MapType, key_result: Optional[IcebergType], value_result: Optional[IcebergType] - ) -> Optional[IcebergType]: + def map(self, map_type: pa.MapType, key_result: bool, value_result: bool) -> bool: key_field = map_type.key_field key_id = _get_field_id(key_field) value_field = map_type.item_field value_id = _get_field_id(value_field) - if key_result is not None and value_result is not None and key_id is not None and value_id is not None: - return MapType(key_id, key_result, value_id, value_result, value_required=not value_field.nullable) - return None + return all([key_id is not None, value_id is not None, key_result, value_result]) + + def primitive(self, primitive: pa.DataType) -> bool: + return True + + +class _ConvertToIceberg(PyArrowSchemaVisitor[Union[IcebergType, Schema]]): + """Converts PyArrowSchema to Iceberg Schema. Applies the IDs from name_mapping if provided.""" + + _field_names: List[str] + _name_mapping: Optional[NameMapping] + + def __init__(self, name_mapping: Optional[NameMapping] = None) -> None: + self._field_names = [] + self._name_mapping = name_mapping - def primitive(self, primitive: pa.DataType) -> IcebergType: + def _field_id(self, field: pa.Field) -> int: + if self._name_mapping: + return self._name_mapping.find(*self._field_names).field_id + elif (field_id := _get_field_id(field)) is not None: + return field_id + else: + raise ValueError(f"Cannot convert {field} to Iceberg Field as field_id is empty.") + + def schema(self, schema: pa.Schema, struct_result: StructType) -> Schema: + return Schema(*struct_result.fields) + + def struct(self, struct: pa.StructType, field_results: List[NestedField]) -> StructType: + return StructType(*field_results) + + def field(self, field: pa.Field, field_result: IcebergType) -> NestedField: + field_id = self._field_id(field) + field_doc = doc_str.decode() if (field.metadata and (doc_str := field.metadata.get(PYARROW_FIELD_DOC_KEY))) else None + field_type = field_result + return NestedField(field_id, field.name, field_type, required=not field.nullable, doc=field_doc) + + def list(self, list_type: pa.ListType, element_result: IcebergType) -> ListType: + element_field = list_type.value_field + self._field_names.append(LIST_ELEMENT_NAME) + element_id = self._field_id(element_field) + self._field_names.pop() + return ListType(element_id, element_result, element_required=not element_field.nullable) + + def map(self, map_type: pa.MapType, key_result: IcebergType, value_result: IcebergType) -> MapType: + key_field = map_type.key_field + self._field_names.append(MAP_KEY_NAME) + key_id = self._field_id(key_field) + self._field_names.pop() + value_field = map_type.item_field + self._field_names.append(MAP_VALUE_NAME) + value_id = self._field_id(value_field) + self._field_names.pop() + return MapType(key_id, key_result, value_id, value_result, value_required=not value_field.nullable) + + def primitive(self, primitive: pa.DataType) -> PrimitiveType: if pa.types.is_boolean(primitive): return BooleanType() - elif pa.types.is_int32(primitive): - return IntegerType() - elif pa.types.is_int64(primitive): - return LongType() + elif pa.types.is_integer(primitive): + width = primitive.bit_width + if 
width <= 32: + return IntegerType() + elif width <= 64: + return LongType() + else: + # Does not exist (yet) + raise TypeError(f"Unsupported integer type: {primitive}") elif pa.types.is_float32(primitive): return FloatType() elif pa.types.is_float64(primitive): @@ -786,7 +876,7 @@ def primitive(self, primitive: pa.DataType) -> IcebergType: elif isinstance(primitive, pa.Decimal128Type): primitive = cast(pa.Decimal128Type, primitive) return DecimalType(primitive.precision, primitive.scale) - elif pa.types.is_string(primitive): + elif pa.types.is_string(primitive) or pa.types.is_large_string(primitive): return StringType() elif pa.types.is_date32(primitive): return DateType() @@ -799,7 +889,7 @@ def primitive(self, primitive: pa.DataType) -> IcebergType: return TimestamptzType() elif primitive.tz is None: return TimestampType() - elif pa.types.is_binary(primitive): + elif pa.types.is_binary(primitive) or pa.types.is_large_binary(primitive): return BinaryType() elif pa.types.is_fixed_size_binary(primitive): primitive = cast(pa.FixedSizeBinaryType, primitive) @@ -807,6 +897,45 @@ def primitive(self, primitive: pa.DataType) -> IcebergType: raise TypeError(f"Unsupported type: {primitive}") + def before_field(self, field: pa.Field) -> None: + self._field_names.append(field.name) + + def after_field(self, field: pa.Field) -> None: + self._field_names.pop() + + def before_list_element(self, element: pa.Field) -> None: + self._field_names.append(LIST_ELEMENT_NAME) + + def after_list_element(self, element: pa.Field) -> None: + self._field_names.pop() + + def before_map_key(self, key: pa.Field) -> None: + self._field_names.append(MAP_KEY_NAME) + + def after_map_key(self, element: pa.Field) -> None: + self._field_names.pop() + + def before_map_value(self, value: pa.Field) -> None: + self._field_names.append(MAP_VALUE_NAME) + + def after_map_value(self, element: pa.Field) -> None: + self._field_names.pop() + + +class _ConvertToIcebergWithoutIDs(_ConvertToIceberg): + """ + Converts PyArrowSchema to Iceberg Schema with all -1 ids. + + The schema generated through this visitor should always be + used in conjunction with `new_table_metadata` function to + assign new field ids in order. This is currently used only + when creating an Iceberg Schema from a PyArrow schema when + creating a new Iceberg table. 
+ """ + + def _field_id(self, field: pa.Field) -> int: + return -1 + def _task_to_table( fs: FileSystem, @@ -818,6 +947,7 @@ def _task_to_table( case_sensitive: bool, row_counts: List[int], limit: Optional[int] = None, + name_mapping: Optional[NameMapping] = None, ) -> Optional[pa.Table]: if limit and sum(row_counts) >= limit: return None @@ -830,9 +960,9 @@ def _task_to_table( schema_raw = None if metadata := physical_schema.metadata: schema_raw = metadata.get(ICEBERG_SCHEMA) - # TODO: if field_ids are not present, Name Mapping should be implemented to look them up in the table schema, - # see https://github.com/apache/iceberg/issues/7451 - file_schema = Schema.model_validate_json(schema_raw) if schema_raw is not None else pyarrow_to_schema(physical_schema) + file_schema = ( + Schema.model_validate_json(schema_raw) if schema_raw is not None else pyarrow_to_schema(physical_schema, name_mapping) + ) pyarrow_filter = None if bound_row_filter is not AlwaysTrue(): @@ -894,7 +1024,7 @@ def _task_to_table( def _read_all_delete_files(fs: FileSystem, tasks: Iterable[FileScanTask]) -> Dict[str, List[ChunkedArray]]: deletes_per_file: Dict[str, List[ChunkedArray]] = {} - unique_deletes = set(chain.from_iterable([task.delete_files for task in tasks])) + unique_deletes = set(itertools.chain.from_iterable([task.delete_files for task in tasks])) if len(unique_deletes) > 0: executor = ExecutorFactory.get_or_create() deletes_per_files: Iterator[Dict[str, ChunkedArray]] = executor.map( @@ -969,6 +1099,7 @@ def project_table( case_sensitive, row_counts, limit, + table.name_mapping(), ) for task in tasks ] @@ -1018,12 +1149,20 @@ class ArrowProjectionVisitor(SchemaWithPartnerVisitor[pa.Array, Optional[pa.Arra def __init__(self, file_schema: Schema): self.file_schema = file_schema - def cast_if_needed(self, field: NestedField, values: pa.Array) -> pa.Array: + def _cast_if_needed(self, field: NestedField, values: pa.Array) -> pa.Array: file_field = self.file_schema.find_field(field.field_id) if field.field_type.is_primitive and field.field_type != file_field.field_type: return values.cast(schema_to_pyarrow(promote(file_field.field_type, field.field_type))) return values + def _construct_field(self, field: NestedField, arrow_type: pa.DataType) -> pa.Field: + return pa.field( + name=field.name, + type=arrow_type, + nullable=field.optional, + metadata={DOC: field.doc} if field.doc is not None else None, + ) + def schema(self, schema: Schema, schema_partner: Optional[pa.Array], struct_result: Optional[pa.Array]) -> Optional[pa.Array]: return struct_result @@ -1036,13 +1175,13 @@ def struct( fields: List[pa.Field] = [] for field, field_array in zip(struct.fields, field_results): if field_array is not None: - array = self.cast_if_needed(field, field_array) + array = self._cast_if_needed(field, field_array) field_arrays.append(array) - fields.append(pa.field(field.name, array.type, field.optional)) + fields.append(self._construct_field(field, array.type)) elif field.optional: arrow_type = schema_to_pyarrow(field.field_type) field_arrays.append(pa.nulls(len(struct_array), type=arrow_type)) - fields.append(pa.field(field.name, arrow_type, field.optional)) + fields.append(self._construct_field(field, arrow_type)) else: raise ResolveError(f"Field is required, and could not be found in the file: {field}") @@ -1052,24 +1191,32 @@ def field(self, field: NestedField, _: Optional[pa.Array], field_array: Optional return field_array def list(self, list_type: ListType, list_array: Optional[pa.Array], value_array: 
Optional[pa.Array]) -> Optional[pa.Array]: - return ( - pa.ListArray.from_arrays(list_array.offsets, self.cast_if_needed(list_type.element_field, value_array)) - if isinstance(list_array, pa.ListArray) - else None - ) + if isinstance(list_array, pa.ListArray) and value_array is not None: + if isinstance(value_array, pa.StructArray): + # This can be removed once this has been fixed: + # https://github.com/apache/arrow/issues/38809 + list_array = pa.ListArray.from_arrays(list_array.offsets, value_array) + + arrow_field = pa.list_(self._construct_field(list_type.element_field, value_array.type)) + return list_array.cast(arrow_field) + else: + return None def map( self, map_type: MapType, map_array: Optional[pa.Array], key_result: Optional[pa.Array], value_result: Optional[pa.Array] ) -> Optional[pa.Array]: - return ( - pa.MapArray.from_arrays( - map_array.offsets, - self.cast_if_needed(map_type.key_field, key_result), - self.cast_if_needed(map_type.value_field, value_result), + if isinstance(map_array, pa.MapArray) and key_result is not None and value_result is not None: + arrow_field = pa.map_( + self._construct_field(map_type.key_field, key_result.type), + self._construct_field(map_type.value_field, value_result.type), ) - if isinstance(map_array, pa.MapArray) - else None - ) + if isinstance(value_result, pa.StructArray): + # Arrow does not allow reordering of fields, therefore we have to copy the array :( + return pa.MapArray.from_arrays(map_array.offsets, key_result, value_result, arrow_field) + else: + return map_array.cast(arrow_field) + else: + return None def primitive(self, _: PrimitiveType, array: Optional[pa.Array]) -> Optional[pa.Array]: return array @@ -1196,13 +1343,22 @@ def __init__(self, iceberg_type: PrimitiveType, physical_type_string: str, trunc def serialize(self, value: Any) -> bytes: return to_bytes(self.primitive_type, value) - def update_min(self, val: Any) -> None: - self.current_min = val if self.current_min is None else min(val, self.current_min) + def update_min(self, val: Optional[Any]) -> None: + if self.current_min is None: + self.current_min = val + elif val is not None: + self.current_min = min(val, self.current_min) - def update_max(self, val: Any) -> None: - self.current_max = val if self.current_max is None else max(val, self.current_max) + def update_max(self, val: Optional[Any]) -> None: + if self.current_max is None: + self.current_max = val + elif val is not None: + self.current_max = max(val, self.current_max) + + def min_as_bytes(self) -> Optional[bytes]: + if self.current_min is None: + return None - def min_as_bytes(self) -> bytes: return self.serialize( self.current_min if self.trunc_length is None @@ -1240,19 +1396,12 @@ class MetricModeTypes(Enum): FULL = "full" -DEFAULT_METRICS_MODE_KEY = "write.metadata.metrics.default" -COLUMN_METRICS_MODE_KEY_PREFIX = "write.metadata.metrics.column" - - @dataclass(frozen=True) class MetricsMode(Singleton): type: MetricModeTypes length: Optional[int] = None -_DEFAULT_METRICS_MODE = MetricsMode(MetricModeTypes.TRUNCATE, DEFAULT_TRUNCATION_LENGTH) - - def match_metrics_mode(mode: str) -> MetricsMode: sanitized_mode = mode.strip().lower() if sanitized_mode.startswith("truncate"): @@ -1286,12 +1435,14 @@ class PyArrowStatisticsCollector(PreOrderSchemaVisitor[List[StatisticsCollector] _field_id: int = 0 _schema: Schema _properties: Dict[str, str] - _default_mode: Optional[str] + _default_mode: str def __init__(self, schema: Schema, properties: Dict[str, str]): self._schema = schema self._properties = properties - 
self._default_mode = self._properties.get(DEFAULT_METRICS_MODE_KEY) + self._default_mode = self._properties.get( + TableProperties.DEFAULT_WRITE_METRICS_MODE, TableProperties.DEFAULT_WRITE_METRICS_MODE_DEFAULT + ) def schema(self, schema: Schema, struct_result: Callable[[], List[StatisticsCollector]]) -> List[StatisticsCollector]: return struct_result() @@ -1299,7 +1450,7 @@ def schema(self, schema: Schema, struct_result: Callable[[], List[StatisticsColl def struct( self, struct: StructType, field_results: List[Callable[[], List[StatisticsCollector]]] ) -> List[StatisticsCollector]: - return list(chain(*[result() for result in field_results])) + return list(itertools.chain(*[result() for result in field_results])) def field(self, field: NestedField, field_result: Callable[[], List[StatisticsCollector]]) -> List[StatisticsCollector]: self._field_id = field.field_id @@ -1326,12 +1477,9 @@ def primitive(self, primitive: PrimitiveType) -> List[StatisticsCollector]: if column_name is None: return [] - metrics_mode = _DEFAULT_METRICS_MODE - - if self._default_mode: - metrics_mode = match_metrics_mode(self._default_mode) + metrics_mode = match_metrics_mode(self._default_mode) - col_mode = self._properties.get(f"{COLUMN_METRICS_MODE_KEY_PREFIX}.{column_name}") + col_mode = self._properties.get(f"{TableProperties.METRICS_MODE_COLUMN_CONF_PREFIX}.{column_name}") if col_mode: metrics_mode = match_metrics_mode(col_mode) @@ -1391,7 +1539,7 @@ def schema(self, schema: Schema, struct_result: Callable[[], List[ID2ParquetPath return struct_result() def struct(self, struct: StructType, field_results: List[Callable[[], List[ID2ParquetPath]]]) -> List[ID2ParquetPath]: - return list(chain(*[result() for result in field_results])) + return list(itertools.chain(*[result() for result in field_results])) def field(self, field: NestedField, field_result: Callable[[], List[ID2ParquetPath]]) -> List[ID2ParquetPath]: self._field_id = field.field_id @@ -1447,9 +1595,8 @@ def parquet_path_to_id_mapping( def fill_parquet_file_metadata( - df: DataFile, + data_file: DataFile, parquet_metadata: pq.FileMetaData, - file_size: int, stats_columns: Dict[int, StatisticsCollector], parquet_column_mapping: Dict[str, int], ) -> None: @@ -1457,8 +1604,6 @@ def fill_parquet_file_metadata( Compute and fill the following fields of the DataFile object. - file_format - - record_count - - file_size_in_bytes - column_sizes - value_counts - null_value_counts @@ -1468,11 +1613,8 @@ def fill_parquet_file_metadata( - split_offsets Args: - df (DataFile): A DataFile object representing the Parquet file for which metadata is to be filled. + data_file (DataFile): A DataFile object representing the Parquet file for which metadata is to be filled. parquet_metadata (pyarrow.parquet.FileMetaData): A pyarrow metadata object. - file_size (int): The total compressed file size cannot be retrieved from the metadata and hence has to - be passed here. Depending on the kind of file system and pyarrow library call used, different - ways to obtain this value might be appropriate. stats_columns (Dict[int, StatisticsCollector]): The statistics gathering plan. 
It is required to set the mode for column metrics collection """ @@ -1569,13 +1711,106 @@ def fill_parquet_file_metadata( del upper_bounds[field_id] del null_value_counts[field_id] - df.file_format = FileFormat.PARQUET - df.record_count = parquet_metadata.num_rows - df.file_size_in_bytes = file_size - df.column_sizes = column_sizes - df.value_counts = value_counts - df.null_value_counts = null_value_counts - df.nan_value_counts = nan_value_counts - df.lower_bounds = lower_bounds - df.upper_bounds = upper_bounds - df.split_offsets = split_offsets + data_file.record_count = parquet_metadata.num_rows + data_file.column_sizes = column_sizes + data_file.value_counts = value_counts + data_file.null_value_counts = null_value_counts + data_file.nan_value_counts = nan_value_counts + data_file.lower_bounds = lower_bounds + data_file.upper_bounds = upper_bounds + data_file.split_offsets = split_offsets + + +def write_file(io: FileIO, table_metadata: TableMetadata, tasks: Iterator[WriteTask]) -> Iterator[DataFile]: + task = next(tasks) + + try: + _ = next(tasks) + # If there are more tasks, raise an exception + raise NotImplementedError("Only unpartitioned writes are supported: https://github.com/apache/iceberg-python/issues/208") + except StopIteration: + pass + + parquet_writer_kwargs = _get_parquet_writer_kwargs(table_metadata.properties) + + file_path = f'{table_metadata.location}/data/{task.generate_data_file_filename("parquet")}' + schema = table_metadata.schema() + arrow_file_schema = schema_to_pyarrow(schema) + + fo = io.new_output(file_path) + row_group_size = PropertyUtil.property_as_int( + properties=table_metadata.properties, + property_name=TableProperties.PARQUET_ROW_GROUP_SIZE_BYTES, + default=TableProperties.PARQUET_ROW_GROUP_SIZE_BYTES_DEFAULT, + ) + with fo.create(overwrite=True) as fos: + with pq.ParquetWriter(fos, schema=arrow_file_schema, **parquet_writer_kwargs) as writer: + writer.write_table(task.df, row_group_size=row_group_size) + + data_file = DataFile( + content=DataFileContent.DATA, + file_path=file_path, + file_format=FileFormat.PARQUET, + partition=Record(), + file_size_in_bytes=len(fo), + # After this has been fixed: + # https://github.com/apache/iceberg-python/issues/271 + # sort_order_id=task.sort_order_id, + sort_order_id=None, + # Just copy these from the table for now + spec_id=table_metadata.default_spec_id, + equality_ids=None, + key_metadata=None, + ) + + fill_parquet_file_metadata( + data_file=data_file, + parquet_metadata=writer.writer.metadata, + stats_columns=compute_statistics_plan(schema, table_metadata.properties), + parquet_column_mapping=parquet_path_to_id_mapping(schema), + ) + return iter([data_file]) + + +ICEBERG_UNCOMPRESSED_CODEC = "uncompressed" +PYARROW_UNCOMPRESSED_CODEC = "none" + + +def _get_parquet_writer_kwargs(table_properties: Properties) -> Dict[str, Any]: + for key_pattern in [ + TableProperties.PARQUET_ROW_GROUP_SIZE_BYTES, + TableProperties.PARQUET_PAGE_ROW_LIMIT, + TableProperties.PARQUET_BLOOM_FILTER_MAX_BYTES, + f"{TableProperties.PARQUET_BLOOM_FILTER_COLUMN_ENABLED_PREFIX}.*", + ]: + if unsupported_keys := fnmatch.filter(table_properties, key_pattern): + raise NotImplementedError(f"Parquet writer option(s) {unsupported_keys} not implemented") + + compression_codec = table_properties.get(TableProperties.PARQUET_COMPRESSION, TableProperties.PARQUET_COMPRESSION_DEFAULT) + compression_level = PropertyUtil.property_as_int( + properties=table_properties, + property_name=TableProperties.PARQUET_COMPRESSION_LEVEL, + 
default=TableProperties.PARQUET_COMPRESSION_LEVEL_DEFAULT, + ) + if compression_codec == ICEBERG_UNCOMPRESSED_CODEC: + compression_codec = PYARROW_UNCOMPRESSED_CODEC + + return { + "compression": compression_codec, + "compression_level": compression_level, + "data_page_size": PropertyUtil.property_as_int( + properties=table_properties, + property_name=TableProperties.PARQUET_PAGE_SIZE_BYTES, + default=TableProperties.PARQUET_PAGE_SIZE_BYTES_DEFAULT, + ), + "dictionary_pagesize_limit": PropertyUtil.property_as_int( + properties=table_properties, + property_name=TableProperties.PARQUET_DICT_SIZE_BYTES, + default=TableProperties.PARQUET_DICT_SIZE_BYTES_DEFAULT, + ), + "write_batch_size": PropertyUtil.property_as_int( + properties=table_properties, + property_name=TableProperties.PARQUET_PAGE_ROW_LIMIT, + default=TableProperties.PARQUET_PAGE_ROW_LIMIT_DEFAULT, + ), + } diff --git a/pyiceberg/manifest.py b/pyiceberg/manifest.py index ec7c218b1c..03dc3199bf 100644 --- a/pyiceberg/manifest.py +++ b/pyiceberg/manifest.py @@ -37,7 +37,7 @@ from pyiceberg.io import FileIO, InputFile, OutputFile from pyiceberg.partitioning import PartitionSpec from pyiceberg.schema import Schema -from pyiceberg.typedef import Record +from pyiceberg.typedef import EMPTY_DICT, Record from pyiceberg.types import ( BinaryType, BooleanType, @@ -60,6 +60,8 @@ DEFAULT_BLOCK_SIZE = 67108864 # 64 * 1024 * 1024 DEFAULT_READ_VERSION: Literal[2] = 2 +INITIAL_SEQUENCE_NUMBER = 0 + class DataFileContent(int, Enum): DATA = 0 @@ -301,31 +303,28 @@ def _(partition_field_type: PrimitiveType) -> PrimitiveType: def data_file_with_partition(partition_type: StructType, format_version: Literal[1, 2]) -> StructType: - data_file_partition_type = StructType( - *[ - NestedField( - field_id=field.field_id, - name=field.name, - field_type=partition_field_to_data_file_partition_field(field.field_type), - ) - for field in partition_type.fields - ] - ) + data_file_partition_type = StructType(*[ + NestedField( + field_id=field.field_id, + name=field.name, + field_type=partition_field_to_data_file_partition_field(field.field_type), + required=field.required, + ) + for field in partition_type.fields + ]) - return StructType( - *[ - NestedField( - field_id=102, - name="partition", - field_type=data_file_partition_type, - required=True, - doc="Partition data tuple, schema based on the partition spec", - ) - if field.field_id == 102 - else field - for field in DATA_FILE_TYPE[format_version].fields - ] - ) + return StructType(*[ + NestedField( + field_id=102, + name="partition", + field_type=data_file_partition_type, + required=True, + doc="Partition data tuple, schema based on the partition spec", + ) + if field.field_id == 102 + else field + for field in DATA_FILE_TYPE[format_version].fields + ]) class DataFile(Record): @@ -410,12 +409,10 @@ def __eq__(self, other: Any) -> bool: def manifest_entry_schema_with_data_file(format_version: Literal[1, 2], data_file: StructType) -> Schema: - return Schema( - *[ - NestedField(2, "data_file", data_file, required=True) if field.field_id == 2 else field - for field in MANIFEST_ENTRY_SCHEMAS[format_version].fields - ] - ) + return Schema(*[ + NestedField(2, "data_file", data_file, required=True) if field.field_id == 2 else field + for field in MANIFEST_ENTRY_SCHEMAS[format_version].fields + ]) class ManifestEntry(Record): @@ -497,25 +494,42 @@ def construct_partition_summaries(spec: PartitionSpec, schema: Schema, partition return [field.to_summary() for field in field_stats] -MANIFEST_FILE_SCHEMA: Schema = Schema( 
- NestedField(500, "manifest_path", StringType(), required=True, doc="Location URI with FS scheme"), - NestedField(501, "manifest_length", LongType(), required=True), - NestedField(502, "partition_spec_id", IntegerType(), required=True), - NestedField(517, "content", IntegerType(), required=False, initial_default=ManifestContent.DATA), - NestedField(515, "sequence_number", LongType(), required=False, initial_default=0), - NestedField(516, "min_sequence_number", LongType(), required=False, initial_default=0), - NestedField(503, "added_snapshot_id", LongType(), required=False), - NestedField(504, "added_files_count", IntegerType(), required=False), - NestedField(505, "existing_files_count", IntegerType(), required=False), - NestedField(506, "deleted_files_count", IntegerType(), required=False), - NestedField(512, "added_rows_count", LongType(), required=False), - NestedField(513, "existing_rows_count", LongType(), required=False), - NestedField(514, "deleted_rows_count", LongType(), required=False), - NestedField(507, "partitions", ListType(508, PARTITION_FIELD_SUMMARY_TYPE, element_required=True), required=False), - NestedField(519, "key_metadata", BinaryType(), required=False), -) +MANIFEST_LIST_FILE_SCHEMAS: Dict[int, Schema] = { + 1: Schema( + NestedField(500, "manifest_path", StringType(), required=True, doc="Location URI with FS scheme"), + NestedField(501, "manifest_length", LongType(), required=True), + NestedField(502, "partition_spec_id", IntegerType(), required=True), + NestedField(503, "added_snapshot_id", LongType(), required=True), + NestedField(504, "added_files_count", IntegerType(), required=False), + NestedField(505, "existing_files_count", IntegerType(), required=False), + NestedField(506, "deleted_files_count", IntegerType(), required=False), + NestedField(512, "added_rows_count", LongType(), required=False), + NestedField(513, "existing_rows_count", LongType(), required=False), + NestedField(514, "deleted_rows_count", LongType(), required=False), + NestedField(507, "partitions", ListType(508, PARTITION_FIELD_SUMMARY_TYPE, element_required=True), required=False), + NestedField(519, "key_metadata", BinaryType(), required=False), + ), + 2: Schema( + NestedField(500, "manifest_path", StringType(), required=True, doc="Location URI with FS scheme"), + NestedField(501, "manifest_length", LongType(), required=True), + NestedField(502, "partition_spec_id", IntegerType(), required=True), + NestedField(517, "content", IntegerType(), required=True, initial_default=ManifestContent.DATA), + NestedField(515, "sequence_number", LongType(), required=True, initial_default=0), + NestedField(516, "min_sequence_number", LongType(), required=True, initial_default=0), + NestedField(503, "added_snapshot_id", LongType(), required=True), + NestedField(504, "added_files_count", IntegerType(), required=True), + NestedField(505, "existing_files_count", IntegerType(), required=True), + NestedField(506, "deleted_files_count", IntegerType(), required=True), + NestedField(512, "added_rows_count", LongType(), required=True), + NestedField(513, "existing_rows_count", LongType(), required=True), + NestedField(514, "deleted_rows_count", LongType(), required=True), + NestedField(507, "partitions", ListType(508, PARTITION_FIELD_SUMMARY_TYPE, element_required=True), required=False), + NestedField(519, "key_metadata", BinaryType(), required=False), + ), +} + +MANIFEST_LIST_FILE_STRUCTS = {format_version: schema.as_struct() for format_version, schema in MANIFEST_LIST_FILE_SCHEMAS.items()} 
-MANIFEST_FILE_SCHEMA_STRUCT = MANIFEST_FILE_SCHEMA.as_struct() POSITIONAL_DELETE_SCHEMA = Schema( NestedField(2147483546, "file_path", StringType()), NestedField(2147483545, "pos", IntegerType()) @@ -557,7 +571,7 @@ class ManifestFile(Record): key_metadata: Optional[bytes] def __init__(self, *data: Any, **named_data: Any) -> None: - super().__init__(*data, **{"struct": MANIFEST_FILE_SCHEMA_STRUCT, **named_data}) + super().__init__(*data, **{"struct": MANIFEST_LIST_FILE_STRUCTS[DEFAULT_READ_VERSION], **named_data}) def has_added_files(self) -> bool: return self.added_files_count is None or self.added_files_count > 0 @@ -602,7 +616,7 @@ def read_manifest_list(input_file: InputFile) -> Iterator[ManifestFile]: """ with AvroFile[ManifestFile]( input_file, - MANIFEST_FILE_SCHEMA, + MANIFEST_LIST_FILE_SCHEMAS[DEFAULT_READ_VERSION], read_types={-1: ManifestFile, 508: PartitionFieldSummary}, read_enums={517: ManifestContent}, ) as reader: @@ -665,7 +679,9 @@ class ManifestWriter(ABC): _min_data_sequence_number: Optional[int] _partitions: List[Record] - def __init__(self, spec: PartitionSpec, schema: Schema, output_file: OutputFile, snapshot_id: int, meta: Dict[str, str]): + def __init__( + self, spec: PartitionSpec, schema: Schema, output_file: OutputFile, snapshot_id: int, meta: Dict[str, str] = EMPTY_DICT + ) -> None: self.closed = False self._spec = spec self._schema = schema @@ -699,13 +715,11 @@ def __exit__( self._writer.__exit__(exc_type, exc_value, traceback) @abstractmethod - def content(self) -> ManifestContent: - ... + def content(self) -> ManifestContent: ... @property @abstractmethod - def version(self) -> Literal[1, 2]: - ... + def version(self) -> Literal[1, 2]: ... def _with_partition(self, format_version: Literal[1, 2]) -> Schema: data_file_type = data_file_with_partition( @@ -723,8 +737,7 @@ def new_writer(self) -> AvroOutputFile[ManifestEntry]: ) @abstractmethod - def prepare_entry(self, entry: ManifestEntry) -> ManifestEntry: - ... + def prepare_entry(self, entry: ManifestEntry) -> ManifestEntry: ... 
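The `ManifestListWriter` rework below threads the format version through the factory. A hedged sketch of the updated `write_manifest_list` entry point (illustrative path and empty manifest list; `sequence_number` may only be None for v1 tables, per the check below):

```python
from typing import List

from pyiceberg.io.pyarrow import PyArrowFileIO
from pyiceberg.manifest import ManifestFile, write_manifest_list

io = PyArrowFileIO()
manifest_files: List[ManifestFile] = []  # normally produced by a ManifestWriter

with write_manifest_list(
    format_version=2,
    output_file=io.new_output("/tmp/metadata/snap-124.avro"),
    snapshot_id=124,
    parent_snapshot_id=None,  # the first snapshot of a table has no parent
    sequence_number=1,
) as writer:
    writer.add_manifests(manifest_files)
```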
def to_manifest_file(self) -> ManifestFile: """Return the manifest file.""" @@ -746,7 +759,7 @@ def to_manifest_file(self) -> ManifestFile: existing_rows_count=self._existing_rows, deleted_rows_count=self._deleted_rows, partitions=construct_partition_summaries(self._spec, self._schema, self._partitions), - key_metadatas=None, + key_metadata=None, ) def add_entry(self, entry: ManifestEntry) -> ManifestWriter: @@ -845,13 +858,15 @@ def write_manifest( class ManifestListWriter(ABC): + _format_version: Literal[1, 2] _output_file: OutputFile _meta: Dict[str, str] _manifest_files: List[ManifestFile] _commit_snapshot_id: int _writer: AvroOutputFile[ManifestFile] - def __init__(self, output_file: OutputFile, meta: Dict[str, str]): + def __init__(self, format_version: Literal[1, 2], output_file: OutputFile, meta: Dict[str, Any]): + self._format_version = format_version self._output_file = output_file self._meta = meta self._manifest_files = [] @@ -859,7 +874,11 @@ def __init__(self, output_file: OutputFile, meta: Dict[str, str]): def __enter__(self) -> ManifestListWriter: """Open the writer for writing.""" self._writer = AvroOutputFile[ManifestFile]( - output_file=self._output_file, file_schema=MANIFEST_FILE_SCHEMA, schema_name="manifest_file", metadata=self._meta + output_file=self._output_file, + record_schema=MANIFEST_LIST_FILE_SCHEMAS[DEFAULT_READ_VERSION], + file_schema=MANIFEST_LIST_FILE_SCHEMAS[self._format_version], + schema_name="manifest_file", + metadata=self._meta, ) self._writer.__enter__() return self @@ -875,8 +894,7 @@ def __exit__( return @abstractmethod - def prepare_manifest(self, manifest_file: ManifestFile) -> ManifestFile: - ... + def prepare_manifest(self, manifest_file: ManifestFile) -> ManifestFile: ... def add_manifests(self, manifest_files: List[ManifestFile]) -> ManifestListWriter: self._writer.write_block([self.prepare_manifest(manifest_file) for manifest_file in manifest_files]) @@ -884,9 +902,11 @@ def add_manifests(self, manifest_files: List[ManifestFile]) -> ManifestListWrite class ManifestListWriterV1(ManifestListWriter): - def __init__(self, output_file: OutputFile, snapshot_id: int, parent_snapshot_id: int): + def __init__(self, output_file: OutputFile, snapshot_id: int, parent_snapshot_id: Optional[int]): super().__init__( - output_file, {"snapshot-id": str(snapshot_id), "parent-snapshot-id": str(parent_snapshot_id), "format-version": "1"} + format_version=1, + output_file=output_file, + meta={"snapshot-id": str(snapshot_id), "parent-snapshot-id": str(parent_snapshot_id), "format-version": "1"}, ) def prepare_manifest(self, manifest_file: ManifestFile) -> ManifestFile: @@ -899,10 +919,11 @@ class ManifestListWriterV2(ManifestListWriter): _commit_snapshot_id: int _sequence_number: int - def __init__(self, output_file: OutputFile, snapshot_id: int, parent_snapshot_id: int, sequence_number: int): + def __init__(self, output_file: OutputFile, snapshot_id: int, parent_snapshot_id: Optional[int], sequence_number: int): super().__init__( - output_file, - { + format_version=2, + output_file=output_file, + meta={ "snapshot-id": str(snapshot_id), "parent-snapshot-id": str(parent_snapshot_id), "sequence-number": str(sequence_number), @@ -920,7 +941,7 @@ def prepare_manifest(self, manifest_file: ManifestFile) -> ManifestFile: # To validate this, check that the snapshot id matches the current commit if self._commit_snapshot_id != wrapped_manifest_file.added_snapshot_id: raise ValueError( - f"Found unassigned sequence number for a manifest from snapshot: 
{wrapped_manifest_file.added_snapshot_id}" + f"Found unassigned sequence number for a manifest from snapshot: {self._commit_snapshot_id} != {wrapped_manifest_file.added_snapshot_id}" ) wrapped_manifest_file.sequence_number = self._sequence_number @@ -936,11 +957,17 @@ def prepare_manifest(self, manifest_file: ManifestFile) -> ManifestFile: def write_manifest_list( - format_version: Literal[1, 2], output_file: OutputFile, snapshot_id: int, parent_snapshot_id: int, sequence_number: int + format_version: Literal[1, 2], + output_file: OutputFile, + snapshot_id: int, + parent_snapshot_id: Optional[int], + sequence_number: Optional[int], ) -> ManifestListWriter: if format_version == 1: return ManifestListWriterV1(output_file, snapshot_id, parent_snapshot_id) elif format_version == 2: + if sequence_number is None: + raise ValueError(f"Sequence-number is required for V2 tables: {sequence_number}") return ManifestListWriterV2(output_file, snapshot_id, parent_snapshot_id, sequence_number) else: raise ValueError(f"Cannot write manifest list for table version: {format_version}") diff --git a/pyiceberg/partitioning.py b/pyiceberg/partitioning.py index f6307f0f8c..a6692b325e 100644 --- a/pyiceberg/partitioning.py +++ b/pyiceberg/partitioning.py @@ -16,14 +16,21 @@ # under the License. from __future__ import annotations -from functools import cached_property +import uuid +from abc import ABC, abstractmethod +from dataclasses import dataclass +from datetime import date, datetime +from functools import cached_property, singledispatch from typing import ( Any, Dict, + Generic, List, Optional, Tuple, + TypeVar, ) +from urllib.parse import quote from pydantic import ( BeforeValidator, @@ -34,9 +41,30 @@ from typing_extensions import Annotated from pyiceberg.schema import Schema -from pyiceberg.transforms import Transform, parse_transform -from pyiceberg.typedef import IcebergBaseModel -from pyiceberg.types import NestedField, StructType +from pyiceberg.transforms import ( + BucketTransform, + DayTransform, + HourTransform, + IdentityTransform, + Transform, + TruncateTransform, + UnknownTransform, + VoidTransform, + YearTransform, + parse_transform, +) +from pyiceberg.typedef import IcebergBaseModel, Record +from pyiceberg.types import ( + DateType, + IcebergType, + NestedField, + PrimitiveType, + StructType, + TimestampType, + TimestamptzType, + UUIDType, +) +from pyiceberg.utils.datetime import date_to_days, datetime_to_micros INITIAL_PARTITION_SPEC_ID = 0 PARTITION_FIELD_ID_START: int = 1000 @@ -143,7 +171,7 @@ def is_unpartitioned(self) -> bool: def last_assigned_field_id(self) -> int: if self.fields: return max(pf.field_id for pf in self.fields) - return PARTITION_FIELD_ID_START + return PARTITION_FIELD_ID_START - 1 @cached_property def source_id_to_fields_map(self) -> Dict[int, List[PartitionField]]: @@ -190,9 +218,27 @@ def partition_type(self, schema: Schema) -> StructType: for field in self.fields: source_type = schema.find_type(field.source_id) result_type = field.transform.result_type(source_type) - nested_fields.append(NestedField(field.field_id, field.name, result_type, required=False)) + required = schema.find_field(field.source_id).required + nested_fields.append(NestedField(field.field_id, field.name, result_type, required=required)) return StructType(*nested_fields) + def partition_to_path(self, data: Record, schema: Schema) -> str: + partition_type = self.partition_type(schema) + field_types = partition_type.fields + + field_strs = [] + value_strs = [] + for pos, value in 
enumerate(data.record_fields()): + partition_field = self.fields[pos] + value_str = partition_field.transform.to_human_string(field_types[pos].field_type, value=value) + + value_str = quote(value_str, safe='') + value_strs.append(value_str) + field_strs.append(partition_field.name) + + path = "/".join([field_str + "=" + value_str for field_str, value_str in zip(field_strs, value_strs)]) + return path + UNPARTITIONED_PARTITION_SPEC = PartitionSpec(spec_id=0) @@ -215,3 +261,164 @@ def assign_fresh_partition_spec_ids(spec: PartitionSpec, old_schema: Schema, fre ) ) return PartitionSpec(*partition_fields, spec_id=INITIAL_PARTITION_SPEC_ID) + + +T = TypeVar("T") + + +class PartitionSpecVisitor(Generic[T], ABC): + @abstractmethod + def identity(self, field_id: int, source_name: str, source_id: int) -> T: + """Visit identity partition field.""" + + @abstractmethod + def bucket(self, field_id: int, source_name: str, source_id: int, num_buckets: int) -> T: + """Visit bucket partition field.""" + + @abstractmethod + def truncate(self, field_id: int, source_name: str, source_id: int, width: int) -> T: + """Visit truncate partition field.""" + + @abstractmethod + def year(self, field_id: int, source_name: str, source_id: int) -> T: + """Visit year partition field.""" + + @abstractmethod + def month(self, field_id: int, source_name: str, source_id: int) -> T: + """Visit month partition field.""" + + @abstractmethod + def day(self, field_id: int, source_name: str, source_id: int) -> T: + """Visit day partition field.""" + + @abstractmethod + def hour(self, field_id: int, source_name: str, source_id: int) -> T: + """Visit hour partition field.""" + + @abstractmethod + def always_null(self, field_id: int, source_name: str, source_id: int) -> T: + """Visit void partition field.""" + + @abstractmethod + def unknown(self, field_id: int, source_name: str, source_id: int, transform: str) -> T: + """Visit unknown partition field.""" + raise ValueError(f"Unknown transform is not supported: {transform}") + + +class _PartitionNameGenerator(PartitionSpecVisitor[str]): + def identity(self, field_id: int, source_name: str, source_id: int) -> str: + return source_name + + def bucket(self, field_id: int, source_name: str, source_id: int, num_buckets: int) -> str: + return f"{source_name}_bucket_{num_buckets}" + + def truncate(self, field_id: int, source_name: str, source_id: int, width: int) -> str: + return source_name + "_trunc_" + str(width) + + def year(self, field_id: int, source_name: str, source_id: int) -> str: + return source_name + "_year" + + def month(self, field_id: int, source_name: str, source_id: int) -> str: + return source_name + "_month" + + def day(self, field_id: int, source_name: str, source_id: int) -> str: + return source_name + "_day" + + def hour(self, field_id: int, source_name: str, source_id: int) -> str: + return source_name + "_hour" + + def always_null(self, field_id: int, source_name: str, source_id: int) -> str: + return source_name + "_null" + + def unknown(self, field_id: int, source_name: str, source_id: int, transform: str) -> str: + return super().unknown(field_id, source_name, source_id, transform) + + +R = TypeVar("R") + + +@singledispatch +def _visit(spec: PartitionSpec, schema: Schema, visitor: PartitionSpecVisitor[R]) -> List[R]: + return [_visit_partition_field(schema, field, visitor) for field in spec.fields] + + +def _visit_partition_field(schema: Schema, field: PartitionField, visitor: PartitionSpecVisitor[R]) -> R: + source_name = schema.find_column_name(field.source_id) 
+ if not source_name: + raise ValueError(f"Could not find field with id {field.source_id}") + + transform = field.transform + if isinstance(transform, IdentityTransform): + return visitor.identity(field.field_id, source_name, field.source_id) + elif isinstance(transform, BucketTransform): + return visitor.bucket(field.field_id, source_name, field.source_id, transform.num_buckets) + elif isinstance(transform, TruncateTransform): + return visitor.truncate(field.field_id, source_name, field.source_id, transform.width) + elif isinstance(transform, DayTransform): + return visitor.day(field.field_id, source_name, field.source_id) + elif isinstance(transform, HourTransform): + return visitor.hour(field.field_id, source_name, field.source_id) + elif isinstance(transform, YearTransform): + return visitor.year(field.field_id, source_name, field.source_id) + elif isinstance(transform, VoidTransform): + return visitor.always_null(field.field_id, source_name, field.source_id) + elif isinstance(transform, UnknownTransform): + return visitor.unknown(field.field_id, source_name, field.source_id, repr(transform)) + else: + raise ValueError(f"Unknown transform {transform}") + + +@dataclass(frozen=True) +class PartitionFieldValue: + field: PartitionField + value: Any + + +@dataclass(frozen=True) +class PartitionKey: + raw_partition_field_values: List[PartitionFieldValue] + partition_spec: PartitionSpec + schema: Schema + + @cached_property + def partition(self) -> Record: # partition key transformed with iceberg internal representation as input + iceberg_typed_key_values = {} + for raw_partition_field_value in self.raw_partition_field_values: + partition_fields = self.partition_spec.source_id_to_fields_map[raw_partition_field_value.field.source_id] + if len(partition_fields) != 1: + raise ValueError("partition_fields must contain exactly one field.") + partition_field = partition_fields[0] + iceberg_type = self.schema.find_field(name_or_id=raw_partition_field_value.field.source_id).field_type + iceberg_typed_value = _to_partition_representation(iceberg_type, raw_partition_field_value.value) + transformed_value = partition_field.transform.transform(iceberg_type)(iceberg_typed_value) + iceberg_typed_key_values[partition_field.name] = transformed_value + return Record(**iceberg_typed_key_values) + + def to_path(self) -> str: + return self.partition_spec.partition_to_path(self.partition, self.schema) + + +@singledispatch +def _to_partition_representation(type: IcebergType, value: Any) -> Any: + raise TypeError(f"Unsupported partition field type: {type}") + + +@_to_partition_representation.register(TimestampType) +@_to_partition_representation.register(TimestamptzType) +def _(type: IcebergType, value: Optional[datetime]) -> Optional[int]: + return datetime_to_micros(value) if value is not None else None + + +@_to_partition_representation.register(DateType) +def _(type: IcebergType, value: Optional[date]) -> Optional[int]: + return date_to_days(value) if value is not None else None + + +@_to_partition_representation.register(UUIDType) +def _(type: IcebergType, value: Optional[uuid.UUID]) -> Optional[str]: + return str(value) if value is not None else None + + +@_to_partition_representation.register(PrimitiveType) +def _(type: IcebergType, value: Optional[Any]) -> Optional[Any]: + return value diff --git a/pyiceberg/schema.py b/pyiceberg/schema.py index b61e4678b9..e805895a7b 100644 --- a/pyiceberg/schema.py +++ b/pyiceberg/schema.py @@ -22,6 +22,7 @@ from dataclasses import dataclass from functools import
cached_property, partial, singledispatch from typing import ( + TYPE_CHECKING, Any, Callable, Dict, @@ -62,6 +63,11 @@ UUIDType, ) +if TYPE_CHECKING: + from pyiceberg.table.name_mapping import ( + NameMapping, + ) + T = TypeVar("T") P = TypeVar("P") @@ -221,6 +227,12 @@ def find_type(self, name_or_id: Union[str, int], case_sensitive: bool = True) -> def highest_field_id(self) -> int: return max(self._lazy_id_to_name.keys(), default=0) + @cached_property + def name_mapping(self) -> NameMapping: + from pyiceberg.table.name_mapping import create_mapping_from_schema + + return create_mapping_from_schema(self) + def find_column_name(self, column_id: int) -> Optional[str]: """Find a column name given a column ID. @@ -1221,50 +1233,57 @@ def assign_fresh_schema_ids(schema_or_type: Union[Schema, IcebergType], next_id: class _SetFreshIDs(PreOrderSchemaVisitor[IcebergType]): """Traverses the schema and assigns monotonically increasing ids.""" - reserved_ids: Dict[int, int] + old_id_to_new_id: Dict[int, int] def __init__(self, next_id_func: Optional[Callable[[], int]] = None) -> None: - self.reserved_ids = {} + self.old_id_to_new_id = {} counter = itertools.count(1) self.next_id_func = next_id_func if next_id_func is not None else lambda: next(counter) - def _get_and_increment(self) -> int: - return self.next_id_func() + def _get_and_increment(self, current_id: int) -> int: + new_id = self.next_id_func() + self.old_id_to_new_id[current_id] = new_id + return new_id def schema(self, schema: Schema, struct_result: Callable[[], StructType]) -> Schema: - # First we keep the original identifier_field_ids here, we remap afterwards - fields = struct_result().fields - return Schema(*fields, identifier_field_ids=[self.reserved_ids[field_id] for field_id in schema.identifier_field_ids]) + return Schema( + *struct_result().fields, + identifier_field_ids=[self.old_id_to_new_id[field_id] for field_id in schema.identifier_field_ids], + ) def struct(self, struct: StructType, field_results: List[Callable[[], IcebergType]]) -> StructType: - # assign IDs for this struct's fields first - self.reserved_ids.update({field.field_id: self._get_and_increment() for field in struct.fields}) - return StructType(*[field() for field in field_results]) + new_ids = [self._get_and_increment(field.field_id) for field in struct.fields] + new_fields = [] + for field_id, field, field_type in zip(new_ids, struct.fields, field_results): + new_fields.append( + NestedField( + field_id=field_id, + name=field.name, + field_type=field_type(), + required=field.required, + doc=field.doc, + ) + ) + return StructType(*new_fields) def field(self, field: NestedField, field_result: Callable[[], IcebergType]) -> IcebergType: - return NestedField( - field_id=self.reserved_ids[field.field_id], - name=field.name, - field_type=field_result(), - required=field.required, - doc=field.doc, - ) + return field_result() def list(self, list_type: ListType, element_result: Callable[[], IcebergType]) -> ListType: - self.reserved_ids[list_type.element_id] = self._get_and_increment() + element_id = self._get_and_increment(list_type.element_id) return ListType( - element_id=self.reserved_ids[list_type.element_id], + element_id=element_id, element=element_result(), element_required=list_type.element_required, ) def map(self, map_type: MapType, key_result: Callable[[], IcebergType], value_result: Callable[[], IcebergType]) -> MapType: - self.reserved_ids[map_type.key_id] = self._get_and_increment() - self.reserved_ids[map_type.value_id] = self._get_and_increment() + 
key_id = self._get_and_increment(map_type.key_id) + value_id = self._get_and_increment(map_type.value_id) return MapType( - key_id=self.reserved_ids[map_type.key_id], + key_id=key_id, key_type=key_result(), - value_id=self.reserved_ids[map_type.value_id], + value_id=value_id, value_type=value_result(), value_required=map_type.value_required, ) diff --git a/pyiceberg/serializers.py b/pyiceberg/serializers.py index 6a580ead80..e2994884c6 100644 --- a/pyiceberg/serializers.py +++ b/pyiceberg/serializers.py @@ -24,6 +24,7 @@ from pyiceberg.io import InputFile, InputStream, OutputFile from pyiceberg.table.metadata import TableMetadata, TableMetadataUtil from pyiceberg.typedef import UTF8 +from pyiceberg.utils.config import Config GZIP = "gzip" @@ -127,6 +128,9 @@ def table_metadata(metadata: TableMetadata, output_file: OutputFile, overwrite: overwrite (bool): Where to overwrite the file if it already exists. Defaults to `False`. """ with output_file.create(overwrite=overwrite) as output_stream: - json_bytes = metadata.model_dump_json().encode(UTF8) + # We need to serialize None values, in order to dump `None` current-snapshot-id as `-1` + exclude_none = False if Config().get_bool("legacy-current-snapshot-id") else True + + json_bytes = metadata.model_dump_json(exclude_none=exclude_none).encode(UTF8) json_bytes = Compressor.get_compressor(output_file.location).bytes_compressor()(json_bytes) output_stream.write(json_bytes) diff --git a/pyiceberg/table/__init__.py b/pyiceberg/table/__init__.py index 40b37cc248..c5dd2e8e70 100644 --- a/pyiceberg/table/__init__.py +++ b/pyiceberg/table/__init__.py @@ -19,6 +19,7 @@ import datetime import itertools import uuid +import warnings from abc import ABC, abstractmethod from copy import copy from dataclasses import dataclass @@ -30,6 +31,7 @@ Any, Callable, Dict, + Generic, Iterable, List, Literal, @@ -40,20 +42,20 @@ Union, ) -from pydantic import Field, SerializeAsAny +from pydantic import Field, SerializeAsAny, field_validator from sortedcontainers import SortedList from typing_extensions import Annotated +import pyiceberg.expressions.parser as parser +import pyiceberg.expressions.visitors as visitors from pyiceberg.exceptions import CommitFailedException, ResolveError, ValidationError from pyiceberg.expressions import ( AlwaysTrue, And, BooleanExpression, EqualTo, - parser, - visitors, + Reference, ) -from pyiceberg.expressions.visitors import _InclusiveMetricsEvaluator, inclusive_projection from pyiceberg.io import FileIO, load_file_io from pyiceberg.manifest import ( POSITIONAL_DELETE_SCHEMA, @@ -61,15 +63,28 @@ DataFileContent, ManifestContent, ManifestEntry, + ManifestEntryStatus, ManifestFile, + write_manifest, + write_manifest_list, +) +from pyiceberg.partitioning import ( + INITIAL_PARTITION_SPEC_ID, + PARTITION_FIELD_ID_START, + PartitionField, + PartitionSpec, + _PartitionNameGenerator, + _visit_partition_field, ) -from pyiceberg.partitioning import PartitionSpec from pyiceberg.schema import ( + PartnerAccessor, Schema, SchemaVisitor, + SchemaWithPartnerVisitor, assign_fresh_schema_ids, promote, visit, + visit_with_partner, ) from pyiceberg.table.metadata import ( INITIAL_SEQUENCE_NUMBER, @@ -77,9 +92,22 @@ TableMetadata, TableMetadataUtil, ) +from pyiceberg.table.name_mapping import ( + NameMapping, + parse_mapping_from_json, + update_mapping, +) from pyiceberg.table.refs import MAIN_BRANCH, SnapshotRef -from pyiceberg.table.snapshots import Snapshot, SnapshotLogEntry +from pyiceberg.table.snapshots import ( + Operation, + Snapshot, + 
SnapshotLogEntry, + SnapshotSummaryCollector, + Summary, + update_snapshot_summaries, +) from pyiceberg.table.sorting import SortOrder +from pyiceberg.transforms import IdentityTransform, TimeTransform, Transform, VoidTransform from pyiceberg.typedef import ( EMPTY_DICT, IcebergBaseModel, @@ -95,11 +123,13 @@ NestedField, PrimitiveType, StructType, + transform_dict_value_to_str, ) from pyiceberg.utils.concurrent import ExecutorFactory from pyiceberg.utils.datetime import datetime_to_millis if TYPE_CHECKING: + import daft import pandas as pd import pyarrow as pa import ray @@ -107,24 +137,116 @@ from pyiceberg.catalog import Catalog + ALWAYS_TRUE = AlwaysTrue() TABLE_ROOT_ID = -1 +_JAVA_LONG_MAX = 9223372036854775807 + + +def _check_schema(table_schema: Schema, other_schema: "pa.Schema") -> None: + from pyiceberg.io.pyarrow import _pyarrow_to_schema_without_ids, pyarrow_to_schema + + name_mapping = table_schema.name_mapping + try: + task_schema = pyarrow_to_schema(other_schema, name_mapping=name_mapping) + except ValueError as e: + other_schema = _pyarrow_to_schema_without_ids(other_schema) + additional_names = set(other_schema.column_names) - set(table_schema.column_names) + raise ValueError( + f"PyArrow table contains more columns: {', '.join(sorted(additional_names))}. Update the schema first (hint, use union_by_name)." + ) from e + + if table_schema.as_struct() != task_schema.as_struct(): + from rich.console import Console + from rich.table import Table as RichTable + + console = Console(record=True) + + rich_table = RichTable(show_header=True, header_style="bold") + rich_table.add_column("") + rich_table.add_column("Table field") + rich_table.add_column("Dataframe field") + + for lhs in table_schema.fields: + try: + rhs = task_schema.find_field(lhs.field_id) + rich_table.add_row("✅" if lhs == rhs else "❌", str(lhs), str(rhs)) + except ValueError: + rich_table.add_row("❌", str(lhs), "Missing") + + console.print(rich_table) + raise ValueError(f"Mismatch in fields:\n{console.export_text()}") + + +class TableProperties: + PARQUET_ROW_GROUP_SIZE_BYTES = "write.parquet.row-group-size-bytes" + PARQUET_ROW_GROUP_SIZE_BYTES_DEFAULT = 128 * 1024 * 1024 # 128 MB + + PARQUET_ROW_GROUP_LIMIT = "write.parquet.row-group-limit" + PARQUET_ROW_GROUP_LIMIT_DEFAULT = 128 * 1024 * 1024 # 128 MB + + PARQUET_PAGE_SIZE_BYTES = "write.parquet.page-size-bytes" + PARQUET_PAGE_SIZE_BYTES_DEFAULT = 1024 * 1024 # 1 MB + + PARQUET_PAGE_ROW_LIMIT = "write.parquet.page-row-limit" + PARQUET_PAGE_ROW_LIMIT_DEFAULT = 20000 + + PARQUET_DICT_SIZE_BYTES = "write.parquet.dict-size-bytes" + PARQUET_DICT_SIZE_BYTES_DEFAULT = 2 * 1024 * 1024 # 2 MB + + PARQUET_COMPRESSION = "write.parquet.compression-codec" + PARQUET_COMPRESSION_DEFAULT = "zstd" + + PARQUET_COMPRESSION_LEVEL = "write.parquet.compression-level" + PARQUET_COMPRESSION_LEVEL_DEFAULT = None + + PARQUET_BLOOM_FILTER_MAX_BYTES = "write.parquet.bloom-filter-max-bytes" + PARQUET_BLOOM_FILTER_MAX_BYTES_DEFAULT = 1024 * 1024 + + PARQUET_BLOOM_FILTER_COLUMN_ENABLED_PREFIX = "write.parquet.bloom-filter-enabled.column" + + DEFAULT_WRITE_METRICS_MODE = "write.metadata.metrics.default" + DEFAULT_WRITE_METRICS_MODE_DEFAULT = "truncate(16)" + + METRICS_MODE_COLUMN_CONF_PREFIX = "write.metadata.metrics.column" + + DEFAULT_NAME_MAPPING = "schema.name-mapping.default" + FORMAT_VERSION = "format-version" + DEFAULT_FORMAT_VERSION = 2 + + +class PropertyUtil: + @staticmethod + def property_as_int(properties: Dict[str, str], property_name: str, default: Optional[int] = None) -> 
Optional[int]: + if value := properties.get(property_name): + try: + return int(value) + except ValueError as e: + raise ValueError(f"Could not parse table property {property_name} to an integer: {value}") from e + else: + return default + class Transaction: _table: Table + table_metadata: TableMetadata + _autocommit: bool _updates: Tuple[TableUpdate, ...] _requirements: Tuple[TableRequirement, ...] - def __init__( - self, - table: Table, - actions: Optional[Tuple[TableUpdate, ...]] = None, - requirements: Optional[Tuple[TableRequirement, ...]] = None, - ): + def __init__(self, table: Table, autocommit: bool = False): + """Open a transaction to stage and commit changes to a table. + + Args: + table: The table that will be altered. + autocommit: Option to automatically commit the changes when they are staged. + """ + self.table_metadata = table.metadata self._table = table - self._updates = actions or () - self._requirements = requirements or () + self._autocommit = autocommit + self._updates = () + self._requirements = () def __enter__(self) -> Transaction: """Start a transaction to update the table.""" @@ -132,49 +254,23 @@ def __enter__(self) -> Transaction: def __exit__(self, _: Any, value: Any, traceback: Any) -> None: """Close and commit the transaction.""" - fresh_table = self.commit_transaction() - # Update the new data in place - self._table.metadata = fresh_table.metadata - self._table.metadata_location = fresh_table.metadata_location - - def _append_updates(self, *new_updates: TableUpdate) -> Transaction: - """Append updates to the set of staged updates. + self.commit_transaction() - Args: - *new_updates: Any new updates. - - Raises: - ValueError: When the type of update is not unique. - - Returns: - Transaction object with the new updates appended. - """ - for new_update in new_updates: - # explicitly get type of new_update as new_update is an instantiated class - type_new_update = type(new_update) - if any(isinstance(update, type_new_update) for update in self._updates): - raise ValueError(f"Updates in a single commit need to be unique, duplicate: {type_new_update}") - self._updates = self._updates + new_updates - return self + def _apply(self, updates: Tuple[TableUpdate, ...], requirements: Tuple[TableRequirement, ...] = ()) -> Transaction: + """Check if the requirements are met, and applies the updates to the metadata.""" + for requirement in requirements: + requirement.validate(self.table_metadata) - def _append_requirements(self, *new_requirements: TableRequirement) -> Transaction: - """Append requirements to the set of staged requirements. + self._updates += updates + self._requirements += requirements - Args: - *new_requirements: Any new requirements. + self.table_metadata = update_table_metadata(self.table_metadata, updates) - Raises: - ValueError: When the type of requirement is not unique. + if self._autocommit: + self.commit_transaction() + self._updates = () + self._requirements = () - Returns: - Transaction object with the new requirements appended. 
- """ - for new_requirement in new_requirements: - # explicitly get type of new_update as requirement is an instantiated class - type_new_requirement = type(new_requirement) - if any(isinstance(requirement, type_new_requirement) for requirement in self._requirements): - raise ValueError(f"Requirements in a single commit need to be unique, duplicate: {type_new_requirement}") - self._requirements = self._requirements + new_requirements return self def upgrade_table_version(self, format_version: Literal[1, 2]) -> Transaction: @@ -191,31 +287,61 @@ def upgrade_table_version(self, format_version: Literal[1, 2]) -> Transaction: if format_version < self._table.metadata.format_version: raise ValueError(f"Cannot downgrade v{self._table.metadata.format_version} table to v{format_version}") + if format_version > self._table.metadata.format_version: - return self._append_updates(UpgradeFormatVersionUpdate(format_version=format_version)) - else: - return self + return self._apply((UpgradeFormatVersionUpdate(format_version=format_version),)) + + return self - def set_properties(self, **updates: str) -> Transaction: + def set_properties(self, properties: Properties = EMPTY_DICT, **kwargs: Any) -> Transaction: """Set properties. When a property is already set, it will be overwritten. Args: - updates: The properties set on the table. + properties: The properties set on the table. + kwargs: properties can also be pass as kwargs. Returns: The alter table builder. """ - return self._append_updates(SetPropertiesUpdate(updates=updates)) + if properties and kwargs: + raise ValueError("Cannot pass both properties and kwargs") + updates = properties or kwargs + return self._apply((SetPropertiesUpdate(updates=updates),)) - def update_schema(self) -> UpdateSchema: + def update_schema(self, allow_incompatible_changes: bool = False, case_sensitive: bool = True) -> UpdateSchema: """Create a new UpdateSchema to alter the columns of this table. + Args: + allow_incompatible_changes: If changes are allowed that might break downstream consumers. + case_sensitive: If field names are case-sensitive. + Returns: A new UpdateSchema. """ - return UpdateSchema(self._table, self) + return UpdateSchema( + self, + allow_incompatible_changes=allow_incompatible_changes, + case_sensitive=case_sensitive, + name_mapping=self._table.name_mapping(), + ) + + def update_snapshot(self) -> UpdateSnapshot: + """Create a new UpdateSnapshot to produce a new snapshot for the table. + + Returns: + A new UpdateSnapshot + """ + return UpdateSnapshot(self, io=self._table.io) + + def update_spec(self) -> UpdateSpec: + """Create a new UpdateSpec to update the partitioning of the table. + + Returns: + A new UpdateSpec. + """ + return UpdateSpec(self) def remove_properties(self, *removals: str) -> Transaction: """Remove properties. @@ -226,7 +352,7 @@ def remove_properties(self, *removals: str) -> Transaction: Returns: The alter table builder. """ - return self._append_updates(RemovePropertiesUpdate(removals=removals)) + return self._apply((RemovePropertiesUpdate(removals=removals),)) def update_location(self, location: str) -> Transaction: """Set the new table location. @@ -245,7 +371,6 @@ def commit_transaction(self) -> Table: Returns: The table with the updates applied. 
""" - # Strip the catalog name if len(self._updates) > 0: self._table._do_commit( # pylint: disable=W0212 updates=self._updates, @@ -354,6 +479,10 @@ class SetPropertiesUpdate(TableUpdate): action: TableUpdateAction = TableUpdateAction.set_properties updates: Dict[str, str] + @field_validator('updates', mode='before') + def transform_properties_dict_value_to_str(cls, properties: Properties) -> Dict[str, str]: + return transform_dict_value_to_str(properties) + class RemovePropertiesUpdate(TableUpdate): action: TableUpdateAction = TableUpdateAction.remove_properties @@ -381,6 +510,13 @@ def is_added_schema(self, schema_id: int) -> bool: update.schema_.schema_id == schema_id for update in self._updates if update.action == TableUpdateAction.add_schema ) + def is_added_sort_order(self, sort_order_id: int) -> bool: + return any( + update.sort_order.order_id == sort_order_id + for update in self._updates + if update.action == TableUpdateAction.add_sort_order + ) + @singledispatch def _apply_table_update(update: TableUpdate, base_metadata: TableMetadata, context: _TableMetadataUpdateContext) -> TableMetadata: @@ -414,6 +550,31 @@ def _(update: UpgradeFormatVersionUpdate, base_metadata: TableMetadata, context: return TableMetadataUtil.parse_obj(updated_metadata_data) +@_apply_table_update.register(SetPropertiesUpdate) +def _(update: SetPropertiesUpdate, base_metadata: TableMetadata, context: _TableMetadataUpdateContext) -> TableMetadata: + if len(update.updates) == 0: + return base_metadata + + properties = dict(base_metadata.properties) + properties.update(update.updates) + + context.add_update(update) + return base_metadata.model_copy(update={"properties": properties}) + + +@_apply_table_update.register(RemovePropertiesUpdate) +def _(update: RemovePropertiesUpdate, base_metadata: TableMetadata, context: _TableMetadataUpdateContext) -> TableMetadata: + if len(update.removals) == 0: + return base_metadata + + properties = dict(base_metadata.properties) + for key in update.removals: + properties.pop(key) + + context.add_update(update) + return base_metadata.model_copy(update={"properties": properties}) + + @_apply_table_update.register(AddSchemaUpdate) def _(update: AddSchemaUpdate, base_metadata: TableMetadata, context: _TableMetadataUpdateContext) -> TableMetadata: if update.last_column_id < base_metadata.last_column_id: @@ -448,6 +609,43 @@ def _(update: SetCurrentSchemaUpdate, base_metadata: TableMetadata, context: _Ta return base_metadata.model_copy(update={"current_schema_id": new_schema_id}) +@_apply_table_update.register(AddPartitionSpecUpdate) +def _(update: AddPartitionSpecUpdate, base_metadata: TableMetadata, context: _TableMetadataUpdateContext) -> TableMetadata: + for spec in base_metadata.partition_specs: + if spec.spec_id == update.spec.spec_id: + raise ValueError(f"Partition spec with id {spec.spec_id} already exists: {spec}") + context.add_update(update) + return base_metadata.model_copy( + update={ + "partition_specs": base_metadata.partition_specs + [update.spec], + "last_partition_id": max( + max(field.field_id for field in update.spec.fields), + base_metadata.last_partition_id or PARTITION_FIELD_ID_START - 1, + ), + } + ) + + +@_apply_table_update.register(SetDefaultSpecUpdate) +def _(update: SetDefaultSpecUpdate, base_metadata: TableMetadata, context: _TableMetadataUpdateContext) -> TableMetadata: + new_spec_id = update.spec_id + if new_spec_id == -1: + new_spec_id = max(spec.spec_id for spec in base_metadata.partition_specs) + if new_spec_id == base_metadata.default_spec_id: + 
return base_metadata + found_spec_id = False + for spec in base_metadata.partition_specs: + found_spec_id = spec.spec_id == new_spec_id + if found_spec_id: + break + + if not found_spec_id: + raise ValueError(f"Failed to find spec with id {new_spec_id}") + + context.add_update(update) + return base_metadata.model_copy(update={"default_spec_id": new_spec_id}) + + @_apply_table_update.register(AddSnapshotUpdate) def _(update: AddSnapshotUpdate, base_metadata: TableMetadata, context: _TableMetadataUpdateContext) -> TableMetadata: if len(base_metadata.schemas) == 0: @@ -518,6 +716,36 @@ def _(update: SetSnapshotRefUpdate, base_metadata: TableMetadata, context: _Tabl return base_metadata.model_copy(update=metadata_updates) +@_apply_table_update.register(AddSortOrderUpdate) +def _(update: AddSortOrderUpdate, base_metadata: TableMetadata, context: _TableMetadataUpdateContext) -> TableMetadata: + context.add_update(update) + return base_metadata.model_copy( + update={ + "sort_orders": base_metadata.sort_orders + [update.sort_order], + } + ) + + +@_apply_table_update.register(SetDefaultSortOrderUpdate) +def _(update: SetDefaultSortOrderUpdate, base_metadata: TableMetadata, context: _TableMetadataUpdateContext) -> TableMetadata: + new_sort_order_id = update.sort_order_id + if new_sort_order_id == -1: + # The last added sort order should be in base_metadata.sort_orders at this point + new_sort_order_id = max(sort_order.order_id for sort_order in base_metadata.sort_orders) + if not context.is_added_sort_order(new_sort_order_id): + raise ValueError("Cannot set current sort order to the last added one when no sort order has been added") + + if new_sort_order_id == base_metadata.default_sort_order_id: + return base_metadata + + sort_order = base_metadata.sort_order_by_id(new_sort_order_id) + if sort_order is None: + raise ValueError(f"Sort order with id {new_sort_order_id} does not exist") + + context.add_update(update) + return base_metadata.model_copy(update={"default_sort_order_id": new_sort_order_id}) + + def update_table_metadata(base_metadata: TableMetadata, updates: Tuple[TableUpdate, ...]) -> TableMetadata: """Update the table metadata with the given updates in one transaction. @@ -583,7 +811,7 @@ class AssertRefSnapshotId(TableRequirement): """ type: Literal["assert-ref-snapshot-id"] = Field(default="assert-ref-snapshot-id") - ref: str + ref: str = Field(...) 
snapshot_id: Optional[int] = Field(default=None, alias="snapshot-id") def validate(self, base_metadata: Optional[TableMetadata]) -> None: @@ -635,7 +863,7 @@ class AssertLastAssignedPartitionId(TableRequirement): """The table's last assigned partition id must match the requirement's `last-assigned-partition-id`.""" type: Literal["assert-last-assigned-partition-id"] = Field(default="assert-last-assigned-partition-id") - last_assigned_partition_id: int = Field(..., alias="last-assigned-partition-id") + last_assigned_partition_id: Optional[int] = Field(..., alias="last-assigned-partition-id") def validate(self, base_metadata: Optional[TableMetadata]) -> None: if base_metadata is None: @@ -676,6 +904,9 @@ def validate(self, base_metadata: Optional[TableMetadata]) -> None: ) +UpdatesAndRequirements = Tuple[Tuple[TableUpdate, ...], Tuple[TableRequirement, ...]] + + class Namespace(IcebergRootModel[List[str]]): """Reference to one or more levels of a namespace.""" @@ -720,6 +951,11 @@ def __init__( self.catalog = catalog def transaction(self) -> Transaction: + """Create a new transaction object to first stage the changes, and then commit them to the catalog. + + Returns: + The transaction object + """ return Transaction(self) def refresh(self) -> Table: @@ -783,6 +1019,12 @@ def sort_orders(self) -> Dict[int, SortOrder]: """Return a dict of the sort orders of this table.""" return {sort_order.order_id: sort_order for sort_order in self.metadata.sort_orders} + def last_partition_id(self) -> int: + """Return the highest assigned partition field ID across all specs or 999 if only the unpartitioned spec exists.""" + if self.metadata.last_partition_id: + return self.metadata.last_partition_id + return PARTITION_FIELD_ID_START - 1 + @property def properties(self) -> Dict[str, str]: """Properties of the table.""" @@ -796,21 +1038,10 @@ def location(self) -> str: def last_sequence_number(self) -> int: return self.metadata.last_sequence_number - def _next_sequence_number(self) -> int: - return INITIAL_SEQUENCE_NUMBER if self.format_version == 1 else self.last_sequence_number + 1 - - def new_snapshot_id(self) -> int: - """Generate a new snapshot-id that's not in use.""" - snapshot_id = _generate_snapshot_id() - while self.snapshot_by_id(snapshot_id) is not None: - snapshot_id = _generate_snapshot_id() - - return snapshot_id - def current_snapshot(self) -> Optional[Snapshot]: """Get the current snapshot for this table, or None if there is no current snapshot.""" - if snapshot_id := self.metadata.current_snapshot_id: - return self.snapshot_by_id(snapshot_id) + if self.metadata.current_snapshot_id is not None: + return self.snapshot_by_id(self.metadata.current_snapshot_id) return None def snapshot_by_id(self, snapshot_id: int) -> Optional[Snapshot]: @@ -828,7 +1059,96 @@ def history(self) -> List[SnapshotLogEntry]: return self.metadata.snapshot_log def update_schema(self, allow_incompatible_changes: bool = False, case_sensitive: bool = True) -> UpdateSchema: - return UpdateSchema(self, allow_incompatible_changes=allow_incompatible_changes, case_sensitive=case_sensitive) + """Create a new UpdateSchema to alter the columns of this table. + + Args: + allow_incompatible_changes: If changes are allowed that might break downstream consumers. + case_sensitive: If field names are case-sensitive. + + Returns: + A new UpdateSchema. 
+ """ + return UpdateSchema( + transaction=Transaction(self, autocommit=True), + allow_incompatible_changes=allow_incompatible_changes, + case_sensitive=case_sensitive, + name_mapping=self.name_mapping(), + ) + + def name_mapping(self) -> Optional[NameMapping]: + """Return the table's field-id NameMapping.""" + if name_mapping_json := self.properties.get(TableProperties.DEFAULT_NAME_MAPPING): + return parse_mapping_from_json(name_mapping_json) + else: + return None + + def append(self, df: pa.Table) -> None: + """ + Shorthand API for appending a PyArrow table to the table. + + Args: + df: The Arrow dataframe that will be appended to overwrite the table + """ + try: + import pyarrow as pa + except ModuleNotFoundError as e: + raise ModuleNotFoundError("For writes PyArrow needs to be installed") from e + + if not isinstance(df, pa.Table): + raise ValueError(f"Expected PyArrow table, got: {df}") + + if len(self.spec().fields) > 0: + raise ValueError("Cannot write to partitioned tables") + + _check_schema(self.schema(), other_schema=df.schema) + + with self.transaction() as txn: + with txn.update_snapshot().fast_append() as update_snapshot: + # skip writing data files if the dataframe is empty + if df.shape[0] > 0: + data_files = _dataframe_to_data_files( + table_metadata=self.metadata, write_uuid=update_snapshot.commit_uuid, df=df, io=self.io + ) + for data_file in data_files: + update_snapshot.append_data_file(data_file) + + def overwrite(self, df: pa.Table, overwrite_filter: BooleanExpression = ALWAYS_TRUE) -> None: + """ + Shorthand for overwriting the table with a PyArrow table. + + Args: + df: The Arrow dataframe that will be used to overwrite the table + overwrite_filter: ALWAYS_TRUE when you overwrite all the data, + or a boolean expression in case of a partial overwrite + """ + try: + import pyarrow as pa + except ModuleNotFoundError as e: + raise ModuleNotFoundError("For writes PyArrow needs to be installed") from e + + if not isinstance(df, pa.Table): + raise ValueError(f"Expected PyArrow table, got: {df}") + + if overwrite_filter != AlwaysTrue(): + raise NotImplementedError("Cannot overwrite a subset of a table") + + if len(self.spec().fields) > 0: + raise ValueError("Cannot write to partitioned tables") + + _check_schema(self.schema(), other_schema=df.schema) + + with self.transaction() as txn: + with txn.update_snapshot().overwrite() as update_snapshot: + # skip writing data files if the dataframe is empty + if df.shape[0] > 0: + data_files = _dataframe_to_data_files( + table_metadata=self.metadata, write_uuid=update_snapshot.commit_uuid, df=df, io=self.io + ) + for data_file in data_files: + update_snapshot.append_data_file(data_file) + + def update_spec(self, case_sensitive: bool = True) -> UpdateSpec: + return UpdateSpec(Transaction(self, autocommit=True), case_sensitive=case_sensitive) def refs(self) -> Dict[str, SnapshotRef]: """Return the snapshot references in the table.""" @@ -865,6 +1185,16 @@ def __repr__(self) -> str: result_str = f"{table_name}(\n {schema_str}\n),\n{partition_str},\n{sort_order_str},\n{snapshot_str}" return result_str + def to_daft(self) -> daft.DataFrame: + """Read a Daft DataFrame lazily from this Iceberg table. 
+ + Returns: + daft.DataFrame: Unmaterialized Daft Dataframe created from the Iceberg table + """ + import daft + + return daft.read_iceberg(self) + class StaticTable(Table): """Load a table directly from a metadata file (i.e., without using a catalog).""" @@ -942,27 +1272,32 @@ def snapshot(self) -> Optional[Snapshot]: return self.table.current_snapshot() def projection(self) -> Schema: - snapshot_schema = self.table.schema() - if snapshot := self.snapshot(): - if snapshot.schema_id is not None: - snapshot_schema = self.table.schemas()[snapshot.schema_id] + current_schema = self.table.schema() + if self.snapshot_id is not None: + snapshot = self.table.snapshot_by_id(self.snapshot_id) + if snapshot is not None: + if snapshot.schema_id is not None: + snapshot_schema = self.table.schemas().get(snapshot.schema_id) + if snapshot_schema is not None: + current_schema = snapshot_schema + else: + warnings.warn(f"Metadata does not contain schema with id: {snapshot.schema_id}") + else: + raise ValueError(f"Snapshot not found: {self.snapshot_id}") if "*" in self.selected_fields: - return snapshot_schema + return current_schema - return snapshot_schema.select(*self.selected_fields, case_sensitive=self.case_sensitive) + return current_schema.select(*self.selected_fields, case_sensitive=self.case_sensitive) @abstractmethod - def plan_files(self) -> Iterable[ScanTask]: - ... + def plan_files(self) -> Iterable[ScanTask]: ... @abstractmethod - def to_arrow(self) -> pa.Table: - ... + def to_arrow(self) -> pa.Table: ... @abstractmethod - def to_pandas(self, **kwargs: Any) -> pd.DataFrame: - ... + def to_pandas(self, **kwargs: Any) -> pd.DataFrame: ... def update(self: S, **overrides: Any) -> S: """Create a copy of this table scan with updated fields.""" @@ -1037,7 +1372,7 @@ def _min_data_file_sequence_number(manifests: List[ManifestFile]) -> int: return INITIAL_SEQUENCE_NUMBER -def _match_deletes_to_datafile(data_entry: ManifestEntry, positional_delete_entries: SortedList[ManifestEntry]) -> Set[DataFile]: +def _match_deletes_to_data_file(data_entry: ManifestEntry, positional_delete_entries: SortedList[ManifestEntry]) -> Set[DataFile]: """Check if the delete file is relevant for the data file. Using the column metrics to see if the filename is in the lower and upper bound. 
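The bounds check described in that docstring can be exercised on its own. A minimal sketch of the pruning idea, where may_contain_deletes_for is a hypothetical helper and delete_file is assumed to be a positional-delete DataFile:

from pyiceberg.expressions import EqualTo
from pyiceberg.expressions.visitors import _InclusiveMetricsEvaluator
from pyiceberg.manifest import POSITIONAL_DELETE_SCHEMA

def may_contain_deletes_for(delete_file, data_file_path: str) -> bool:
    # A positional delete file records the file_path of every data file it
    # touches, so if data_file_path falls outside the delete file's
    # lower/upper bounds for that column, the delete file can be skipped.
    evaluator = _InclusiveMetricsEvaluator(POSITIONAL_DELETE_SCHEMA, EqualTo("file_path", data_file_path))
    return evaluator.eval(delete_file)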
@@ -1052,7 +1387,9 @@ def _match_deletes_to_datafile(data_entry: ManifestEntry, positional_delete_entr relevant_entries = positional_delete_entries[positional_delete_entries.bisect_right(data_entry) :] if len(relevant_entries) > 0: - evaluator = _InclusiveMetricsEvaluator(POSITIONAL_DELETE_SCHEMA, EqualTo("file_path", data_entry.data_file.file_path)) + evaluator = visitors._InclusiveMetricsEvaluator( + POSITIONAL_DELETE_SCHEMA, EqualTo("file_path", data_entry.data_file.file_path) + ) return { positional_delete_entry.data_file for positional_delete_entry in relevant_entries @@ -1076,7 +1413,7 @@ def __init__( super().__init__(table, row_filter, selected_fields, case_sensitive, snapshot_id, options, limit) def _build_partition_projection(self, spec_id: int) -> BooleanExpression: - project = inclusive_projection(self.table.schema(), self.table.specs()[spec_id]) + project = visitors.inclusive_projection(self.table.schema(), self.table.specs()[spec_id]) return project(self.row_filter) @cached_property @@ -1143,7 +1480,7 @@ def plan_files(self) -> Iterable[FileScanTask]: # this filter depends on the partition spec used to write the manifest file partition_evaluators: Dict[int, Callable[[DataFile], bool]] = KeyDefaultDict(self._build_partition_evaluator) - metrics_evaluator = _InclusiveMetricsEvaluator( + metrics_evaluator = visitors._InclusiveMetricsEvaluator( self.table.schema(), self.row_filter, self.case_sensitive, self.options.get("include_empty_files") == "true" ).eval @@ -1181,7 +1518,7 @@ def plan_files(self) -> Iterable[FileScanTask]: return [ FileScanTask( data_entry.data_file, - delete_files=_match_deletes_to_datafile( + delete_files=_match_deletes_to_data_file( data_entry, positional_delete_entries, ), @@ -1232,8 +1569,31 @@ class Move: other_field_id: Optional[int] = None -class UpdateSchema: - _table: Table +U = TypeVar('U') + + +class UpdateTableMetadata(ABC, Generic[U]): + _transaction: Transaction + + def __init__(self, transaction: Transaction) -> None: + self._transaction = transaction + + @abstractmethod + def _commit(self) -> UpdatesAndRequirements: ... 
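Because every UpdateTableMetadata subclass inherits this __enter__/__exit__ pair, each update object doubles as a context manager that commits on exit. A short usage sketch, assuming `table` is an already-loaded Table:

from pyiceberg.types import LongType

# Leaving the `with` block calls commit(), which hands _commit()'s
# (updates, requirements) to the underlying transaction.
with table.update_schema() as update:
    update.add_column("row_count", LongType(), doc="number of rows")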
+ + def commit(self) -> None: + self._transaction._apply(*self._commit()) + + def __exit__(self, _: Any, value: Any, traceback: Any) -> None: + """Close and commit the change.""" + self.commit() + + def __enter__(self) -> U: + """Update the table.""" + return self # type: ignore + + +class UpdateSchema(UpdateTableMetadata["UpdateSchema"]): _schema: Schema _last_column_id: itertools.count[int] _identifier_field_names: Set[str] @@ -1248,18 +1608,25 @@ class UpdateSchema: _id_to_parent: Dict[int, str] = {} _allow_incompatible_changes: bool _case_sensitive: bool - _transaction: Optional[Transaction] def __init__( self, - table: Table, - transaction: Optional[Transaction] = None, + transaction: Transaction, allow_incompatible_changes: bool = False, case_sensitive: bool = True, + schema: Optional[Schema] = None, + name_mapping: Optional[NameMapping] = None, ) -> None: - self._table = table - self._schema = table.schema() - self._last_column_id = itertools.count(table.metadata.last_column_id + 1) + super().__init__(transaction) + + if isinstance(schema, Schema): + self._schema = schema + self._last_column_id = itertools.count(1 + schema.highest_field_id) + else: + self._schema = self._transaction.table_metadata.schema() + self._last_column_id = itertools.count(1 + self._transaction.table_metadata.last_column_id) + + self._name_mapping = name_mapping self._identifier_field_names = self._schema.identifier_field_names() self._adds = {} @@ -1283,14 +1650,6 @@ def get_column_name(field_id: int) -> str: self._case_sensitive = case_sensitive self._transaction = transaction - def __exit__(self, _: Any, value: Any, traceback: Any) -> None: - """Close and commit the change.""" - return self.commit() - - def __enter__(self) -> UpdateSchema: - """Update the table.""" - return self - def case_sensitive(self, case_sensitive: bool) -> UpdateSchema: """Determine if the case of schema needs to be considered when comparing column names. 
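With the reworked constructor, fresh column ids come from a counter seeded just past the highest known field id. A tiny sketch of that behaviour, using a hypothetical last_column_id:

import itertools

last_column_id = 7  # e.g. TableMetadata.last_column_id
new_ids = itertools.count(1 + last_column_id)

# assign_new_column_id() is simply next() on this counter, so the next
# two added columns receive ids 8 and 9:
print(next(new_ids), next(new_ids))  # 8 9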
@@ -1303,6 +1662,17 @@ def case_sensitive(self, case_sensitive: bool) -> UpdateSchema: self._case_sensitive = case_sensitive return self + def union_by_name(self, new_schema: Union[Schema, "pa.Schema"]) -> UpdateSchema: + from pyiceberg.catalog import Catalog + + visit_with_partner( + Catalog._convert_schema_if_needed(new_schema), + -1, + UnionByNameVisitor(update_schema=self, existing_schema=self._schema, case_sensitive=self._case_sensitive), # type: ignore + PartnerIdByNameAccessor(partner_schema=self._schema, case_sensitive=self._case_sensitive), + ) + return self + def add_column( self, path: Union[str, Tuple[str, ...]], field_type: IcebergType, doc: Optional[str] = None, required: bool = False ) -> UpdateSchema: @@ -1438,7 +1808,7 @@ def rename_column(self, path_from: Union[str, Tuple[str, ...]], new_name: str) - from_field_correct_casing = self._schema.find_column_name(field_from.field_id) if from_field_correct_casing in self._identifier_field_names: self._identifier_field_names.remove(from_field_correct_casing) - new_identifier_path = f"{from_field_correct_casing[:-len(field_from.name)]}{new_name}" + new_identifier_path = f"{from_field_correct_casing[: -len(field_from.name)]}{new_name}" self._identifier_field_names.add(new_identifier_path) return self @@ -1668,29 +2038,36 @@ def move_after(self, path: Union[str, Tuple[str, ...]], after_name: Union[str, T return self - def commit(self) -> None: + def _commit(self) -> UpdatesAndRequirements: """Apply the pending changes and commit.""" new_schema = self._apply() - existing_schema_id = next((schema.schema_id for schema in self._table.metadata.schemas if schema == new_schema), None) + existing_schema_id = next( + (schema.schema_id for schema in self._transaction.table_metadata.schemas if schema == new_schema), None + ) + + requirements: Tuple[TableRequirement, ...] = () + updates: Tuple[TableUpdate, ...] = () # Check if it is different current schema ID - if existing_schema_id != self._table.schema().schema_id: - requirements = (AssertCurrentSchemaId(current_schema_id=self._schema.schema_id),) + if existing_schema_id != self._schema.schema_id: + requirements += (AssertCurrentSchemaId(current_schema_id=self._schema.schema_id),) if existing_schema_id is None: - last_column_id = max(self._table.metadata.last_column_id, new_schema.highest_field_id) - updates = ( + last_column_id = max(self._transaction.table_metadata.last_column_id, new_schema.highest_field_id) + updates += ( AddSchemaUpdate(schema=new_schema, last_column_id=last_column_id), SetCurrentSchemaUpdate(schema_id=-1), ) else: - updates = (SetCurrentSchemaUpdate(schema_id=existing_schema_id),) # type: ignore + updates += (SetCurrentSchemaUpdate(schema_id=existing_schema_id),) - if self._transaction is not None: - self._transaction._append_updates(*updates) # pylint: disable=W0212 - self._transaction._append_requirements(*requirements) # pylint: disable=W0212 - else: - self._table._do_commit(updates=updates, requirements=requirements) # pylint: disable=W0212 + if name_mapping := self._name_mapping: + updated_name_mapping = update_mapping(name_mapping, self._updates, self._adds) + updates += ( + SetPropertiesUpdate(updates={TableProperties.DEFAULT_NAME_MAPPING: updated_name_mapping.model_dump_json()}), + ) + + return updates, requirements def _apply(self) -> Schema: """Apply the pending changes to the original schema and returns the result. 
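A minimal usage sketch of the new union_by_name entry point, assuming `table` is an existing Table and pyarrow is installed; the incoming schema here is hypothetical:

import pyarrow as pa

incoming = pa.schema([
    ("id", pa.int64()),
    ("note", pa.string()),  # not yet present in the table's schema
])

# union_by_name converts the pyarrow schema and stages the missing columns;
# on commit, _commit() above also refreshes the name mapping stored under
# the schema.name-mapping.default property.
with table.update_schema() as update:
    update.union_by_name(incoming)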
@@ -1716,7 +2093,14 @@ def _apply(self) -> Schema: field_ids.add(field.field_id) - return Schema(*struct.fields, schema_id=1 + max(self._table.schemas().keys()), identifier_field_ids=field_ids) + if txn := self._transaction: + next_schema_id = 1 + ( + max(schema.schema_id for schema in txn.table_metadata.schemas) if txn.table_metadata is not None else 0 + ) + else: + next_schema_id = 0 + + return Schema(*struct.fields, schema_id=next_schema_id, identifier_field_ids=field_ids) def assign_new_column_id(self) -> int: return next(self._last_column_id) @@ -1849,6 +2233,159 @@ def primitive(self, primitive: PrimitiveType) -> Optional[IcebergType]: return primitive +class UnionByNameVisitor(SchemaWithPartnerVisitor[int, bool]): + update_schema: UpdateSchema + existing_schema: Schema + case_sensitive: bool + + def __init__(self, update_schema: UpdateSchema, existing_schema: Schema, case_sensitive: bool) -> None: + self.update_schema = update_schema + self.existing_schema = existing_schema + self.case_sensitive = case_sensitive + + def schema(self, schema: Schema, partner_id: Optional[int], struct_result: bool) -> bool: + return struct_result + + def struct(self, struct: StructType, partner_id: Optional[int], missing_positions: List[bool]) -> bool: + if partner_id is None: + return True + + fields = struct.fields + partner_struct = self._find_field_type(partner_id) + + if not partner_struct.is_struct: + raise ValueError(f"Expected a struct, got: {partner_struct}") + + for pos, missing in enumerate(missing_positions): + if missing: + self._add_column(partner_id, fields[pos]) + else: + field = fields[pos] + if nested_field := partner_struct.field_by_name(field.name, case_sensitive=self.case_sensitive): + self._update_column(field, nested_field) + + return False + + def _add_column(self, parent_id: int, field: NestedField) -> None: + if parent_name := self.existing_schema.find_column_name(parent_id): + path: Tuple[str, ...] 
= (parent_name, field.name) + else: + path = (field.name,) + + self.update_schema.add_column(path=path, field_type=field.field_type, required=field.required, doc=field.doc) + + def _update_column(self, field: NestedField, existing_field: NestedField) -> None: + full_name = self.existing_schema.find_column_name(existing_field.field_id) + + if full_name is None: + raise ValueError(f"Could not find field: {existing_field}") + + if field.optional and existing_field.required: + self.update_schema.make_column_optional(full_name) + + if field.field_type.is_primitive and field.field_type != existing_field.field_type: + self.update_schema.update_column(full_name, field_type=field.field_type) + + if field.doc is not None and field.doc != existing_field.doc: + self.update_schema.update_column(full_name, doc=field.doc) + + def _find_field_type(self, field_id: int) -> IcebergType: + if field_id == -1: + return self.existing_schema.as_struct() + else: + return self.existing_schema.find_field(field_id).field_type + + def field(self, field: NestedField, partner_id: Optional[int], field_result: bool) -> bool: + return partner_id is None + + def list(self, list_type: ListType, list_partner_id: Optional[int], element_missing: bool) -> bool: + if list_partner_id is None: + return True + + if element_missing: + raise ValueError("Error traversing schemas: element is missing, but list is present") + + partner_list_type = self._find_field_type(list_partner_id) + if not isinstance(partner_list_type, ListType): + raise ValueError(f"Expected list-type, got: {partner_list_type}") + + self._update_column(list_type.element_field, partner_list_type.element_field) + + return False + + def map(self, map_type: MapType, map_partner_id: Optional[int], key_missing: bool, value_missing: bool) -> bool: + if map_partner_id is None: + return True + + if key_missing: + raise ValueError("Error traversing schemas: key is missing, but map is present") + + if value_missing: + raise ValueError("Error traversing schemas: value is missing, but map is present") + + partner_map_type = self._find_field_type(map_partner_id) + if not isinstance(partner_map_type, MapType): + raise ValueError(f"Expected map-type, got: {partner_map_type}") + + self._update_column(map_type.key_field, partner_map_type.key_field) + self._update_column(map_type.value_field, partner_map_type.value_field) + + return False + + def primitive(self, primitive: PrimitiveType, primitive_partner_id: Optional[int]) -> bool: + return primitive_partner_id is None + + +class PartnerIdByNameAccessor(PartnerAccessor[int]): + partner_schema: Schema + case_sensitive: bool + + def __init__(self, partner_schema: Schema, case_sensitive: bool) -> None: + self.partner_schema = partner_schema + self.case_sensitive = case_sensitive + + def schema_partner(self, partner: Optional[int]) -> Optional[int]: + return -1 + + def field_partner(self, partner_field_id: Optional[int], field_id: int, field_name: str) -> Optional[int]: + if partner_field_id is not None: + if partner_field_id == -1: + struct = self.partner_schema.as_struct() + else: + struct = self.partner_schema.find_field(partner_field_id).field_type + if not struct.is_struct: + raise ValueError(f"Expected StructType: {struct}") + + if field := struct.field_by_name(name=field_name, case_sensitive=self.case_sensitive): + return field.field_id + + return None + + def list_element_partner(self, partner_list_id: Optional[int]) -> Optional[int]: + if partner_list_id is not None and (field :=
self.partner_schema.find_field(partner_list_id)): + if not isinstance(field.field_type, ListType): + raise ValueError(f"Expected ListType: {field}") + return field.field_type.element_field.field_id + else: + return None + + def map_key_partner(self, partner_map_id: Optional[int]) -> Optional[int]: + if partner_map_id is not None and (field := self.partner_schema.find_field(partner_map_id)): + if not isinstance(field.field_type, MapType): + raise ValueError(f"Expected MapType: {field}") + return field.field_type.key_field.field_id + else: + return None + + def map_value_partner(self, partner_map_id: Optional[int]) -> Optional[int]: + if partner_map_id is not None and (field := self.partner_schema.find_field(partner_map_id)): + if not isinstance(field.field_type, MapType): + raise ValueError(f"Expected MapType: {field}") + return field.field_type.value_field.field_id + else: + return None + + def _add_fields(fields: Tuple[NestedField, ...], adds: Optional[List[NestedField]]) -> Tuple[NestedField, ...]: adds = adds or [] return fields + tuple(adds) @@ -1892,15 +2429,536 @@ def _add_and_move_fields( return None if len(adds) == 0 else tuple(*fields, *adds) -def _generate_snapshot_id() -> int: - """Generate a new Snapshot ID from a UUID. +@dataclass(frozen=True) +class WriteTask: + write_uuid: uuid.UUID + task_id: int + df: pa.Table + sort_order_id: Optional[int] = None + + # Later to be extended with partition information + + def generate_data_file_filename(self, extension: str) -> str: + # Mimics the behavior in the Java API: + # https://github.com/apache/iceberg/blob/a582968975dd30ff4917fbbe999f1be903efac02/core/src/main/java/org/apache/iceberg/io/OutputFileFactory.java#L92-L101 + return f"00000-{self.task_id}-{self.write_uuid}.{extension}" + + +def _new_manifest_path(location: str, num: int, commit_uuid: uuid.UUID) -> str: + return f'{location}/metadata/{commit_uuid}-m{num}.avro' + + +def _generate_manifest_list_path(location: str, snapshot_id: int, attempt: int, commit_uuid: uuid.UUID) -> str: + # Mimics the behavior in Java: + # https://github.com/apache/iceberg/blob/c862b9177af8e2d83122220764a056f3b96fd00c/core/src/main/java/org/apache/iceberg/SnapshotProducer.java#L491 + return f'{location}/metadata/snap-{snapshot_id}-{attempt}-{commit_uuid}.avro' + - Returns: An 64 bit long +def _dataframe_to_data_files( + table_metadata: TableMetadata, df: pa.Table, io: FileIO, write_uuid: Optional[uuid.UUID] = None +) -> Iterable[DataFile]: + """Convert a PyArrow table into a DataFile. + + Returns: + An iterable that supplies datafiles that represent the table. 
""" - rnd_uuid = uuid.uuid4() - snapshot_id = int.from_bytes( - bytes(lhs ^ rhs for lhs, rhs in zip(rnd_uuid.bytes[0:8], rnd_uuid.bytes[8:16])), byteorder='little', signed=True - ) - snapshot_id = snapshot_id if snapshot_id >= 0 else snapshot_id * -1 + from pyiceberg.io.pyarrow import write_file + + if len([spec for spec in table_metadata.partition_specs if spec.spec_id != 0]) > 0: + raise ValueError("Cannot write to partitioned tables") + + counter = itertools.count(0) + write_uuid = write_uuid or uuid.uuid4() + + # This is an iter, so we don't have to materialize everything every time + # This will be more relevant when we start doing partitioned writes + yield from write_file(io=io, table_metadata=table_metadata, tasks=iter([WriteTask(write_uuid, next(counter), df)])) - return snapshot_id + +class _MergingSnapshotProducer(UpdateTableMetadata["_MergingSnapshotProducer"]): + commit_uuid: uuid.UUID + _operation: Operation + _snapshot_id: int + _parent_snapshot_id: Optional[int] + _added_data_files: List[DataFile] + + def __init__( + self, + operation: Operation, + transaction: Transaction, + io: FileIO, + commit_uuid: Optional[uuid.UUID] = None, + ) -> None: + super().__init__(transaction) + self.commit_uuid = commit_uuid or uuid.uuid4() + self._io = io + self._operation = operation + self._snapshot_id = self._transaction.table_metadata.new_snapshot_id() + # Since we only support the main branch for now + self._parent_snapshot_id = ( + snapshot.snapshot_id if (snapshot := self._transaction.table_metadata.current_snapshot()) else None + ) + self._added_data_files = [] + + def append_data_file(self, data_file: DataFile) -> _MergingSnapshotProducer: + self._added_data_files.append(data_file) + return self + + @abstractmethod + def _deleted_entries(self) -> List[ManifestEntry]: ... + + @abstractmethod + def _existing_manifests(self) -> List[ManifestFile]: ... 
+ + def _manifests(self) -> List[ManifestFile]: + def _write_added_manifest() -> List[ManifestFile]: + if self._added_data_files: + output_file_location = _new_manifest_path( + location=self._transaction.table_metadata.location, num=0, commit_uuid=self.commit_uuid + ) + with write_manifest( + format_version=self._transaction.table_metadata.format_version, + spec=self._transaction.table_metadata.spec(), + schema=self._transaction.table_metadata.schema(), + output_file=self._io.new_output(output_file_location), + snapshot_id=self._snapshot_id, + ) as writer: + for data_file in self._added_data_files: + writer.add_entry( + ManifestEntry( + status=ManifestEntryStatus.ADDED, + snapshot_id=self._snapshot_id, + data_sequence_number=None, + file_sequence_number=None, + data_file=data_file, + ) + ) + return [writer.to_manifest_file()] + else: + return [] + + def _write_delete_manifest() -> List[ManifestFile]: + # Check if we need to mark the files as deleted + deleted_entries = self._deleted_entries() + if len(deleted_entries) > 0: + output_file_location = _new_manifest_path( + location=self._transaction.table_metadata.location, num=1, commit_uuid=self.commit_uuid + ) + + with write_manifest( + format_version=self._transaction.table_metadata.format_version, + spec=self._transaction.table_metadata.spec(), + schema=self._transaction.table_metadata.schema(), + output_file=self._io.new_output(output_file_location), + snapshot_id=self._snapshot_id, + ) as writer: + for delete_entry in deleted_entries: + writer.add_entry(delete_entry) + return [writer.to_manifest_file()] + else: + return [] + + executor = ExecutorFactory.get_or_create() + + added_manifests = executor.submit(_write_added_manifest) + delete_manifests = executor.submit(_write_delete_manifest) + existing_manifests = executor.submit(self._existing_manifests) + + return added_manifests.result() + delete_manifests.result() + existing_manifests.result() + + def _summary(self) -> Summary: + ssc = SnapshotSummaryCollector() + + for data_file in self._added_data_files: + ssc.add_file(data_file=data_file) + + previous_snapshot = ( + self._transaction.table_metadata.snapshot_by_id(self._parent_snapshot_id) + if self._parent_snapshot_id is not None + else None + ) + + return update_snapshot_summaries( + summary=Summary(operation=self._operation, **ssc.build()), + previous_summary=previous_snapshot.summary if previous_snapshot is not None else None, + truncate_full_table=self._operation == Operation.OVERWRITE, + ) + + def _commit(self) -> UpdatesAndRequirements: + new_manifests = self._manifests() + next_sequence_number = self._transaction.table_metadata.next_sequence_number() + + summary = self._summary() + + manifest_list_file_path = _generate_manifest_list_path( + location=self._transaction.table_metadata.location, + snapshot_id=self._snapshot_id, + attempt=0, + commit_uuid=self.commit_uuid, + ) + with write_manifest_list( + format_version=self._transaction.table_metadata.format_version, + output_file=self._io.new_output(manifest_list_file_path), + snapshot_id=self._snapshot_id, + parent_snapshot_id=self._parent_snapshot_id, + sequence_number=next_sequence_number, + ) as writer: + writer.add_manifests(new_manifests) + + snapshot = Snapshot( + snapshot_id=self._snapshot_id, + parent_snapshot_id=self._parent_snapshot_id, + manifest_list=manifest_list_file_path, + sequence_number=next_sequence_number, + summary=summary, + schema_id=self._transaction.table_metadata.current_schema_id, + ) + + return ( + ( + AddSnapshotUpdate(snapshot=snapshot), + 
SetSnapshotRefUpdate(
+                    snapshot_id=self._snapshot_id, parent_snapshot_id=self._parent_snapshot_id, ref_name="main", type="branch"
+                ),
+            ),
+            (
+                AssertTableUUID(uuid=self._transaction.table_metadata.table_uuid),
+                AssertRefSnapshotId(snapshot_id=self._parent_snapshot_id, ref="main"),
+            ),
+        )
+
+
+class FastAppendFiles(_MergingSnapshotProducer):
+    def _existing_manifests(self) -> List[ManifestFile]:
+        """Determine if there are any existing manifest files.
+
+        A fast append will add another ManifestFile to the ManifestList.
+        All the existing manifest files are considered existing.
+        """
+        existing_manifests = []
+
+        if self._parent_snapshot_id is not None:
+            previous_snapshot = self._transaction.table_metadata.snapshot_by_id(self._parent_snapshot_id)
+
+            if previous_snapshot is None:
+                raise ValueError(f"Snapshot could not be found: {self._parent_snapshot_id}")
+
+            for manifest in previous_snapshot.manifests(io=self._io):
+                if manifest.has_added_files() or manifest.has_existing_files() or manifest.added_snapshot_id == self._snapshot_id:
+                    existing_manifests.append(manifest)
+
+        return existing_manifests
+
+    def _deleted_entries(self) -> List[ManifestEntry]:
+        """Determine if we need to record any deleted manifest entries.
+
+        In case of an append, nothing is deleted.
+        """
+        return []
+
+
+class OverwriteFiles(_MergingSnapshotProducer):
+    def _existing_manifests(self) -> List[ManifestFile]:
+        """Determine if there are any existing manifest files.
+
+        In the case of a full overwrite, all the previous manifests are
+        considered deleted.
+        """
+        return []
+
+    def _deleted_entries(self) -> List[ManifestEntry]:
+        """Determine if we need to record any deleted entries.
+
+        With a full overwrite, all the entries are considered deleted.
+        With partial overwrites we have to use the predicate to evaluate
+        which entries are affected.
+ """ + if self._parent_snapshot_id is not None: + previous_snapshot = self._transaction.table_metadata.snapshot_by_id(self._parent_snapshot_id) + if previous_snapshot is None: + # This should never happen since you cannot overwrite an empty table + raise ValueError(f"Could not find the previous snapshot: {self._parent_snapshot_id}") + + executor = ExecutorFactory.get_or_create() + + def _get_entries(manifest: ManifestFile) -> List[ManifestEntry]: + return [ + ManifestEntry( + status=ManifestEntryStatus.DELETED, + snapshot_id=entry.snapshot_id, + data_sequence_number=entry.data_sequence_number, + file_sequence_number=entry.file_sequence_number, + data_file=entry.data_file, + ) + for entry in manifest.fetch_manifest_entry(self._io, discard_deleted=True) + if entry.data_file.content == DataFileContent.DATA + ] + + list_of_entries = executor.map(_get_entries, previous_snapshot.manifests(self._io)) + return list(chain(*list_of_entries)) + else: + return [] + + +class UpdateSnapshot: + _transaction: Transaction + _io: FileIO + + def __init__(self, transaction: Transaction, io: FileIO) -> None: + self._transaction = transaction + self._io = io + + def fast_append(self) -> FastAppendFiles: + return FastAppendFiles(operation=Operation.APPEND, transaction=self._transaction, io=self._io) + + def overwrite(self) -> OverwriteFiles: + return OverwriteFiles( + operation=Operation.OVERWRITE + if self._transaction.table_metadata.current_snapshot() is not None + else Operation.APPEND, + transaction=self._transaction, + io=self._io, + ) + + +class UpdateSpec(UpdateTableMetadata["UpdateSpec"]): + _transaction: Transaction + _name_to_field: Dict[str, PartitionField] = {} + _name_to_added_field: Dict[str, PartitionField] = {} + _transform_to_field: Dict[Tuple[int, str], PartitionField] = {} + _transform_to_added_field: Dict[Tuple[int, str], PartitionField] = {} + _renames: Dict[str, str] = {} + _added_time_fields: Dict[int, PartitionField] = {} + _case_sensitive: bool + _adds: List[PartitionField] + _deletes: Set[int] + _last_assigned_partition_id: int + + def __init__(self, transaction: Transaction, case_sensitive: bool = True) -> None: + super().__init__(transaction) + self._name_to_field = {field.name: field for field in transaction.table_metadata.spec().fields} + self._name_to_added_field = {} + self._transform_to_field = { + (field.source_id, repr(field.transform)): field for field in transaction.table_metadata.spec().fields + } + self._transform_to_added_field = {} + self._adds = [] + self._deletes = set() + self._last_assigned_partition_id = transaction.table_metadata.last_partition_id or PARTITION_FIELD_ID_START - 1 + self._renames = {} + self._transaction = transaction + self._case_sensitive = case_sensitive + self._added_time_fields = {} + + def add_field( + self, + source_column_name: str, + transform: Transform[Any, Any], + partition_field_name: Optional[str] = None, + ) -> UpdateSpec: + ref = Reference(source_column_name) + bound_ref = ref.bind(self._transaction.table_metadata.schema(), self._case_sensitive) + # verify transform can actually bind it + output_type = bound_ref.field.field_type + if not transform.can_transform(output_type): + raise ValueError(f"{transform} cannot transform {output_type} values from {bound_ref.field.name}") + + transform_key = (bound_ref.field.field_id, repr(transform)) + existing_partition_field = self._transform_to_field.get(transform_key) + if existing_partition_field and self._is_duplicate_partition(transform, existing_partition_field): + raise 
ValueError(f"Duplicate partition field for ${ref.name}=${ref}, ${existing_partition_field} already exists") + + added = self._transform_to_added_field.get(transform_key) + if added: + raise ValueError(f"Already added partition: {added.name}") + + new_field = self._partition_field((bound_ref.field.field_id, transform), partition_field_name) + if new_field.name in self._name_to_added_field: + raise ValueError(f"Already added partition field with name: {new_field.name}") + + if isinstance(new_field.transform, TimeTransform): + existing_time_field = self._added_time_fields.get(new_field.source_id) + if existing_time_field: + raise ValueError(f"Cannot add time partition field: {new_field.name} conflicts with {existing_time_field.name}") + self._added_time_fields[new_field.source_id] = new_field + self._transform_to_added_field[transform_key] = new_field + + existing_partition_field = self._name_to_field.get(new_field.name) + if existing_partition_field and new_field.field_id not in self._deletes: + if isinstance(existing_partition_field.transform, VoidTransform): + self.rename_field( + existing_partition_field.name, existing_partition_field.name + "_" + str(existing_partition_field.field_id) + ) + else: + raise ValueError(f"Cannot add duplicate partition field name: {existing_partition_field.name}") + + self._name_to_added_field[new_field.name] = new_field + self._adds.append(new_field) + return self + + def add_identity(self, source_column_name: str) -> UpdateSpec: + return self.add_field(source_column_name, IdentityTransform(), None) + + def remove_field(self, name: str) -> UpdateSpec: + added = self._name_to_added_field.get(name) + if added: + raise ValueError(f"Cannot delete newly added field {name}") + renamed = self._renames.get(name) + if renamed: + raise ValueError(f"Cannot rename and delete field {name}") + field = self._name_to_field.get(name) + if not field: + raise ValueError(f"No such partition field: {name}") + + self._deletes.add(field.field_id) + return self + + def rename_field(self, name: str, new_name: str) -> UpdateSpec: + existing_field = self._name_to_field.get(new_name) + if existing_field and isinstance(existing_field.transform, VoidTransform): + return self.rename_field(name, name + "_" + str(existing_field.field_id)) + added = self._name_to_added_field.get(name) + if added: + raise ValueError("Cannot rename recently added partitions") + field = self._name_to_field.get(name) + if not field: + raise ValueError(f"Cannot find partition field {name}") + if field.field_id in self._deletes: + raise ValueError(f"Cannot delete and rename partition field {name}") + self._renames[name] = new_name + return self + + def _commit(self) -> UpdatesAndRequirements: + new_spec = self._apply() + updates: Tuple[TableUpdate, ...] = () + requirements: Tuple[TableRequirement, ...] 
= ()
+
+        if self._transaction.table_metadata.default_spec_id != new_spec.spec_id:
+            if new_spec.spec_id not in self._transaction.table_metadata.specs():
+                updates = (
+                    AddPartitionSpecUpdate(spec=new_spec),
+                    # A spec_id of -1 refers to the spec that was just added
+                    SetDefaultSpecUpdate(spec_id=-1),
+                )
+            else:
+                updates = (SetDefaultSpecUpdate(spec_id=new_spec.spec_id),)
+
+            required_last_assigned_partition_id = self._transaction.table_metadata.last_partition_id
+            requirements = (AssertLastAssignedPartitionId(last_assigned_partition_id=required_last_assigned_partition_id),)
+
+        return updates, requirements
+
+    def _apply(self) -> PartitionSpec:
+        def _check_and_add_partition_name(schema: Schema, name: str, source_id: int, partition_names: Set[str]) -> None:
+            try:
+                field = schema.find_field(name)
+            except ValueError:
+                field = None
+
+            if source_id is not None and field is not None and field.field_id != source_id:
+                raise ValueError(f"Cannot create identity partition from a different field in the schema {name}")
+            elif field is not None and source_id != field.field_id:
+                raise ValueError(f"Cannot create partition from name that exists in schema {name}")
+            if not name:
+                raise ValueError("Undefined name")
+            if name in partition_names:
+                raise ValueError(f"Partition name has to be unique: {name}")
+            partition_names.add(name)
+
+        def _add_new_field(
+            schema: Schema, source_id: int, field_id: int, name: str, transform: Transform[Any, Any], partition_names: Set[str]
+        ) -> PartitionField:
+            _check_and_add_partition_name(schema, name, source_id, partition_names)
+            return PartitionField(source_id, field_id, transform, name)
+
+        partition_fields = []
+        partition_names: Set[str] = set()
+        for field in self._transaction.table_metadata.spec().fields:
+            if field.field_id not in self._deletes:
+                renamed = self._renames.get(field.name)
+                if renamed:
+                    new_field = _add_new_field(
+                        self._transaction.table_metadata.schema(),
+                        field.source_id,
+                        field.field_id,
+                        renamed,
+                        field.transform,
+                        partition_names,
+                    )
+                else:
+                    new_field = _add_new_field(
+                        self._transaction.table_metadata.schema(),
+                        field.source_id,
+                        field.field_id,
+                        field.name,
+                        field.transform,
+                        partition_names,
+                    )
+                partition_fields.append(new_field)
+            elif self._transaction.table_metadata.format_version == 1:
+                renamed = self._renames.get(field.name)
+                if renamed:
+                    new_field = _add_new_field(
+                        self._transaction.table_metadata.schema(),
+                        field.source_id,
+                        field.field_id,
+                        renamed,
+                        VoidTransform(),
+                        partition_names,
+                    )
+                else:
+                    new_field = _add_new_field(
+                        self._transaction.table_metadata.schema(),
+                        field.source_id,
+                        field.field_id,
+                        field.name,
+                        VoidTransform(),
+                        partition_names,
+                    )
+
+                partition_fields.append(new_field)
+
+        for added_field in self._adds:
+            new_field = PartitionField(
+                source_id=added_field.source_id,
+                field_id=added_field.field_id,
+                transform=added_field.transform,
+                name=added_field.name,
+            )
+            partition_fields.append(new_field)
+
+        # Reuse spec id or create a new one.
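+        # The rule applied below (sketch): if the new spec has the same fields
+        # as an existing spec, that spec_id is reused; otherwise the new
+        # spec_id becomes one greater than the highest existing spec_id.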
+ new_spec = PartitionSpec(*partition_fields) + new_spec_id = INITIAL_PARTITION_SPEC_ID + for spec in self._transaction.table_metadata.specs().values(): + if new_spec.compatible_with(spec): + new_spec_id = spec.spec_id + break + elif new_spec_id <= spec.spec_id: + new_spec_id = spec.spec_id + 1 + return PartitionSpec(*partition_fields, spec_id=new_spec_id) + + def _partition_field(self, transform_key: Tuple[int, Transform[Any, Any]], name: Optional[str]) -> PartitionField: + if self._transaction.table_metadata.format_version == 2: + source_id, transform = transform_key + historical_fields = [] + for spec in self._transaction.table_metadata.specs().values(): + for field in spec.fields: + historical_fields.append((field.source_id, field.field_id, repr(field.transform), field.name)) + + for field_key in historical_fields: + if field_key[0] == source_id and field_key[2] == repr(transform): + if name is None or field_key[3] == name: + return PartitionField(source_id, field_key[1], transform, name) + + new_field_id = self._new_field_id() + if name is None: + tmp_field = PartitionField(transform_key[0], new_field_id, transform_key[1], 'unassigned_field_name') + name = _visit_partition_field(self._transaction.table_metadata.schema(), tmp_field, _PartitionNameGenerator()) + return PartitionField(transform_key[0], new_field_id, transform_key[1], name) + + def _new_field_id(self) -> int: + self._last_assigned_partition_id += 1 + return self._last_assigned_partition_id + + def _is_duplicate_partition(self, transform: Transform[Any, Any], partition_field: PartitionField) -> bool: + return partition_field.field_id not in self._deletes and partition_field.transform == transform diff --git a/pyiceberg/table/metadata.py b/pyiceberg/table/metadata.py index 43e29c7b03..323f6d85a8 100644 --- a/pyiceberg/table/metadata.py +++ b/pyiceberg/table/metadata.py @@ -28,7 +28,7 @@ Union, ) -from pydantic import Field, model_validator +from pydantic import Field, field_serializer, field_validator, model_validator from pydantic import ValidationError as PydanticValidationError from typing_extensions import Annotated @@ -49,6 +49,8 @@ IcebergRootModel, Properties, ) +from pyiceberg.types import transform_dict_value_to_str +from pyiceberg.utils.config import Config from pyiceberg.utils.datetime import datetime_to_millis CURRENT_SNAPSHOT_ID = "current-snapshot-id" @@ -218,6 +220,11 @@ class TableMetadataCommonFields(IcebergBaseModel): There is always a main branch reference pointing to the current-snapshot-id even if the refs map is null.""" + # validators + @field_validator('properties', mode='before') + def transform_properties_dict_value_to_str(cls, properties: Properties) -> Dict[str, str]: + return transform_dict_value_to_str(properties) + def snapshot_by_id(self, snapshot_id: int) -> Optional[Snapshot]: """Get the snapshot by snapshot_id.""" return next((snapshot for snapshot in self.snapshots if snapshot.snapshot_id == snapshot_id), None) @@ -226,6 +233,59 @@ def schema_by_id(self, schema_id: int) -> Optional[Schema]: """Get the schema by schema_id.""" return next((schema for schema in self.schemas if schema.schema_id == schema_id), None) + def schema(self) -> Schema: + """Return the schema for this table.""" + return next(schema for schema in self.schemas if schema.schema_id == self.current_schema_id) + + def spec(self) -> PartitionSpec: + """Return the partition spec of this table.""" + return next(spec for spec in self.partition_specs if spec.spec_id == self.default_spec_id) + + def specs(self) -> Dict[int, 
PartitionSpec]:
+        """Return a dict of the partition specs of this table."""
+        return {spec.spec_id: spec for spec in self.partition_specs}
+
+    def new_snapshot_id(self) -> int:
+        """Generate a new snapshot-id that's not in use."""
+        snapshot_id = _generate_snapshot_id()
+        while self.snapshot_by_id(snapshot_id) is not None:
+            snapshot_id = _generate_snapshot_id()
+
+        return snapshot_id
+
+    def current_snapshot(self) -> Optional[Snapshot]:
+        """Get the current snapshot for this table, or None if there is no current snapshot."""
+        if self.current_snapshot_id is not None:
+            return self.snapshot_by_id(self.current_snapshot_id)
+        return None
+
+    def next_sequence_number(self) -> int:
+        """Return the next sequence number; v1 tables always stay at the initial sequence number."""
+        return self.last_sequence_number + 1 if self.format_version > 1 else INITIAL_SEQUENCE_NUMBER
+
+    def sort_order_by_id(self, sort_order_id: int) -> Optional[SortOrder]:
+        """Get the sort order by sort_order_id."""
+        return next((sort_order for sort_order in self.sort_orders if sort_order.order_id == sort_order_id), None)
+
+    @field_serializer('current_snapshot_id')
+    def serialize_current_snapshot_id(self, current_snapshot_id: Optional[int]) -> Optional[int]:
+        if current_snapshot_id is None and Config().get_bool("legacy-current-snapshot-id"):
+            return -1
+        return current_snapshot_id
+
+
+def _generate_snapshot_id() -> int:
+    """Generate a new Snapshot ID from a UUID.
+
+    Returns: A non-negative 64-bit integer.
+    """
+    rnd_uuid = uuid.uuid4()
+    snapshot_id = int.from_bytes(
+        bytes(lhs ^ rhs for lhs, rhs in zip(rnd_uuid.bytes[0:8], rnd_uuid.bytes[8:16])), byteorder='little', signed=True
+    )
+    snapshot_id = snapshot_id if snapshot_id >= 0 else snapshot_id * -1
+
+    return snapshot_id
+
 class TableMetadataV1(TableMetadataCommonFields, IcebergBaseModel):
     """Represents version 1 of the Table Metadata.
@@ -260,8 +320,10 @@ def set_v2_compatible_defaults(cls, data: Dict[str, Any]) -> Dict[str, Any]:
             The TableMetadata with the defaults applied.
         """
         # When the schema doesn't have an ID
-        if data.get("schema") and "schema_id" not in data["schema"]:
-            data["schema"]["schema_id"] = DEFAULT_SCHEMA_ID
+        schema = data.get("schema")
+        if isinstance(schema, dict):
+            if "schema_id" not in schema and "schema-id" not in schema:
+                schema["schema_id"] = DEFAULT_SCHEMA_ID
 
         return data
@@ -308,7 +370,8 @@ def construct_partition_specs(cls, data: Dict[str, Any]) -> Dict[str, Any]:
             data[PARTITION_SPECS] = [{"field-id": 0, "fields": ()}]
 
         data[LAST_PARTITION_ID] = max(
-            [field.get(FIELD_ID) for spec in data[PARTITION_SPECS] for field in spec[FIELDS]], default=PARTITION_FIELD_ID_START
+            [field.get(FIELD_ID) for spec in data[PARTITION_SPECS] for field in spec[FIELDS]],
+            default=PARTITION_FIELD_ID_START - 1,
         )
         return data
@@ -335,7 +398,7 @@ def to_v2(self) -> TableMetadataV2:
         metadata["format-version"] = 2
         return TableMetadataV2.model_validate(metadata)
 
-    format_version: Literal[1] = Field(alias="format-version")
+    format_version: Literal[1] = Field(alias="format-version", default=1)
     """An integer version number for the format.
 
     Currently, this can be 1 or 2 based on the spec.
Implementations must throw an exception if a table’s version is higher than the supported version.""" @@ -404,6 +467,8 @@ def new_table_metadata( properties: Properties = EMPTY_DICT, table_uuid: Optional[uuid.UUID] = None, ) -> TableMetadata: + from pyiceberg.table import TableProperties + fresh_schema = assign_fresh_schema_ids(schema) fresh_partition_spec = assign_fresh_partition_spec_ids(partition_spec, schema, fresh_schema) fresh_sort_order = assign_fresh_sort_order_ids(sort_order, schema, fresh_schema) @@ -411,6 +476,24 @@ def new_table_metadata( if table_uuid is None: table_uuid = uuid.uuid4() + # Remove format-version so it does not get persisted + format_version = int(properties.pop(TableProperties.FORMAT_VERSION, TableProperties.DEFAULT_FORMAT_VERSION)) + if format_version == 1: + return TableMetadataV1( + location=location, + last_column_id=fresh_schema.highest_field_id, + current_schema_id=fresh_schema.schema_id, + schema=fresh_schema, + partition_spec=[field.model_dump() for field in fresh_partition_spec.fields], + partition_specs=[fresh_partition_spec], + default_spec_id=fresh_partition_spec.spec_id, + sort_orders=[fresh_sort_order], + default_sort_order_id=fresh_sort_order.order_id, + properties=properties, + last_partition_id=fresh_partition_spec.last_assigned_field_id, + table_uuid=table_uuid, + ) + return TableMetadataV2( location=location, schemas=[fresh_schema], diff --git a/pyiceberg/table/name_mapping.py b/pyiceberg/table/name_mapping.py index b07fbc0735..baa15f168d 100644 --- a/pyiceberg/table/name_mapping.py +++ b/pyiceberg/table/name_mapping.py @@ -20,12 +20,13 @@ More information can be found on here: https://iceberg.apache.org/spec/#name-mapping-serialization """ + from __future__ import annotations from abc import ABC, abstractmethod from collections import ChainMap from functools import cached_property, singledispatch -from typing import Any, Dict, Generic, List, TypeVar, Union +from typing import Any, Dict, Generic, Iterator, List, Optional, TypeVar, Union from pydantic import Field, conlist, field_validator, model_serializer @@ -44,6 +45,18 @@ class MappedField(IcebergBaseModel): def convert_null_to_empty_List(cls, v: Any) -> Any: return v or [] + @field_validator('names', mode='after') + @classmethod + def check_at_least_one(cls, v: List[str]) -> Any: + """ + Conlist constraint does not seem to be validating the class on instantiation. + + Adding a custom validator to enforce min_length=1 constraint. 
+ """ + if len(v) < 1: + raise ValueError("At least one mapped name must be provided for the field") + return v + @model_serializer def ser_model(self) -> Dict[str, Any]: """Set custom serializer to leave out the field when it is empty.""" @@ -84,6 +97,10 @@ def __len__(self) -> int: """Return the number of mappings.""" return len(self.root) + def __iter__(self) -> Iterator[MappedField]: + """Iterate over the mapped fields.""" + return iter(self.root) + def __str__(self) -> str: """Convert the name-mapping into a nicely formatted string.""" if len(self.root) == 0: @@ -92,24 +109,25 @@ def __str__(self) -> str: return "[\n " + "\n ".join([str(e) for e in self.root]) + "\n]" +S = TypeVar('S') T = TypeVar("T") -class NameMappingVisitor(Generic[T], ABC): +class NameMappingVisitor(Generic[S, T], ABC): @abstractmethod - def mapping(self, nm: NameMapping, field_results: T) -> T: + def mapping(self, nm: NameMapping, field_results: S) -> S: """Visit a NameMapping.""" @abstractmethod - def fields(self, struct: List[MappedField], field_results: List[T]) -> T: + def fields(self, struct: List[MappedField], field_results: List[T]) -> S: """Visit a List[MappedField].""" @abstractmethod - def field(self, field: MappedField, field_result: T) -> T: + def field(self, field: MappedField, field_result: S) -> T: """Visit a MappedField.""" -class _IndexByName(NameMappingVisitor[Dict[str, MappedField]]): +class _IndexByName(NameMappingVisitor[Dict[str, MappedField], Dict[str, MappedField]]): def mapping(self, nm: NameMapping, field_results: Dict[str, MappedField]) -> Dict[str, MappedField]: return field_results @@ -128,18 +146,18 @@ def field(self, field: MappedField, field_result: Dict[str, MappedField]) -> Dic @singledispatch -def visit_name_mapping(obj: Union[NameMapping, List[MappedField], MappedField], visitor: NameMappingVisitor[T]) -> T: +def visit_name_mapping(obj: Union[NameMapping, List[MappedField], MappedField], visitor: NameMappingVisitor[S, T]) -> S: """Traverse the name mapping in post-order traversal.""" raise NotImplementedError(f"Cannot visit non-type: {obj}") @visit_name_mapping.register(NameMapping) -def _(obj: NameMapping, visitor: NameMappingVisitor[T]) -> T: +def _(obj: NameMapping, visitor: NameMappingVisitor[S, T]) -> S: return visitor.mapping(obj, visit_name_mapping(obj.root, visitor)) @visit_name_mapping.register(list) -def _(fields: List[MappedField], visitor: NameMappingVisitor[T]) -> T: +def _(fields: List[MappedField], visitor: NameMappingVisitor[S, T]) -> S: results = [visitor.field(field, visit_name_mapping(field.fields, visitor)) for field in fields] return visitor.fields(fields, results) @@ -174,5 +192,71 @@ def primitive(self, primitive: PrimitiveType) -> List[MappedField]: return [] +class _UpdateMapping(NameMappingVisitor[List[MappedField], MappedField]): + _updates: Dict[int, NestedField] + _adds: Dict[int, List[NestedField]] + + def __init__(self, updates: Dict[int, NestedField], adds: Dict[int, List[NestedField]]): + self._updates = updates + self._adds = adds + + @staticmethod + def _remove_reassigned_names(field: MappedField, assignments: Dict[str, int]) -> Optional[MappedField]: + removed_names = set() + for name in field.names: + if (assigned_id := assignments.get(name)) and assigned_id != field.field_id: + removed_names.add(name) + + remaining_names = [f for f in field.names if f not in removed_names] + if remaining_names: + return MappedField(field_id=field.field_id, names=remaining_names, fields=field.fields) + else: + return None + + def _add_new_fields(self, 
mapped_fields: List[MappedField], parent_id: int) -> List[MappedField]: + if fields_to_add := self._adds.get(parent_id): + fields: List[MappedField] = [] + new_fields: List[MappedField] = [] + + for add in fields_to_add: + new_fields.append( + MappedField(field_id=add.field_id, names=[add.name], fields=visit(add.field_type, _CreateMapping())) + ) + + reassignments = {f.name: f.field_id for f in fields_to_add} + fields = [ + updated_field + for field in mapped_fields + if (updated_field := self._remove_reassigned_names(field, reassignments)) is not None + ] + new_fields + return fields + else: + return mapped_fields + + def mapping(self, nm: NameMapping, field_results: List[MappedField]) -> List[MappedField]: + return self._add_new_fields(field_results, -1) + + def fields(self, struct: List[MappedField], field_results: List[MappedField]) -> List[MappedField]: + reassignments: Dict[str, int] = { + update.name: update.field_id for f in field_results if (update := self._updates.get(f.field_id)) + } + return [ + updated_field + for field in field_results + if (updated_field := self._remove_reassigned_names(field, reassignments)) is not None + ] + + def field(self, field: MappedField, field_result: List[MappedField]) -> MappedField: + field_names = field.names + if (update := self._updates.get(field.field_id)) is not None and update.name not in field_names: + field_names.append(update.name) + + return MappedField(field_id=field.field_id, names=field_names, fields=self._add_new_fields(field_result, field.field_id)) + + def create_mapping_from_schema(schema: Schema) -> NameMapping: return NameMapping(visit(schema, _CreateMapping())) + + +def update_mapping(mapping: NameMapping, updates: Dict[int, NestedField], adds: Dict[int, List[NestedField]]) -> NameMapping: + return NameMapping(visit_name_mapping(mapping, _UpdateMapping(updates, adds))) diff --git a/pyiceberg/table/snapshots.py b/pyiceberg/table/snapshots.py index e22c95f8ee..a2f15d4405 100644 --- a/pyiceberg/table/snapshots.py +++ b/pyiceberg/table/snapshots.py @@ -14,6 +14,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
+import time from enum import Enum from typing import ( Any, @@ -138,7 +139,7 @@ class Snapshot(IcebergBaseModel): snapshot_id: int = Field(alias="snapshot-id") parent_snapshot_id: Optional[int] = Field(alias="parent-snapshot-id", default=None) sequence_number: Optional[int] = Field(alias="sequence-number", default=None) - timestamp_ms: int = Field(alias="timestamp-ms") + timestamp_ms: int = Field(alias="timestamp-ms", default_factory=lambda: int(time.time() * 1000)) manifest_list: Optional[str] = Field( alias="manifest-list", description="Location of the snapshot's manifest list file", default=None ) @@ -277,23 +278,30 @@ def _truncate_table_summary(summary: Summary, previous_summary: Mapping[str, str }: summary[prop] = '0' - if value := previous_summary.get(TOTAL_DATA_FILES): - summary[DELETED_DATA_FILES] = value - if value := previous_summary.get(TOTAL_DELETE_FILES): - summary[REMOVED_DELETE_FILES] = value - if value := previous_summary.get(TOTAL_RECORDS): - summary[DELETED_RECORDS] = value - if value := previous_summary.get(TOTAL_FILE_SIZE): - summary[REMOVED_FILE_SIZE] = value - if value := previous_summary.get(TOTAL_POSITION_DELETES): - summary[REMOVED_POSITION_DELETES] = value - if value := previous_summary.get(TOTAL_EQUALITY_DELETES): - summary[REMOVED_EQUALITY_DELETES] = value + def get_prop(prop: str) -> int: + value = previous_summary.get(prop) or '0' + try: + return int(value) + except ValueError as e: + raise ValueError(f"Could not parse summary property {prop} to an int: {value}") from e + + if value := get_prop(TOTAL_DATA_FILES): + summary[DELETED_DATA_FILES] = str(value) + if value := get_prop(TOTAL_DELETE_FILES): + summary[REMOVED_DELETE_FILES] = str(value) + if value := get_prop(TOTAL_RECORDS): + summary[DELETED_RECORDS] = str(value) + if value := get_prop(TOTAL_FILE_SIZE): + summary[REMOVED_FILE_SIZE] = str(value) + if value := get_prop(TOTAL_POSITION_DELETES): + summary[REMOVED_POSITION_DELETES] = str(value) + if value := get_prop(TOTAL_EQUALITY_DELETES): + summary[REMOVED_EQUALITY_DELETES] = str(value) return summary -def _update_snapshot_summaries( +def update_snapshot_summaries( summary: Summary, previous_summary: Optional[Mapping[str, str]] = None, truncate_full_table: bool = False ) -> Summary: if summary.operation not in {Operation.APPEND, Operation.OVERWRITE}: diff --git a/pyiceberg/table/sorting.py b/pyiceberg/table/sorting.py index f970d687fa..f74d5bc701 100644 --- a/pyiceberg/table/sorting.py +++ b/pyiceberg/table/sorting.py @@ -168,7 +168,9 @@ def __repr__(self) -> str: UNSORTED_SORT_ORDER = SortOrder(order_id=UNSORTED_SORT_ORDER_ID) -def assign_fresh_sort_order_ids(sort_order: SortOrder, old_schema: Schema, fresh_schema: Schema) -> SortOrder: +def assign_fresh_sort_order_ids( + sort_order: SortOrder, old_schema: Schema, fresh_schema: Schema, sort_order_id: int = INITIAL_SORT_ORDER_ID +) -> SortOrder: if sort_order.is_unsorted: return UNSORTED_SORT_ORDER @@ -189,4 +191,4 @@ def assign_fresh_sort_order_ids(sort_order: SortOrder, old_schema: Schema, fresh ) ) - return SortOrder(*fresh_fields, order_id=INITIAL_SORT_ORDER_ID) + return SortOrder(*fresh_fields, order_id=sort_order_id) diff --git a/pyiceberg/transforms.py b/pyiceberg/transforms.py index b9afae9a7e..e678a77e69 100644 --- a/pyiceberg/transforms.py +++ b/pyiceberg/transforms.py @@ -132,20 +132,17 @@ class Transform(IcebergRootModel[str], ABC, Generic[S, T]): root: str = Field() @abstractmethod - def transform(self, source: IcebergType) -> Callable[[Optional[S]], Optional[T]]: - ... 
+ def transform(self, source: IcebergType) -> Callable[[Optional[S]], Optional[T]]: ... @abstractmethod def can_transform(self, source: IcebergType) -> bool: return False @abstractmethod - def result_type(self, source: IcebergType) -> IcebergType: - ... + def result_type(self, source: IcebergType) -> IcebergType: ... @abstractmethod - def project(self, name: str, pred: BoundPredicate[L]) -> Optional[UnboundPredicate[Any]]: - ... + def project(self, name: str, pred: BoundPredicate[L]) -> Optional[UnboundPredicate[Any]]: ... @property def preserves_order(self) -> bool: @@ -285,8 +282,7 @@ class TimeResolution(IntEnum): class TimeTransform(Transform[S, int], Generic[S], Singleton): @property @abstractmethod - def granularity(self) -> TimeResolution: - ... + def granularity(self) -> TimeResolution: ... def satisfies_order_of(self, other: Transform[S, T]) -> bool: return self.granularity <= other.granularity if hasattr(other, "granularity") else False @@ -295,8 +291,7 @@ def result_type(self, source: IcebergType) -> IntegerType: return IntegerType() @abstractmethod - def transform(self, source: IcebergType) -> Callable[[Optional[Any]], Optional[int]]: - ... + def transform(self, source: IcebergType) -> Callable[[Optional[Any]], Optional[int]]: ... def project(self, name: str, pred: BoundPredicate[L]) -> Optional[UnboundPredicate[Any]]: transformer = self.transform(pred.term.ref().field.field_type) @@ -387,7 +382,7 @@ def month_func(v: Any) -> int: else: raise ValueError(f"Cannot apply month transform for type: {source}") - return lambda v: month_func(v) if v else None + return lambda v: month_func(v) if v is not None else None def can_transform(self, source: IcebergType) -> bool: return isinstance(source, (DateType, TimestampType, TimestamptzType)) @@ -429,7 +424,7 @@ def day_func(v: Any) -> int: else: raise ValueError(f"Cannot apply day transform for type: {source}") - return lambda v: day_func(v) if v else None + return lambda v: day_func(v) if v is not None else None def can_transform(self, source: IcebergType) -> bool: return isinstance(source, (DateType, TimestampType, TimestamptzType)) @@ -469,7 +464,7 @@ def hour_func(v: Any) -> int: else: raise ValueError(f"Cannot apply hour transform for type: {source}") - return lambda v: hour_func(v) if v else None + return lambda v: hour_func(v) if v is not None else None def can_transform(self, source: IcebergType) -> bool: return isinstance(source, (TimestampType, TimestamptzType)) @@ -618,7 +613,7 @@ def truncate_func(v: Any) -> Any: else: raise ValueError(f"Cannot truncate for type: {source}") - return lambda v: truncate_func(v) if v else None + return lambda v: truncate_func(v) if v is not None else None def satisfies_order_of(self, other: Transform[S, T]) -> bool: if self == other: @@ -660,6 +655,11 @@ def _(value: int, _type: IcebergType) -> str: return _int_to_human_string(_type, value) +@_human_string.register(bool) +def _(value: bool, _type: IcebergType) -> str: + return str(value).lower() + + @singledispatch def _int_to_human_string(_type: IcebergType, value: int) -> str: return str(value) diff --git a/pyiceberg/typedef.py b/pyiceberg/typedef.py index 56a3d3c72d..e57bf3490c 100644 --- a/pyiceberg/typedef.py +++ b/pyiceberg/typedef.py @@ -73,7 +73,7 @@ def __missing__(self, key: K) -> V: Identifier = Tuple[str, ...] 
-Properties = Dict[str, str]
+Properties = Dict[str, Any]
 RecursiveDict = Dict[str, Union[str, "RecursiveDict"]]
 
 # Represents the literal value
diff --git a/pyiceberg/types.py b/pyiceberg/types.py
index b8fdedea51..746f03ea0b 100644
--- a/pyiceberg/types.py
+++ b/pyiceberg/types.py
@@ -29,6 +29,7 @@
 Notes:
   - https://iceberg.apache.org/#spec/#primitive-types
 """
+
 from __future__ import annotations
 
 import re
@@ -36,6 +37,7 @@
 from typing import (
     Any,
     ClassVar,
+    Dict,
     Literal,
     Optional,
     Tuple,
@@ -60,6 +62,14 @@
 FIXED_PARSER = ParseNumberFromBrackets(FIXED)
 
 
+def transform_dict_value_to_str(properties: Dict[str, Any]) -> Dict[str, str]:
+    """Transform all values in the dictionary to strings. Raise an error if any value is None."""
+    for key, value in properties.items():
+        if value is None:
+            raise ValueError(f"None type is not a supported value in properties: {key}")
+    return {k: str(v) for k, v in properties.items()}
+
+
 def _parse_decimal_type(decimal: Any) -> Tuple[int, int]:
     if isinstance(decimal, str):
         matches = DECIMAL_REGEX.search(decimal)
@@ -349,6 +359,18 @@ def field(self, field_id: int) -> Optional[NestedField]:
                 return field
         return None
 
+    def field_by_name(self, name: str, case_sensitive: bool = True) -> Optional[NestedField]:
+        if case_sensitive:
+            for field in self.fields:
+                if field.name == name:
+                    return field
+        else:
+            name_lower = name.lower()
+            for field in self.fields:
+                if field.name.lower() == name_lower:
+                    return field
+        return None
+
     def __str__(self) -> str:
         """Return the string representation of the StructType class."""
         return f"struct<{', '.join(map(str, self.fields))}>"
diff --git a/pyiceberg/utils/concurrent.py b/pyiceberg/utils/concurrent.py
index 75d6ff8365..805599bf41 100644
--- a/pyiceberg/utils/concurrent.py
+++ b/pyiceberg/utils/concurrent.py
@@ -15,6 +15,7 @@
 # specific language governing permissions and limitations
 # under the License.
 """Concurrency concepts that support efficient multi-threading."""
+
 from concurrent.futures import Executor, ThreadPoolExecutor
 from typing import Optional
 
@@ -36,13 +37,4 @@ def get_or_create() -> Executor:
     @staticmethod
     def max_workers() -> Optional[int]:
         """Return the max number of workers configured."""
-        config = Config()
-        val = config.config.get("max-workers")
-
-        if val is None:
-            return None
-
-        try:
-            return int(val)  # type: ignore
-        except ValueError as err:
-            raise ValueError(f"Max workers should be an integer or left unset. Current value: {val}") from err
+        return Config().get_int("max-workers")
diff --git a/pyiceberg/utils/config.py b/pyiceberg/utils/config.py
index 31ba0b36ed..8b1b81d3a7 100644
--- a/pyiceberg/utils/config.py
+++ b/pyiceberg/utils/config.py
@@ -16,6 +16,7 @@
 # under the License.
import logging import os +from distutils.util import strtobool from typing import List, Optional import strictyaml @@ -125,8 +126,8 @@ def set_property(_config: RecursiveDict, path: List[str], config_value: str) -> env_var_lower = env_var.lower() if env_var_lower.startswith(PYICEBERG.lower()): key = env_var_lower[len(PYICEBERG) :] - parts = key.split("__") - parts_normalized = [part.replace("_", "-") for part in parts] + parts = key.split("__", maxsplit=2) + parts_normalized = [part.replace('__', '.').replace("_", "-") for part in parts] set_property(config, parts_normalized, config_value) return config @@ -154,3 +155,19 @@ def get_catalog_config(self, catalog_name: str) -> Optional[RecursiveDict]: assert isinstance(catalog_conf, dict), f"Configuration path catalogs.{catalog_name_lower} needs to be an object" return catalog_conf return None + + def get_int(self, key: str) -> Optional[int]: + if (val := self.config.get(key)) is not None: + try: + return int(val) # type: ignore + except ValueError as err: + raise ValueError(f"{key} should be an integer or left unset. Current value: {val}") from err + return None + + def get_bool(self, key: str) -> Optional[bool]: + if (val := self.config.get(key)) is not None: + try: + return strtobool(val) # type: ignore + except ValueError as err: + raise ValueError(f"{key} should be a boolean or left unset. Current value: {val}") from err + return None diff --git a/pyiceberg/utils/datetime.py b/pyiceberg/utils/datetime.py index 1ab56d6ea3..0cb6926efa 100644 --- a/pyiceberg/utils/datetime.py +++ b/pyiceberg/utils/datetime.py @@ -15,6 +15,7 @@ # specific language governing permissions and limitations # under the License. """Helper methods for working with date/time representations.""" + from __future__ import annotations import re diff --git a/pyiceberg/utils/decimal.py b/pyiceberg/utils/decimal.py index 503545b69b..c2125d6734 100644 --- a/pyiceberg/utils/decimal.py +++ b/pyiceberg/utils/decimal.py @@ -16,6 +16,7 @@ # under the License. """Helper methods for working with Python Decimals.""" + import math from decimal import Decimal from typing import Optional, Union diff --git a/pyiceberg/utils/schema_conversion.py b/pyiceberg/utils/schema_conversion.py index d4b7aab4f1..2cceecc639 100644 --- a/pyiceberg/utils/schema_conversion.py +++ b/pyiceberg/utils/schema_conversion.py @@ -15,6 +15,7 @@ # specific language governing permissions and limitations # under the License. """Utility class for converting between Avro and Iceberg schemas.""" + import logging from typing import ( Any, diff --git a/pyiceberg/utils/singleton.py b/pyiceberg/utils/singleton.py index 9380b89cbc..90e909ecbc 100644 --- a/pyiceberg/utils/singleton.py +++ b/pyiceberg/utils/singleton.py @@ -27,6 +27,7 @@ More information on metaclasses: https://docs.python.org/3/reference/datamodel.html#metaclasses """ + from typing import Any, ClassVar, Dict diff --git a/pyproject.toml b/pyproject.toml index 1bd4b7d5ec..d92ceb5fc7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -48,42 +48,45 @@ include = [ [tool.poetry.dependencies] python = "^3.8" -mmhash3 = ">=3.0.0,<4.0.0" +mmh3 = ">=4.0.0,<5.0.0" requests = ">=2.20.0,<3.0.0" click = ">=7.1.1,<9.0.0" rich = ">=10.11.0,<14.0.0" strictyaml = ">=1.7.0,<2.0.0" # CVE-2020-14343 was fixed in 5.4. 
pydantic = ">=2.0,<3.0,!=2.4.0,!=2.4.1" # 2.4.0, 2.4.1 has a critical bug sortedcontainers = "2.4.0" -fsspec = ">=2023.1.0,<2024.1.0" +fsspec = ">=2023.1.0,<2025.1.0" pyparsing = ">=3.1.0,<4.0.0" zstandard = ">=0.13.0,<1.0.0" -pyarrow = { version = ">=9.0.0,<15.0.0", optional = true } +tenacity = ">=8.2.3,<9.0.0" +pyarrow = { version = ">=9.0.0,<16.0.0", optional = true } pandas = { version = ">=1.0.0,<3.0.0", optional = true } duckdb = { version = ">=0.5.0,<1.0.0", optional = true } -ray = { version = ">=2.0.0,<3.0.0", optional = true } +ray = { version = ">=2.0.0,<2.10.0", optional = true } python-snappy = { version = ">=0.6.0,<1.0.0", optional = true } thrift = { version = ">=0.13.0,<1.0.0", optional = true } mypy-boto3-glue = { version = ">=1.28.18", optional = true } boto3 = { version = ">=1.24.59", optional = true } s3fs = { version = ">=2023.1.0,<2024.1.0", optional = true } -adlfs = { version = ">=2023.1.0,<2024.1.0", optional = true } +adlfs = { version = ">=2023.1.0,<2024.3.0", optional = true } gcsfs = { version = ">=2023.1.0,<2024.1.0", optional = true } psycopg2-binary = { version = ">=2.9.6", optional = true } sqlalchemy = { version = "^2.0.18", optional = true } +getdaft = { version = ">=0.2.12", optional = true } [tool.poetry.dev-dependencies] -pytest = "7.4.3" +pytest = "7.4.4" pytest-checkdocs = "2.10.1" pytest-lazy-fixture = "0.6.3" pre-commit = "3.5.0" -fastavro = "1.9.2" -coverage = { version = "^7.3.4", extras = ["toml"] } +fastavro = "1.9.4" +coverage = { version = "^7.4.2", extras = ["toml"] } requests-mock = "1.11.0" -moto = { version = "^4.2.12", extras = ["server"] } +moto = { version = "^5.0.2", extras = ["server"] } typing-extensions = "4.9.0" pytest-mock = "3.12.0" -cython = "3.0.7" +pyspark = "3.5.0" +cython = "3.0.8" [[tool.mypy.overrides]] module = "pytest_mock.*" @@ -105,6 +108,7 @@ pyarrow = ["pyarrow"] pandas = ["pandas", "pyarrow"] duckdb = ["duckdb", "pyarrow"] ray = ["ray", "pyarrow", "pandas"] +daft = ["getdaft"] snappy = ["python-snappy"] hive = ["thrift"] s3fs = ["s3fs"] @@ -113,6 +117,7 @@ adlfs = ["adlfs"] dynamodb = ["boto3"] zstandard = ["zstandard"] sql-postgres = ["sqlalchemy", "psycopg2-binary"] +sql-sqlite = ["sqlalchemy"] gcsfs = ["gcsfs"] [tool.pytest.ini_options] @@ -145,6 +150,7 @@ warn_unreachable = true warn_unused_ignores = true disallow_any_generics = true disallow_untyped_defs = true +implicit_reexport = false [[tool.mypy.overrides]] module = "pyarrow.*" @@ -262,6 +268,10 @@ ignore_missing_imports = true module = "ray.*" ignore_missing_imports = true +[[tool.mypy.overrides]] +module = "daft.*" +ignore_missing_imports = true + [[tool.mypy.overrides]] module = "pyparsing.*" ignore_missing_imports = true @@ -294,13 +304,17 @@ ignore_missing_imports = true module = "setuptools.*" ignore_missing_imports = true +[[tool.mypy.overrides]] +module = "tenacity.*" +ignore_missing_imports = true + [tool.coverage.run] source = ['pyiceberg/'] [tool.ruff] src = ['pyiceberg','tests'] extend-exclude = ["dev/provision.py"] -select = [ +lint.select = [ "E", # pycodestyle "W", # pycodestyle "F", # Pyflakes @@ -310,11 +324,11 @@ select = [ "I", # isort "UP", # pyupgrade ] -ignore = ["E501","E203","B024","B028"] +lint.ignore = ["E501","E203","B024","B028","UP037"] # Allow autofix for all enabled rules (when `--fix`) is provided. -fixable = ["ALL"] -unfixable = [] +lint.fixable = ["ALL"] +lint.unfixable = [] # Exclude a variety of commonly ignored directories. 
exclude = [ @@ -340,20 +354,23 @@ exclude = [ "node_modules", "venv", ] -per-file-ignores = {} +lint.per-file-ignores = {} # Ignore _all_ violations. # Same as Black. line-length = 130 # Allow unused variables when underscore-prefixed. -dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" +lint.dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" -[tool.ruff.pyupgrade] +[tool.ruff.lint.pyupgrade] # Preserve types, even if a file imports `from __future__ import annotations`. keep-runtime-typing = true -[tool.ruff.isort] +[tool.ruff.lint.isort] detect-same-package = true lines-between-types = 0 known-first-party = ["pyiceberg", "tests"] section-order = ["future", "standard-library", "third-party", "first-party", "local-folder"] + +[tool.ruff.format] +quote-style = "preserve" diff --git a/tests/__init__.py b/tests/__init__.py deleted file mode 100644 index 13a83393a9..0000000000 --- a/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. diff --git a/tests/avro/test_decoder.py b/tests/avro/test_decoder.py index bbcc7394f4..608e6ae2d5 100644 --- a/tests/avro/test_decoder.py +++ b/tests/avro/test_decoder.py @@ -144,13 +144,13 @@ def test_read_single_byte_at_the_time(decoder_class: Callable[[bytes], BinaryDec @pytest.mark.parametrize("decoder_class", AVAILABLE_DECODERS) def test_read_float(decoder_class: Callable[[bytes], BinaryDecoder]) -> None: - decoder = decoder_class(b"\x00\x00\x9A\x41") + decoder = decoder_class(b"\x00\x00\x9a\x41") assert decoder.read_float() == 19.25 @pytest.mark.parametrize("decoder_class", AVAILABLE_DECODERS) def test_skip_float(decoder_class: Callable[[bytes], BinaryDecoder]) -> None: - decoder = decoder_class(b"\x00\x00\x9A\x41") + decoder = decoder_class(b"\x00\x00\x9a\x41") assert decoder.tell() == 0 decoder.skip_float() assert decoder.tell() == 4 @@ -179,13 +179,13 @@ def test_read_bytes(decoder_class: Callable[[bytes], BinaryDecoder]) -> None: @pytest.mark.parametrize("decoder_class", AVAILABLE_DECODERS) def test_read_utf8(decoder_class: Callable[[bytes], BinaryDecoder]) -> None: - decoder = decoder_class(b"\x04\x76\x6F") + decoder = decoder_class(b"\x04\x76\x6f") assert decoder.read_utf8() == "vo" @pytest.mark.parametrize("decoder_class", AVAILABLE_DECODERS) def test_skip_utf8(decoder_class: Callable[[bytes], BinaryDecoder]) -> None: - decoder = decoder_class(b"\x04\x76\x6F") + decoder = decoder_class(b"\x04\x76\x6f") assert decoder.tell() == 0 decoder.skip_utf8() assert decoder.tell() == 3 @@ -193,7 +193,7 @@ def test_skip_utf8(decoder_class: Callable[[bytes], BinaryDecoder]) -> None: @pytest.mark.parametrize("decoder_class", AVAILABLE_DECODERS) def test_read_int_as_float(decoder_class: Callable[[bytes], BinaryDecoder]) -> None: - decoder = 
decoder_class(b"\x00\x00\x9A\x41") + decoder = decoder_class(b"\x00\x00\x9a\x41") reader = resolve_reader(FloatType(), DoubleType()) assert reader.read(decoder) == 19.25 diff --git a/tests/avro/test_file.py b/tests/avro/test_file.py index 74458fd923..8a3aaf72cb 100644 --- a/tests/avro/test_file.py +++ b/tests/avro/test_file.py @@ -26,7 +26,7 @@ from fastavro import reader, writer import pyiceberg.avro.file as avro -from pyiceberg.avro.codecs import DeflateCodec +from pyiceberg.avro.codecs.deflate import DeflateCodec from pyiceberg.avro.file import META_SCHEMA, AvroFileHeader from pyiceberg.io.pyarrow import PyArrowFileIO from pyiceberg.manifest import ( diff --git a/tests/avro/test_reader.py b/tests/avro/test_reader.py index 48ee8911da..c97d421d87 100644 --- a/tests/avro/test_reader.py +++ b/tests/avro/test_reader.py @@ -353,7 +353,9 @@ def test_read_struct_lambda(decoder_class: Callable[[bytes], BinaryDecoder]) -> struct = StructType(NestedField(1, "id", IntegerType(), required=True)) # You can also pass in an arbitrary function that returns a struct result = StructReader( - ((0, IntegerReader()),), lambda struct: Record(struct=struct), struct # pylint: disable=unnecessary-lambda + ((0, IntegerReader()),), + lambda struct: Record(struct=struct), + struct, # pylint: disable=unnecessary-lambda ).read(decoder) assert repr(result) == "Record[id=12]" diff --git a/tests/avro/test_resolver.py b/tests/avro/test_resolver.py index 51d2a7d8fc..07d41491fa 100644 --- a/tests/avro/test_resolver.py +++ b/tests/avro/test_resolver.py @@ -32,7 +32,7 @@ StringReader, StructReader, ) -from pyiceberg.avro.resolver import ResolveError, resolve_reader, resolve_writer +from pyiceberg.avro.resolver import resolve_reader, resolve_writer from pyiceberg.avro.writer import ( BinaryWriter, DefaultWriter, @@ -44,6 +44,7 @@ StringWriter, StructWriter, ) +from pyiceberg.exceptions import ResolveError from pyiceberg.io.pyarrow import PyArrowFileIO from pyiceberg.manifest import MANIFEST_ENTRY_SCHEMAS from pyiceberg.schema import Schema @@ -318,34 +319,30 @@ def test_resolver_initial_value() -> None: def test_resolve_writer() -> None: actual = resolve_writer(record_schema=MANIFEST_ENTRY_SCHEMAS[2], file_schema=MANIFEST_ENTRY_SCHEMAS[1]) - expected = StructWriter( + expected = StructWriter(( + (0, IntegerWriter()), + (1, IntegerWriter()), ( - (0, IntegerWriter()), - (1, IntegerWriter()), - ( - 4, - StructWriter( - ( - (1, StringWriter()), - (2, StringWriter()), - (3, StructWriter(())), - (4, IntegerWriter()), - (5, IntegerWriter()), - (None, DefaultWriter(writer=IntegerWriter(), value=67108864)), - (6, OptionWriter(option=MapWriter(key_writer=IntegerWriter(), value_writer=IntegerWriter()))), - (7, OptionWriter(option=MapWriter(key_writer=IntegerWriter(), value_writer=IntegerWriter()))), - (8, OptionWriter(option=MapWriter(key_writer=IntegerWriter(), value_writer=IntegerWriter()))), - (9, OptionWriter(option=MapWriter(key_writer=IntegerWriter(), value_writer=IntegerWriter()))), - (10, OptionWriter(option=MapWriter(key_writer=IntegerWriter(), value_writer=BinaryWriter()))), - (11, OptionWriter(option=MapWriter(key_writer=IntegerWriter(), value_writer=BinaryWriter()))), - (12, OptionWriter(option=BinaryWriter())), - (13, OptionWriter(option=ListWriter(element_writer=IntegerWriter()))), - (15, OptionWriter(option=IntegerWriter())), - ) - ), - ), - ) - ) + 4, + StructWriter(( + (1, StringWriter()), + (2, StringWriter()), + (3, StructWriter(())), + (4, IntegerWriter()), + (5, IntegerWriter()), + (None, 
DefaultWriter(writer=IntegerWriter(), value=67108864)), + (6, OptionWriter(option=MapWriter(key_writer=IntegerWriter(), value_writer=IntegerWriter()))), + (7, OptionWriter(option=MapWriter(key_writer=IntegerWriter(), value_writer=IntegerWriter()))), + (8, OptionWriter(option=MapWriter(key_writer=IntegerWriter(), value_writer=IntegerWriter()))), + (9, OptionWriter(option=MapWriter(key_writer=IntegerWriter(), value_writer=IntegerWriter()))), + (10, OptionWriter(option=MapWriter(key_writer=IntegerWriter(), value_writer=BinaryWriter()))), + (11, OptionWriter(option=MapWriter(key_writer=IntegerWriter(), value_writer=BinaryWriter()))), + (12, OptionWriter(option=BinaryWriter())), + (13, OptionWriter(option=ListWriter(element_writer=IntegerWriter()))), + (15, OptionWriter(option=IntegerWriter())), + )), + ), + )) assert actual == expected diff --git a/tests/avro/test_writer.py b/tests/avro/test_writer.py index 991d9d1ae7..0bae9ece8c 100644 --- a/tests/avro/test_writer.py +++ b/tests/avro/test_writer.py @@ -178,17 +178,15 @@ class MyStruct(Record): construct_writer(schema).write(encoder, my_struct) - assert output.getbuffer() == b"".join( - [ - b"\x18", - zigzag_encode(len(my_struct.properties)), - zigzag_encode(1), - zigzag_encode(2), - zigzag_encode(3), - zigzag_encode(4), - b"\x00", - ] - ) + assert output.getbuffer() == b"".join([ + b"\x18", + zigzag_encode(len(my_struct.properties)), + zigzag_encode(1), + zigzag_encode(2), + zigzag_encode(3), + zigzag_encode(4), + b"\x00", + ]) def test_write_struct_with_list() -> None: @@ -208,17 +206,15 @@ class MyStruct(Record): construct_writer(schema).write(encoder, my_struct) - assert output.getbuffer() == b"".join( - [ - b"\x18", - zigzag_encode(len(my_struct.properties)), - zigzag_encode(1), - zigzag_encode(2), - zigzag_encode(3), - zigzag_encode(4), - b"\x00", - ] - ) + assert output.getbuffer() == b"".join([ + b"\x18", + zigzag_encode(len(my_struct.properties)), + zigzag_encode(1), + zigzag_encode(2), + zigzag_encode(3), + zigzag_encode(4), + b"\x00", + ]) def test_write_decimal() -> None: diff --git a/tests/catalog/integration_test_dynamodb.py b/tests/catalog/integration_test_dynamodb.py index 5ca8767d6d..591e489b83 100644 --- a/tests/catalog/integration_test_dynamodb.py +++ b/tests/catalog/integration_test_dynamodb.py @@ -96,6 +96,15 @@ def test_create_duplicated_table(test_catalog: Catalog, table_schema_nested: Sch test_catalog.create_table((database_name, table_name), table_schema_nested) +def test_create_table_if_not_exists_duplicated_table( + test_catalog: Catalog, table_schema_nested: Schema, database_name: str, table_name: str +) -> None: + test_catalog.create_namespace(database_name) + table1 = test_catalog.create_table((database_name, table_name), table_schema_nested) + table2 = test_catalog.create_table_if_not_exists((database_name, table_name), table_schema_nested) + assert table1.identifier == table2.identifier + + def test_load_table(test_catalog: Catalog, table_schema_nested: Schema, database_name: str, table_name: str) -> None: identifier = (database_name, table_name) test_catalog.create_namespace(database_name) diff --git a/tests/catalog/integration_test_glue.py b/tests/catalog/integration_test_glue.py index 2689ef14d3..a685b7da7b 100644 --- a/tests/catalog/integration_test_glue.py +++ b/tests/catalog/integration_test_glue.py @@ -15,9 +15,12 @@ # specific language governing permissions and limitations # under the License. 
-from typing import Generator, List
+import time
+from typing import Any, Dict, Generator, List
+from uuid import uuid4
 
 import boto3
+import pyarrow as pa
 import pytest
 from botocore.exceptions import ClientError
 
@@ -30,7 +33,9 @@
     NoSuchTableError,
     TableAlreadyExistsError,
 )
+from pyiceberg.io.pyarrow import schema_to_pyarrow
 from pyiceberg.schema import Schema
+from pyiceberg.types import IntegerType
 from tests.conftest import clean_up, get_bucket_name, get_s3_path
 
 # The number of tables/databases used in list_table/namespace test
@@ -51,8 +56,62 @@ def fixture_test_catalog() -> Generator[Catalog, None, None]:
     clean_up(test_catalog)
 
 
+class AthenaQueryHelper:
+    _athena_client: boto3.client
+    _s3_resource: boto3.resource
+    _output_bucket: str
+    _output_path: str
+
+    def __init__(self) -> None:
+        self._s3_resource = boto3.resource("s3")
+        self._athena_client = boto3.client("athena")
+        self._output_bucket = get_bucket_name()
+        self._output_path = f"athena_results_{uuid4()}"
+
+    def get_query_results(self, query: str) -> List[Dict[str, Any]]:
+        query_execution_id = self._athena_client.start_query_execution(
+            QueryString=query, ResultConfiguration={"OutputLocation": f"s3://{self._output_bucket}/{self._output_path}"}
+        )["QueryExecutionId"]
+
+        while True:
+            result = self._athena_client.get_query_execution(QueryExecutionId=query_execution_id)["QueryExecution"]["Status"]
+            query_status = result["State"]
+            assert query_status not in [
+                "FAILED",
+                "CANCELLED",
+            ], f"""
+            Athena query failed or was cancelled:
+            Query: {query}
+            Status: {query_status}
+            Reason: {result["StateChangeReason"]}"""
+
+            if query_status not in ["QUEUED", "RUNNING"]:
+                break
+            time.sleep(0.5)
+
+        # No pagination for now, assume that we are not doing large queries
+        return self._athena_client.get_query_results(QueryExecutionId=query_execution_id)["ResultSet"]["Rows"]
+
+    def clean_up(self) -> None:
+        bucket = self._s3_resource.Bucket(self._output_bucket)
+        for obj in bucket.objects.filter(Prefix=f"{self._output_path}/"):
+            self._s3_resource.Object(bucket.name, obj.key).delete()
+
+
+@pytest.fixture(name="athena", scope="module")
+def fixture_athena_helper() -> Generator[AthenaQueryHelper, None, None]:
+    query_helper = AthenaQueryHelper()
+    yield query_helper
+    query_helper.clean_up()
+
+
 def test_create_table(
-    test_catalog: Catalog, s3: boto3.client, table_schema_nested: Schema, table_name: str, database_name: str
+    test_catalog: Catalog,
+    s3: boto3.client,
+    table_schema_nested: Schema,
+    table_name: str,
+    database_name: str,
+    athena: AthenaQueryHelper,
 ) -> None:
     identifier = (database_name, table_name)
     test_catalog.create_namespace(database_name)
@@ -61,6 +120,49 @@ def test_create_table(
     assert table.identifier == (CATALOG_NAME,) + identifier
     metadata_location = table.metadata_location.split(get_bucket_name())[1][1:]
     s3.head_object(Bucket=get_bucket_name(), Key=metadata_location)
+    assert test_catalog._parse_metadata_version(table.metadata_location) == 0
+
+    table.append(
+        pa.Table.from_pylist(
+            [
+                {
+                    "foo": "foo_val",
+                    "bar": 1,
+                    "baz": False,
+                    "qux": ["x", "y"],
+                    "quux": {"key": {"subkey": 2}},
+                    "location": [{"latitude": 1.1}],
+                    "person": {"name": "some_name", "age": 23},
+                }
+            ],
+            schema=schema_to_pyarrow(table.schema()),
+        ),
+    )
+
+    assert athena.get_query_results(f'SELECT * FROM "{database_name}"."{table_name}"') == [
+        {
+            "Data": [
+                {"VarCharValue": "foo"},
+                {"VarCharValue": "bar"},
+                {"VarCharValue": "baz"},
+                {"VarCharValue": "qux"},
+                {"VarCharValue": "quux"},
+
{"VarCharValue": "location"}, + {"VarCharValue": "person"}, + ] + }, + { + "Data": [ + {"VarCharValue": "foo_val"}, + {"VarCharValue": "1"}, + {"VarCharValue": "false"}, + {"VarCharValue": "[x, y]"}, + {"VarCharValue": "{key={subkey=2}}"}, + {"VarCharValue": "[{latitude=1.1, longitude=null}]"}, + {"VarCharValue": "{name=some_name, age=23}"}, + ] + }, + ] def test_create_table_with_invalid_location(table_schema_nested: Schema, table_name: str, database_name: str) -> None: @@ -82,6 +184,7 @@ def test_create_table_with_default_location( assert table.identifier == (CATALOG_NAME,) + identifier metadata_location = table.metadata_location.split(get_bucket_name())[1][1:] s3.head_object(Bucket=get_bucket_name(), Key=metadata_location) + assert test_catalog._parse_metadata_version(table.metadata_location) == 0 def test_create_table_with_invalid_database(test_catalog: Catalog, table_schema_nested: Schema, table_name: str) -> None: @@ -97,6 +200,15 @@ def test_create_duplicated_table(test_catalog: Catalog, table_schema_nested: Sch test_catalog.create_table((database_name, table_name), table_schema_nested) +def test_create_table_if_not_exists_duplicated_table( + test_catalog: Catalog, table_schema_nested: Schema, table_name: str, database_name: str +) -> None: + test_catalog.create_namespace(database_name) + table1 = test_catalog.create_table((database_name, table_name), table_schema_nested) + table2 = test_catalog.create_table_if_not_exists((database_name, table_name), table_schema_nested) + assert table1.identifier == table2.identifier + + def test_load_table(test_catalog: Catalog, table_schema_nested: Schema, table_name: str, database_name: str) -> None: identifier = (database_name, table_name) test_catalog.create_namespace(database_name) @@ -105,6 +217,7 @@ def test_load_table(test_catalog: Catalog, table_schema_nested: Schema, table_na assert table.identifier == loaded_table.identifier assert table.metadata_location == loaded_table.metadata_location assert table.metadata == loaded_table.metadata + assert test_catalog._parse_metadata_version(table.metadata_location) == 0 def test_list_tables(test_catalog: Catalog, table_schema_nested: Schema, database_name: str, table_list: List[str]) -> None: @@ -126,6 +239,7 @@ def test_rename_table( new_table_name = f"rename-{table_name}" identifier = (database_name, table_name) table = test_catalog.create_table(identifier, table_schema_nested) + assert test_catalog._parse_metadata_version(table.metadata_location) == 0 assert table.identifier == (CATALOG_NAME,) + identifier new_identifier = (new_database_name, new_table_name) test_catalog.rename_table(identifier, new_identifier) @@ -261,3 +375,104 @@ def test_update_namespace_properties(test_catalog: Catalog, database_name: str) else: assert k in update_report.removed assert "updated test description" == test_catalog.load_namespace_properties(database_name)["comment"] + + +def test_commit_table_update_schema( + test_catalog: Catalog, table_schema_nested: Schema, database_name: str, table_name: str, athena: AthenaQueryHelper +) -> None: + identifier = (database_name, table_name) + test_catalog.create_namespace(namespace=database_name) + table = test_catalog.create_table(identifier, table_schema_nested) + original_table_metadata = table.metadata + + assert test_catalog._parse_metadata_version(table.metadata_location) == 0 + assert original_table_metadata.current_schema_id == 0 + + assert athena.get_query_results(f'SELECT * FROM "{database_name}"."{table_name}"') == [ + { + "Data": [ + {"VarCharValue": "foo"}, + 
{"VarCharValue": "bar"}, + {"VarCharValue": "baz"}, + {"VarCharValue": "qux"}, + {"VarCharValue": "quux"}, + {"VarCharValue": "location"}, + {"VarCharValue": "person"}, + ] + } + ] + + transaction = table.transaction() + update = transaction.update_schema() + update.add_column(path="b", field_type=IntegerType()) + update.commit() + transaction.commit_transaction() + + updated_table_metadata = table.metadata + + assert test_catalog._parse_metadata_version(table.metadata_location) == 1 + assert updated_table_metadata.current_schema_id == 1 + assert len(updated_table_metadata.schemas) == 2 + new_schema = next(schema for schema in updated_table_metadata.schemas if schema.schema_id == 1) + assert new_schema + assert new_schema == update._apply() + assert new_schema.find_field("b").field_type == IntegerType() + + table.append( + pa.Table.from_pylist( + [ + { + "foo": "foo_val", + "bar": 1, + "location": [{"latitude": 1.1}], + "person": {"name": "some_name", "age": 23}, + "b": 2, + } + ], + schema=schema_to_pyarrow(new_schema), + ), + ) + + assert athena.get_query_results(f'SELECT * FROM "{database_name}"."{table_name}"') == [ + { + "Data": [ + {"VarCharValue": "foo"}, + {"VarCharValue": "bar"}, + {"VarCharValue": "baz"}, + {"VarCharValue": "qux"}, + {"VarCharValue": "quux"}, + {"VarCharValue": "location"}, + {"VarCharValue": "person"}, + {"VarCharValue": "b"}, + ] + }, + { + "Data": [ + {"VarCharValue": "foo_val"}, + {"VarCharValue": "1"}, + {}, + {"VarCharValue": "[]"}, + {"VarCharValue": "{}"}, + {"VarCharValue": "[{latitude=1.1, longitude=null}]"}, + {"VarCharValue": "{name=some_name, age=23}"}, + {"VarCharValue": "2"}, + ] + }, + ] + + +def test_commit_table_properties(test_catalog: Catalog, table_schema_nested: Schema, database_name: str, table_name: str) -> None: + identifier = (database_name, table_name) + test_catalog.create_namespace(namespace=database_name) + table = test_catalog.create_table(identifier=identifier, schema=table_schema_nested, properties={"test_a": "test_a"}) + + assert test_catalog._parse_metadata_version(table.metadata_location) == 0 + + transaction = table.transaction() + transaction.set_properties(test_a="test_aa", test_b="test_b", test_c="test_c") + transaction.remove_properties("test_b") + transaction.commit_transaction() + + updated_table_metadata = table.metadata + assert test_catalog._parse_metadata_version(table.metadata_location) == 1 + assert updated_table_metadata.properties == {"test_a": "test_aa", "test_c": "test_c"} diff --git a/tests/catalog/test_base.py b/tests/catalog/test_base.py index c7cd1c4fa1..44c36a7d2d 100644 --- a/tests/catalog/test_base.py +++ b/tests/catalog/test_base.py @@ -16,6 +16,9 @@ # under the License. 
# pylint:disable=redefined-outer-name + +import uuid +from pathlib import PosixPath from typing import ( Dict, List, @@ -24,12 +27,13 @@ Union, ) +import pyarrow as pa import pytest +from pydantic_core import ValidationError +from pytest_lazyfixture import lazy_fixture from pyiceberg.catalog import ( Catalog, - Identifier, - Properties, PropertiesUpdateSummary, ) from pyiceberg.exceptions import ( @@ -39,7 +43,7 @@ NoSuchTableError, TableAlreadyExistsError, ) -from pyiceberg.io import load_file_io +from pyiceberg.io import WAREHOUSE, load_file_io from pyiceberg.partitioning import UNPARTITIONED_PARTITION_SPEC, PartitionField, PartitionSpec from pyiceberg.schema import Schema from pyiceberg.table import ( @@ -50,16 +54,23 @@ SetCurrentSchemaUpdate, Table, TableIdentifier, + update_table_metadata, ) -from pyiceberg.table.metadata import TableMetadata, TableMetadataV1, new_table_metadata +from pyiceberg.table.metadata import new_table_metadata from pyiceberg.table.sorting import UNSORTED_SORT_ORDER, SortOrder from pyiceberg.transforms import IdentityTransform -from pyiceberg.typedef import EMPTY_DICT +from pyiceberg.typedef import EMPTY_DICT, Identifier, Properties from pyiceberg.types import IntegerType, LongType, NestedField +DEFAULT_WAREHOUSE_LOCATION = "file:///tmp/warehouse" + class InMemoryCatalog(Catalog): - """An in-memory catalog implementation for testing purposes.""" + """ + An in-memory catalog implementation that stores namespaces and tables in in-memory data structures. + + This is useful for tests, demos, and playgrounds, but not for production, as data is not persisted. + """ __tables: Dict[Identifier, Table] __namespaces: Dict[Identifier, Properties] @@ -68,16 +79,20 @@ def __init__(self, name: str, **properties: str) -> None: super().__init__(name, **properties) self.__tables = {} self.__namespaces = {} + self._warehouse_location = properties.get(WAREHOUSE, DEFAULT_WAREHOUSE_LOCATION) def create_table( self, identifier: Union[str, Identifier], - schema: Schema, + schema: Union[Schema, "pa.Schema"], location: Optional[str] = None, partition_spec: PartitionSpec = UNPARTITIONED_PARTITION_SPEC, sort_order: SortOrder = UNSORTED_SORT_ORDER, properties: Properties = EMPTY_DICT, + table_uuid: Optional[uuid.UUID] = None, ) -> Table: + schema: Schema = self._convert_schema_if_needed(schema)  # type: ignore + identifier = Catalog.identifier_to_tuple(identifier) namespace = Catalog.namespace_from(identifier) @@ -87,26 +102,26 @@ def create_table( if namespace not in self.__namespaces: self.__namespaces[namespace] = {} - new_location = location or f's3://warehouse/{"/".join(identifier)}/data' - metadata = TableMetadataV1( - **{ - "format-version": 1, - "table-uuid": "d20125c8-7284-442c-9aea-15fee620737c", - "location": new_location, - "last-updated-ms": 1602638573874, - "last-column-id": schema.highest_field_id, - "schema": schema.model_dump(), - "partition-spec": partition_spec.model_dump()["fields"], - "properties": properties, - "current-snapshot-id": -1, - "snapshots": [{"snapshot-id": 1925, "timestamp-ms": 1602638573822}], - } + if not location: + location = f'{self._warehouse_location}/{"/".join(identifier)}' + + metadata_location = self._get_metadata_location(location=location) + metadata = new_table_metadata( + schema=schema, + partition_spec=partition_spec, + sort_order=sort_order, + location=location, + properties=properties, + table_uuid=table_uuid, ) + io = load_file_io({**self.properties, **properties}, location=location) + self._write_metadata(metadata, io, 
metadata_location) + table = Table( identifier=identifier, metadata=metadata, - metadata_location=f's3://warehouse/{"/".join(identifier)}/metadata/metadata.json', - io=load_file_io(), + metadata_location=metadata_location, + io=io, catalog=self, ) self.__tables[identifier] = table @@ -116,37 +131,29 @@ def register_table(self, identifier: Union[str, Identifier], metadata_location: raise NotImplementedError def _commit_table(self, table_request: CommitTableRequest) -> CommitTableResponse: - new_metadata: Optional[TableMetadata] = None - metadata_location = "" - for update in table_request.updates: - if isinstance(update, AddSchemaUpdate): - add_schema_update: AddSchemaUpdate = update - identifier = tuple(table_request.identifier.namespace.root) + (table_request.identifier.name,) - table = self.__tables[identifier] - new_metadata = new_table_metadata( - add_schema_update.schema_, - table.metadata.partition_specs[0], - table.sort_order(), - table.location(), - table.properties, - table.metadata.table_uuid, - ) - - table = Table( - identifier=identifier, - metadata=new_metadata, - metadata_location=f's3://warehouse/{"/".join(identifier)}/metadata/metadata.json', - io=load_file_io(), - catalog=self, - ) - - self.__tables[identifier] = table - metadata_location = f's3://warehouse/{"/".join(identifier)}/metadata/metadata.json' - - return CommitTableResponse( - metadata=new_metadata.model_dump() if new_metadata else {}, - metadata_location=metadata_location if metadata_location else "", + identifier_tuple = self.identifier_to_tuple_without_catalog( + tuple(table_request.identifier.namespace.root + [table_request.identifier.name]) ) + current_table = self.load_table(identifier_tuple) + base_metadata = current_table.metadata + + for requirement in table_request.requirements: + requirement.validate(base_metadata) + + updated_metadata = update_table_metadata(base_metadata, table_request.updates) + if updated_metadata == base_metadata: + # no changes, do nothing + return CommitTableResponse(metadata=base_metadata, metadata_location=current_table.metadata_location) + + # write new metadata + new_metadata_version = self._parse_metadata_version(current_table.metadata_location) + 1 + new_metadata_location = self._get_metadata_location(current_table.metadata.location, new_metadata_version) + self._write_metadata(updated_metadata, current_table.io, new_metadata_location) + + # update table state + current_table.metadata = updated_metadata + + return CommitTableResponse(metadata=updated_metadata, metadata_location=new_metadata_location) def load_table(self, identifier: Union[str, Identifier]) -> Table: identifier = self.identifier_to_tuple_without_catalog(identifier) @@ -181,7 +188,7 @@ def rename_table(self, from_identifier: Union[str, Identifier], to_identifier: U identifier=to_identifier, metadata=table.metadata, metadata_location=table.metadata_location, - io=load_file_io(), + io=self._load_file_io(properties=table.metadata.properties, location=table.metadata_location), catalog=self, ) return self.__tables[to_identifier] @@ -253,8 +260,8 @@ def update_namespace_properties( @pytest.fixture -def catalog() -> InMemoryCatalog: - return InMemoryCatalog("test.in.memory.catalog", **{"test.key": "test.value"}) +def catalog(tmp_path: PosixPath) -> InMemoryCatalog: + return InMemoryCatalog("test.in_memory.catalog", **{WAREHOUSE: tmp_path.absolute().as_posix(), "test.key": "test.value"}) TEST_TABLE_IDENTIFIER = ("com", "organization", "department", "my_table") @@ -265,7 +272,6 @@ def catalog() -> InMemoryCatalog: 
NestedField(2, "y", LongType(), doc="comment"), NestedField(3, "z", LongType()), ) -TEST_TABLE_LOCATION = "protocol://some/location" TEST_TABLE_PARTITION_SPEC = PartitionSpec(PartitionField(name="x", transform=IdentityTransform(), source_id=1, field_id=1000)) TEST_TABLE_PROPERTIES = {"key1": "value1", "key2": "value2"} NO_SUCH_TABLE_ERROR = "Table does not exist: \\('com', 'organization', 'department', 'my_table'\\)" @@ -275,13 +281,15 @@ def catalog() -> InMemoryCatalog: NAMESPACE_NOT_EMPTY_ERROR = "Namespace is not empty: \\('com', 'organization', 'department'\\)" -def given_catalog_has_a_table(catalog: InMemoryCatalog) -> Table: +def given_catalog_has_a_table( + catalog: InMemoryCatalog, + properties: Properties = EMPTY_DICT, +) -> Table: return catalog.create_table( identifier=TEST_TABLE_IDENTIFIER, schema=TEST_TABLE_SCHEMA, - location=TEST_TABLE_LOCATION, partition_spec=TEST_TABLE_PARTITION_SPEC, - properties=TEST_TABLE_PROPERTIES, + properties=properties or TEST_TABLE_PROPERTIES, ) @@ -325,13 +333,51 @@ def test_create_table(catalog: InMemoryCatalog) -> None: table = catalog.create_table( identifier=TEST_TABLE_IDENTIFIER, schema=TEST_TABLE_SCHEMA, - location=TEST_TABLE_LOCATION, partition_spec=TEST_TABLE_PARTITION_SPEC, properties=TEST_TABLE_PROPERTIES, ) assert catalog.load_table(TEST_TABLE_IDENTIFIER) == table +def test_create_table_location_override(catalog: InMemoryCatalog) -> None: + new_location = f"{catalog._warehouse_location}/new_location" + table = catalog.create_table( + identifier=TEST_TABLE_IDENTIFIER, + schema=TEST_TABLE_SCHEMA, + location=new_location, + partition_spec=TEST_TABLE_PARTITION_SPEC, + properties=TEST_TABLE_PROPERTIES, + ) + assert catalog.load_table(TEST_TABLE_IDENTIFIER) == table + assert table.location() == new_location + + +@pytest.mark.parametrize( + "schema,expected", + [ + (lazy_fixture("pyarrow_schema_simple_without_ids"), lazy_fixture("iceberg_schema_simple_no_ids")), + (lazy_fixture("iceberg_schema_simple"), lazy_fixture("iceberg_schema_simple")), + (lazy_fixture("iceberg_schema_nested"), lazy_fixture("iceberg_schema_nested")), + (lazy_fixture("pyarrow_schema_nested_without_ids"), lazy_fixture("iceberg_schema_nested_no_ids")), + ], +) +def test_convert_schema_if_needed( + schema: Union[Schema, pa.Schema], + expected: Schema, + catalog: InMemoryCatalog, +) -> None: + assert expected == catalog._convert_schema_if_needed(schema) + + +def test_create_table_pyarrow_schema(catalog: InMemoryCatalog, pyarrow_schema_simple_without_ids: pa.Schema) -> None: + table = catalog.create_table( + identifier=TEST_TABLE_IDENTIFIER, + schema=pyarrow_schema_simple_without_ids, + properties=TEST_TABLE_PROPERTIES, + ) + assert catalog.load_table(TEST_TABLE_IDENTIFIER) == table + + def test_create_table_raises_error_when_table_already_exists(catalog: InMemoryCatalog) -> None: # Given given_catalog_has_a_table(catalog) @@ -587,8 +633,9 @@ def test_commit_table(catalog: InMemoryCatalog) -> None: # Then assert response.metadata.table_uuid == given_table.metadata.table_uuid - assert len(response.metadata.schemas) == 1 - assert response.metadata.schemas[0] == new_schema + assert len(response.metadata.schemas) == 2 + assert response.metadata.schemas[1] == new_schema + assert response.metadata.current_schema_id == new_schema.schema_id def test_add_column(catalog: InMemoryCatalog) -> None: @@ -601,9 +648,9 @@ def test_add_column(catalog: InMemoryCatalog) -> None: NestedField(field_id=2, name="y", field_type=LongType(), required=True, doc="comment"), NestedField(field_id=3, 
name="z", field_type=LongType(), required=True), NestedField(field_id=4, name="new_column1", field_type=IntegerType(), required=False), - schema_id=0, identifier_field_ids=[], ) + assert given_table.schema().schema_id == 1 transaction = given_table.transaction() transaction.update_schema().add_column(path="new_column2", field_type=IntegerType(), doc="doc").commit() @@ -615,9 +662,9 @@ def test_add_column(catalog: InMemoryCatalog) -> None: NestedField(field_id=3, name="z", field_type=LongType(), required=True), NestedField(field_id=4, name="new_column1", field_type=IntegerType(), required=False), NestedField(field_id=5, name="new_column2", field_type=IntegerType(), required=False, doc="doc"), - schema_id=0, identifier_field_ids=[], ) + assert given_table.schema().schema_id == 2 def test_add_column_with_statement(catalog: InMemoryCatalog) -> None: @@ -631,9 +678,9 @@ def test_add_column_with_statement(catalog: InMemoryCatalog) -> None: NestedField(field_id=2, name="y", field_type=LongType(), required=True, doc="comment"), NestedField(field_id=3, name="z", field_type=LongType(), required=True), NestedField(field_id=4, name="new_column1", field_type=IntegerType(), required=False), - schema_id=0, identifier_field_ids=[], ) + assert given_table.schema().schema_id == 1 with given_table.transaction() as tx: tx.update_schema().add_column(path="new_column2", field_type=IntegerType(), doc="doc").commit() @@ -644,11 +691,25 @@ def test_add_column_with_statement(catalog: InMemoryCatalog) -> None: NestedField(field_id=3, name="z", field_type=LongType(), required=True), NestedField(field_id=4, name="new_column1", field_type=IntegerType(), required=False), NestedField(field_id=5, name="new_column2", field_type=IntegerType(), required=False, doc="doc"), - schema_id=0, identifier_field_ids=[], ) + assert given_table.schema().schema_id == 2 def test_catalog_repr(catalog: InMemoryCatalog) -> None: s = repr(catalog) - assert s == "test.in.memory.catalog ()" + assert s == "test.in_memory.catalog ()" + + +def test_table_properties_int_value(catalog: InMemoryCatalog) -> None: + # table properties can be set to int, but still serialized to string + property_with_int = {"property_name": 42} + given_table = given_catalog_has_a_table(catalog, properties=property_with_int) + assert isinstance(given_table.properties["property_name"], str) + + +def test_table_properties_raise_for_none_value(catalog: InMemoryCatalog) -> None: + property_with_none = {"property_name": None} + with pytest.raises(ValidationError) as exc_info: + _ = given_catalog_has_a_table(catalog, properties=property_with_none) + assert "None type is not a supported value in properties: property_name" in str(exc_info.value) diff --git a/tests/catalog/test_dynamodb.py b/tests/catalog/test_dynamodb.py index f28b70d3ac..218b0e8be7 100644 --- a/tests/catalog/test_dynamodb.py +++ b/tests/catalog/test_dynamodb.py @@ -15,12 +15,16 @@ # specific language governing permissions and limitations # under the License. 
from typing import List +from unittest import mock +import boto3 +import pyarrow as pa import pytest -from moto import mock_dynamodb +from moto import mock_aws from pyiceberg.catalog import METADATA_LOCATION, TABLE_TYPE from pyiceberg.catalog.dynamodb import ( + ACTIVE, DYNAMODB_COL_CREATED_AT, DYNAMODB_COL_IDENTIFIER, DYNAMODB_COL_NAMESPACE, @@ -41,19 +45,21 @@ from tests.conftest import BUCKET_NAME, TABLE_METADATA_LOCATION_REGEX -@mock_dynamodb +@mock_aws def test_create_dynamodb_catalog_with_table_name(_dynamodb, _bucket_initialize: None) -> None: # type: ignore DynamoDbCatalog("test_ddb_catalog") response = _dynamodb.describe_table(TableName=DYNAMODB_TABLE_NAME_DEFAULT) assert response["Table"]["TableName"] == DYNAMODB_TABLE_NAME_DEFAULT + assert response["Table"]["TableStatus"] == ACTIVE custom_table_name = "custom_table_name" DynamoDbCatalog("test_ddb_catalog", **{"table-name": custom_table_name}) response = _dynamodb.describe_table(TableName=custom_table_name) assert response["Table"]["TableName"] == custom_table_name + assert response["Table"]["TableStatus"] == ACTIVE -@mock_dynamodb +@mock_aws def test_create_table_with_database_location( _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str ) -> None: @@ -66,7 +72,24 @@ def test_create_table_with_database_location( assert TABLE_METADATA_LOCATION_REGEX.match(table.metadata_location) -@mock_dynamodb +@mock_aws +def test_create_table_with_pyarrow_schema( + _bucket_initialize: None, + moto_endpoint_url: str, + pyarrow_schema_simple_without_ids: pa.Schema, + database_name: str, + table_name: str, +) -> None: + catalog_name = "test_ddb_catalog" + identifier = (database_name, table_name) + test_catalog = DynamoDbCatalog(catalog_name, **{"s3.endpoint": moto_endpoint_url}) + test_catalog.create_namespace(namespace=database_name, properties={"location": f"s3://{BUCKET_NAME}/{database_name}.db"}) + table = test_catalog.create_table(identifier, pyarrow_schema_simple_without_ids) + assert table.identifier == (catalog_name,) + identifier + assert TABLE_METADATA_LOCATION_REGEX.match(table.metadata_location) + + +@mock_aws def test_create_table_with_default_warehouse( _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str ) -> None: @@ -79,7 +102,7 @@ def test_create_table_with_default_warehouse( assert TABLE_METADATA_LOCATION_REGEX.match(table.metadata_location) -@mock_dynamodb +@mock_aws def test_create_table_with_given_location( _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str ) -> None: @@ -94,7 +117,7 @@ def test_create_table_with_given_location( assert TABLE_METADATA_LOCATION_REGEX.match(table.metadata_location) -@mock_dynamodb +@mock_aws def test_create_table_with_no_location( _bucket_initialize: None, table_schema_nested: Schema, database_name: str, table_name: str ) -> None: @@ -105,7 +128,7 @@ def test_create_table_with_no_location( test_catalog.create_table(identifier=identifier, schema=table_schema_nested) -@mock_dynamodb +@mock_aws def test_create_table_with_strips( _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str ) -> None: @@ -118,7 +141,7 @@ def test_create_table_with_strips( assert TABLE_METADATA_LOCATION_REGEX.match(table.metadata_location) -@mock_dynamodb +@mock_aws def test_create_table_with_strips_bucket_root( _bucket_initialize: None, moto_endpoint_url: str, 
table_schema_nested: Schema, database_name: str, table_name: str ) -> None: @@ -131,7 +154,7 @@ def test_create_table_with_strips_bucket_root( assert TABLE_METADATA_LOCATION_REGEX.match(table_strip.metadata_location) -@mock_dynamodb +@mock_aws def test_create_table_with_no_database( _bucket_initialize: None, table_schema_nested: Schema, database_name: str, table_name: str ) -> None: @@ -141,7 +164,7 @@ def test_create_table_with_no_database( test_catalog.create_table(identifier=identifier, schema=table_schema_nested) -@mock_dynamodb +@mock_aws def test_create_duplicated_table( _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str ) -> None: @@ -153,7 +176,19 @@ def test_create_duplicated_table( test_catalog.create_table(identifier, table_schema_nested) -@mock_dynamodb +@mock_aws +def test_create_table_if_not_exists_duplicated_table( + _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str +) -> None: + identifier = (database_name, table_name) + test_catalog = DynamoDbCatalog("test_ddb_catalog", **{"warehouse": f"s3://{BUCKET_NAME}", "s3.endpoint": moto_endpoint_url}) + test_catalog.create_namespace(namespace=database_name) + table1 = test_catalog.create_table(identifier, table_schema_nested) + table2 = test_catalog.create_table_if_not_exists(identifier, table_schema_nested) + assert table1.identifier == table2.identifier + + +@mock_aws def test_load_table( _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str ) -> None: @@ -167,7 +202,7 @@ def test_load_table( assert TABLE_METADATA_LOCATION_REGEX.match(table.metadata_location) -@mock_dynamodb +@mock_aws def test_load_table_from_self_identifier( _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str ) -> None: @@ -182,7 +217,7 @@ def test_load_table_from_self_identifier( assert TABLE_METADATA_LOCATION_REGEX.match(table.metadata_location) -@mock_dynamodb +@mock_aws def test_load_non_exist_table(_bucket_initialize: None, database_name: str, table_name: str) -> None: identifier = (database_name, table_name) test_catalog = DynamoDbCatalog("test_ddb_catalog", warehouse=f"s3://{BUCKET_NAME}") @@ -191,7 +226,7 @@ def test_load_non_exist_table(_bucket_initialize: None, database_name: str, tabl test_catalog.load_table(identifier) -@mock_dynamodb +@mock_aws def test_drop_table( _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str ) -> None: @@ -208,7 +243,7 @@ def test_drop_table( test_catalog.load_table(identifier) -@mock_dynamodb +@mock_aws def test_drop_table_from_self_identifier( _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str ) -> None: @@ -227,7 +262,7 @@ def test_drop_table_from_self_identifier( test_catalog.load_table(table.identifier) -@mock_dynamodb +@mock_aws def test_drop_non_exist_table(_bucket_initialize: None, database_name: str, table_name: str) -> None: identifier = (database_name, table_name) test_catalog = DynamoDbCatalog("test_ddb_catalog", warehouse=f"s3://{BUCKET_NAME}") @@ -235,7 +270,7 @@ def test_drop_non_exist_table(_bucket_initialize: None, database_name: str, tabl test_catalog.drop_table(identifier) -@mock_dynamodb +@mock_aws def test_rename_table( _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: 
str ) -> None: @@ -258,7 +293,7 @@ def test_rename_table( test_catalog.load_table(identifier) -@mock_dynamodb +@mock_aws def test_rename_table_from_self_identifier( _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str ) -> None: @@ -283,7 +318,7 @@ def test_rename_table_from_self_identifier( test_catalog.load_table(table.identifier) -@mock_dynamodb +@mock_aws def test_fail_on_rename_table_with_missing_required_params(_bucket_initialize: None, database_name: str, table_name: str) -> None: new_database_name = f"{database_name}_new" new_table_name = f"{table_name}_new" @@ -307,8 +342,10 @@ def test_fail_on_rename_table_with_missing_required_params(_bucket_initialize: N test_catalog.rename_table(identifier, new_identifier) -@mock_dynamodb -def test_fail_on_rename_non_iceberg_table(_dynamodb, _bucket_initialize: None, database_name: str, table_name: str) -> None: # type: ignore +@mock_aws +def test_fail_on_rename_non_iceberg_table( + _dynamodb: boto3.client, _bucket_initialize: None, database_name: str, table_name: str +) -> None: new_database_name = f"{database_name}_new" new_table_name = f"{table_name}_new" identifier = (database_name, table_name) @@ -334,7 +371,7 @@ def test_fail_on_rename_non_iceberg_table(_dynamodb, _bucket_initialize: None, d test_catalog.rename_table(identifier, new_identifier) -@mock_dynamodb +@mock_aws def test_list_tables( _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_list: List[str] ) -> None: @@ -347,7 +384,7 @@ def test_list_tables( assert (database_name, table_name) in loaded_table_list -@mock_dynamodb +@mock_aws def test_list_namespaces(_bucket_initialize: None, database_list: List[str]) -> None: test_catalog = DynamoDbCatalog("test_ddb_catalog") for database_name in database_list: @@ -357,7 +394,7 @@ def test_list_namespaces(_bucket_initialize: None, database_list: List[str]) -> assert (database_name,) in loaded_database_list -@mock_dynamodb +@mock_aws def test_create_namespace_no_properties(_bucket_initialize: None, database_name: str) -> None: test_catalog = DynamoDbCatalog("test_ddb_catalog") test_catalog.create_namespace(namespace=database_name) @@ -368,7 +405,7 @@ def test_create_namespace_no_properties(_bucket_initialize: None, database_name: assert properties == {} -@mock_dynamodb +@mock_aws def test_create_namespace_with_comment_and_location(_bucket_initialize: None, database_name: str) -> None: test_location = f"s3://{BUCKET_NAME}/{database_name}.db" test_properties = { @@ -385,7 +422,7 @@ def test_create_namespace_with_comment_and_location(_bucket_initialize: None, da assert properties["location"] == test_location -@mock_dynamodb +@mock_aws def test_create_duplicated_namespace(_bucket_initialize: None, database_name: str) -> None: test_catalog = DynamoDbCatalog("test_ddb_catalog") test_catalog.create_namespace(namespace=database_name) @@ -396,7 +433,7 @@ def test_create_duplicated_namespace(_bucket_initialize: None, database_name: st test_catalog.create_namespace(namespace=database_name, properties={"test": "test"}) -@mock_dynamodb +@mock_aws def test_drop_namespace(_bucket_initialize: None, database_name: str) -> None: test_catalog = DynamoDbCatalog("test_ddb_catalog") test_catalog.create_namespace(namespace=database_name) @@ -408,7 +445,7 @@ def test_drop_namespace(_bucket_initialize: None, database_name: str) -> None: assert len(loaded_database_list) == 0 -@mock_dynamodb +@mock_aws def test_drop_non_empty_namespace( 
_bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str ) -> None: @@ -421,14 +458,14 @@ def test_drop_non_empty_namespace( test_catalog.drop_namespace(database_name) -@mock_dynamodb +@mock_aws def test_drop_non_exist_namespace(_bucket_initialize: None, database_name: str) -> None: test_catalog = DynamoDbCatalog("test_ddb_catalog") with pytest.raises(NoSuchNamespaceError): test_catalog.drop_namespace(database_name) -@mock_dynamodb +@mock_aws def test_load_namespace_properties(_bucket_initialize: None, database_name: str) -> None: test_location = f"s3://{BUCKET_NAME}/{database_name}.db" test_properties = { @@ -446,14 +483,14 @@ def test_load_namespace_properties(_bucket_initialize: None, database_name: str) assert v == test_properties[k] -@mock_dynamodb +@mock_aws def test_load_non_exist_namespace_properties(_bucket_initialize: None, database_name: str) -> None: test_catalog = DynamoDbCatalog("test_ddb_catalog") with pytest.raises(NoSuchNamespaceError): test_catalog.load_namespace_properties(database_name) -@mock_dynamodb +@mock_aws def test_update_namespace_properties(_bucket_initialize: None, database_name: str) -> None: test_properties = { "comment": "this is a test description", @@ -478,7 +515,7 @@ def test_update_namespace_properties(_bucket_initialize: None, database_name: st test_catalog.drop_namespace(database_name) -@mock_dynamodb +@mock_aws def test_load_empty_namespace_properties(_bucket_initialize: None, database_name: str) -> None: test_catalog = DynamoDbCatalog("test_ddb_catalog") test_catalog.create_namespace(database_name) @@ -486,7 +523,7 @@ def test_load_empty_namespace_properties(_bucket_initialize: None, database_name assert listed_properties == {} -@mock_dynamodb +@mock_aws def test_update_namespace_properties_overlap_update_removal(_bucket_initialize: None, database_name: str) -> None: test_properties = { "comment": "this is a test description", @@ -503,3 +540,25 @@ def test_update_namespace_properties_overlap_update_removal(_bucket_initialize: test_catalog.update_namespace_properties(database_name, removals, updates) # should not modify the properties assert test_catalog.load_namespace_properties(database_name) == test_properties + + +def test_passing_provided_profile() -> None: + catalog_name = "test_ddb_catalog" + session_props = { + "aws_access_key_id": "abc", + "aws_secret_access_key": "def", + "aws_session_token": "ghi", + "region_name": "eu-central-1", + "botocore_session": None, + "profile_name": None, + } + props = {"py-io-impl": "pyiceberg.io.fsspec.FsspecFileIO"} + props.update(session_props) # type: ignore + with mock.patch('boto3.Session', return_value=mock.Mock()) as mock_session: + mock_client = mock.Mock() + mock_session.return_value.client.return_value = mock_client + mock_client.describe_table.return_value = {'Table': {'TableStatus': 'ACTIVE'}} + test_catalog = DynamoDbCatalog(catalog_name, **props) + assert test_catalog.dynamodb is mock_client + mock_session.assert_called_with(**session_props) + assert test_catalog.dynamodb is mock_session().client() diff --git a/tests/catalog/test_glue.py b/tests/catalog/test_glue.py index 6aaeb2898c..6d44d92724 100644 --- a/tests/catalog/test_glue.py +++ b/tests/catalog/test_glue.py @@ -17,8 +17,10 @@ from typing import Any, Dict, List from unittest import mock +import boto3 +import pyarrow as pa import pytest -from moto import mock_glue +from moto import mock_aws from pyiceberg.catalog.glue import GlueCatalog from pyiceberg.exceptions import ( @@ -31,12 
+33,18 @@ TableAlreadyExistsError, ) from pyiceberg.schema import Schema +from pyiceberg.types import IntegerType from tests.conftest import BUCKET_NAME, TABLE_METADATA_LOCATION_REGEX -@mock_glue +@mock_aws def test_create_table_with_database_location( - _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str + _glue: boto3.client, + _bucket_initialize: None, + moto_endpoint_url: str, + table_schema_nested: Schema, + database_name: str, + table_name: str, ) -> None: catalog_name = "glue" identifier = (database_name, table_name) @@ -45,9 +53,58 @@ def test_create_table_with_database_location( table = test_catalog.create_table(identifier, table_schema_nested) assert table.identifier == (catalog_name,) + identifier assert TABLE_METADATA_LOCATION_REGEX.match(table.metadata_location) + assert test_catalog._parse_metadata_version(table.metadata_location) == 0 + # Ensure schema is also pushed to Glue + table_info = _glue.get_table( + DatabaseName=database_name, + Name=table_name, + ) + storage_descriptor = table_info["Table"]["StorageDescriptor"] + columns = storage_descriptor["Columns"] + assert len(columns) == len(table_schema_nested.fields) + assert columns[0] == { + "Name": "foo", + "Type": "string", + "Parameters": {"iceberg.field.id": "1", "iceberg.field.optional": "true", "iceberg.field.current": "true"}, + } -@mock_glue + assert storage_descriptor["Location"] == f"s3://{BUCKET_NAME}/{database_name}.db/{table_name}" + + +@mock_aws +def test_create_v1_table( + _bucket_initialize: None, + _glue: boto3.client, + moto_endpoint_url: str, + table_schema_nested: Schema, + database_name: str, + table_name: str, +) -> None: + catalog_name = "glue" + test_catalog = GlueCatalog(catalog_name, **{"s3.endpoint": moto_endpoint_url}) + test_catalog.create_namespace(namespace=database_name, properties={"location": f"s3://{BUCKET_NAME}/{database_name}.db"}) + table = test_catalog.create_table((database_name, table_name), table_schema_nested, properties={"format-version": "1"}) + assert table.format_version == 1 + + table_info = _glue.get_table( + DatabaseName=database_name, + Name=table_name, + ) + + storage_descriptor = table_info["Table"]["StorageDescriptor"] + columns = storage_descriptor["Columns"] + assert len(columns) == len(table_schema_nested.fields) + assert columns[0] == { + "Name": "foo", + "Type": "string", + "Parameters": {"iceberg.field.id": "1", "iceberg.field.optional": "true", "iceberg.field.current": "true"}, + } + + assert storage_descriptor["Location"] == f"s3://{BUCKET_NAME}/{database_name}.db/{table_name}" + + +@mock_aws def test_create_table_with_default_warehouse( _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str ) -> None: @@ -58,9 +115,10 @@ def test_create_table_with_default_warehouse( table = test_catalog.create_table(identifier, table_schema_nested) assert table.identifier == (catalog_name,) + identifier assert TABLE_METADATA_LOCATION_REGEX.match(table.metadata_location) + assert test_catalog._parse_metadata_version(table.metadata_location) == 0 -@mock_glue +@mock_aws def test_create_table_with_given_location( _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str ) -> None: @@ -73,9 +131,32 @@ def test_create_table_with_given_location( ) assert table.identifier == (catalog_name,) + identifier assert TABLE_METADATA_LOCATION_REGEX.match(table.metadata_location) + assert 
test_catalog._parse_metadata_version(table.metadata_location) == 0 + + +@mock_aws +def test_create_table_with_pyarrow_schema( + _bucket_initialize: None, + moto_endpoint_url: str, + pyarrow_schema_simple_without_ids: pa.Schema, + database_name: str, + table_name: str, +) -> None: + catalog_name = "glue" + identifier = (database_name, table_name) + test_catalog = GlueCatalog(catalog_name, **{"s3.endpoint": moto_endpoint_url}) + test_catalog.create_namespace(namespace=database_name) + table = test_catalog.create_table( + identifier=identifier, + schema=pyarrow_schema_simple_without_ids, + location=f"s3://{BUCKET_NAME}/{database_name}.db/{table_name}", + ) + assert table.identifier == (catalog_name,) + identifier + assert TABLE_METADATA_LOCATION_REGEX.match(table.metadata_location) + assert test_catalog._parse_metadata_version(table.metadata_location) == 0 -@mock_glue +@mock_aws def test_create_table_with_no_location( _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str ) -> None: @@ -87,7 +168,7 @@ def test_create_table_with_no_location( test_catalog.create_table(identifier=identifier, schema=table_schema_nested) -@mock_glue +@mock_aws def test_create_table_with_strips( _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str ) -> None: @@ -98,9 +179,10 @@ def test_create_table_with_strips( table = test_catalog.create_table(identifier, table_schema_nested) assert table.identifier == (catalog_name,) + identifier assert TABLE_METADATA_LOCATION_REGEX.match(table.metadata_location) + assert test_catalog._parse_metadata_version(table.metadata_location) == 0 -@mock_glue +@mock_aws def test_create_table_with_strips_bucket_root( _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str ) -> None: @@ -111,9 +193,10 @@ def test_create_table_with_strips_bucket_root( table_strip = test_catalog.create_table(identifier, table_schema_nested) assert table_strip.identifier == (catalog_name,) + identifier assert TABLE_METADATA_LOCATION_REGEX.match(table_strip.metadata_location) + assert test_catalog._parse_metadata_version(table_strip.metadata_location) == 0 -@mock_glue +@mock_aws def test_create_table_with_no_database( _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str ) -> None: @@ -123,7 +206,28 @@ def test_create_table_with_no_database( test_catalog.create_table(identifier=identifier, schema=table_schema_nested) -@mock_glue +@mock_aws +def test_create_table_with_glue_catalog_id( + _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str +) -> None: + catalog_name = "glue" + catalog_id = "444444444444" + identifier = (database_name, table_name) + test_catalog = GlueCatalog( + catalog_name, **{"s3.endpoint": moto_endpoint_url, "warehouse": f"s3://{BUCKET_NAME}", "glue.id": catalog_id} + ) + test_catalog.create_namespace(namespace=database_name) + table = test_catalog.create_table(identifier, table_schema_nested) + assert table.identifier == (catalog_name,) + identifier + assert TABLE_METADATA_LOCATION_REGEX.match(table.metadata_location) + assert test_catalog._parse_metadata_version(table.metadata_location) == 0 + + glue = boto3.client("glue") + databases = glue.get_databases() + assert databases["DatabaseList"][0]["CatalogId"] == catalog_id + + +@mock_aws def test_create_duplicated_table( _bucket_initialize: 
None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str ) -> None: @@ -135,7 +239,7 @@ def test_create_duplicated_table( test_catalog.create_table(identifier, table_schema_nested) -@mock_glue +@mock_aws def test_load_table( _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str ) -> None: @@ -147,9 +251,10 @@ def test_load_table( table = test_catalog.load_table(identifier) assert table.identifier == (catalog_name,) + identifier assert TABLE_METADATA_LOCATION_REGEX.match(table.metadata_location) + assert test_catalog._parse_metadata_version(table.metadata_location) == 0 -@mock_glue +@mock_aws def test_load_table_from_self_identifier( _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str ) -> None: @@ -163,7 +268,7 @@ def test_load_table_from_self_identifier( assert TABLE_METADATA_LOCATION_REGEX.match(table.metadata_location) -@mock_glue +@mock_aws def test_load_non_exist_table(_bucket_initialize: None, moto_endpoint_url: str, database_name: str, table_name: str) -> None: identifier = (database_name, table_name) test_catalog = GlueCatalog("glue", **{"s3.endpoint": moto_endpoint_url, "warehouse": f"s3://{BUCKET_NAME}/"}) @@ -172,7 +277,7 @@ def test_load_non_exist_table(_bucket_initialize: None, moto_endpoint_url: str, test_catalog.load_table(identifier) -@mock_glue +@mock_aws def test_drop_table( _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str ) -> None: @@ -189,7 +294,7 @@ def test_drop_table( test_catalog.load_table(identifier) -@mock_glue +@mock_aws def test_drop_table_from_self_identifier( _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str ) -> None: @@ -208,7 +313,7 @@ def test_drop_table_from_self_identifier( test_catalog.load_table(table.identifier) -@mock_glue +@mock_aws def test_drop_non_exist_table(_bucket_initialize: None, moto_endpoint_url: str, database_name: str, table_name: str) -> None: identifier = (database_name, table_name) test_catalog = GlueCatalog("glue", **{"s3.endpoint": moto_endpoint_url, "warehouse": f"s3://{BUCKET_NAME}/"}) @@ -216,7 +321,7 @@ def test_drop_non_exist_table(_bucket_initialize: None, moto_endpoint_url: str, test_catalog.drop_table(identifier) -@mock_glue +@mock_aws def test_rename_table( _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str ) -> None: @@ -229,6 +334,7 @@ def test_rename_table( table = test_catalog.create_table(identifier, table_schema_nested) assert table.identifier == (catalog_name,) + identifier assert TABLE_METADATA_LOCATION_REGEX.match(table.metadata_location) + assert test_catalog._parse_metadata_version(table.metadata_location) == 0 test_catalog.rename_table(identifier, new_identifier) new_table = test_catalog.load_table(new_identifier) assert new_table.identifier == (catalog_name,) + new_identifier @@ -239,7 +345,7 @@ def test_rename_table( test_catalog.load_table(identifier) -@mock_glue +@mock_aws def test_rename_table_from_self_identifier( _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str ) -> None: @@ -264,8 +370,10 @@ def test_rename_table_from_self_identifier( test_catalog.load_table(table.identifier) -@mock_glue -def test_rename_table_no_params(_glue, _bucket_initialize: None, moto_endpoint_url: str, 
database_name: str, table_name: str) -> None: # type: ignore +@mock_aws +def test_rename_table_no_params( + _glue: boto3.client, _bucket_initialize: None, moto_endpoint_url: str, database_name: str, table_name: str +) -> None: new_database_name = f"{database_name}_new" new_table_name = f"{table_name}_new" identifier = (database_name, table_name) @@ -281,8 +389,10 @@ def test_rename_table_no_params(_glue, _bucket_initialize: None, moto_endpoint_u test_catalog.rename_table(identifier, new_identifier) -@mock_glue -def test_rename_non_iceberg_table(_glue, _bucket_initialize: None, moto_endpoint_url: str, database_name: str, table_name: str) -> None: # type: ignore +@mock_aws +def test_rename_non_iceberg_table( + _glue: boto3.client, _bucket_initialize: None, moto_endpoint_url: str, database_name: str, table_name: str +) -> None: new_database_name = f"{database_name}_new" new_table_name = f"{table_name}_new" identifier = (database_name, table_name) @@ -302,7 +412,7 @@ def test_rename_non_iceberg_table(_glue, _bucket_initialize: None, moto_endpoint test_catalog.rename_table(identifier, new_identifier) -@mock_glue +@mock_aws def test_list_tables( _bucket_initialize: None, moto_endpoint_url: str, @@ -320,7 +430,7 @@ def test_list_tables( assert (database_name, table_name) in loaded_table_list -@mock_glue +@mock_aws def test_list_namespaces(_bucket_initialize: None, moto_endpoint_url: str, database_list: List[str]) -> None: test_catalog = GlueCatalog("glue", **{"s3.endpoint": moto_endpoint_url}) for database_name in database_list: @@ -330,7 +440,7 @@ def test_list_namespaces(_bucket_initialize: None, moto_endpoint_url: str, datab assert (database_name,) in loaded_database_list -@mock_glue +@mock_aws def test_create_namespace_no_properties(_bucket_initialize: None, moto_endpoint_url: str, database_name: str) -> None: test_catalog = GlueCatalog("glue", **{"s3.endpoint": moto_endpoint_url}) test_catalog.create_namespace(namespace=database_name) @@ -341,7 +451,7 @@ def test_create_namespace_no_properties(_bucket_initialize: None, moto_endpoint_ assert properties == {} -@mock_glue +@mock_aws def test_create_namespace_with_comment_and_location(_bucket_initialize: None, moto_endpoint_url: str, database_name: str) -> None: test_location = f"s3://{BUCKET_NAME}/{database_name}.db" test_properties = { @@ -358,7 +468,7 @@ def test_create_namespace_with_comment_and_location(_bucket_initialize: None, mo assert properties["location"] == test_location -@mock_glue +@mock_aws def test_create_duplicated_namespace(_bucket_initialize: None, moto_endpoint_url: str, database_name: str) -> None: test_catalog = GlueCatalog("glue", **{"s3.endpoint": moto_endpoint_url}) test_catalog.create_namespace(namespace=database_name) @@ -369,7 +479,7 @@ def test_create_duplicated_namespace(_bucket_initialize: None, moto_endpoint_url test_catalog.create_namespace(namespace=database_name, properties={"test": "test"}) -@mock_glue +@mock_aws def test_drop_namespace(_bucket_initialize: None, moto_endpoint_url: str, database_name: str) -> None: test_catalog = GlueCatalog("glue", **{"s3.endpoint": moto_endpoint_url}) test_catalog.create_namespace(namespace=database_name) @@ -381,7 +491,7 @@ def test_drop_namespace(_bucket_initialize: None, moto_endpoint_url: str, databa assert len(loaded_database_list) == 0 -@mock_glue +@mock_aws def test_drop_non_empty_namespace( _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str ) -> None: @@ -394,14 +504,14 @@ def 
test_drop_non_empty_namespace( test_catalog.drop_namespace(database_name) -@mock_glue +@mock_aws def test_drop_non_exist_namespace(_bucket_initialize: None, moto_endpoint_url: str, database_name: str) -> None: test_catalog = GlueCatalog("glue", **{"s3.endpoint": moto_endpoint_url}) with pytest.raises(NoSuchNamespaceError): test_catalog.drop_namespace(database_name) -@mock_glue +@mock_aws def test_load_namespace_properties(_bucket_initialize: None, moto_endpoint_url: str, database_name: str) -> None: test_location = f"s3://{BUCKET_NAME}/{database_name}.db" test_properties = { @@ -419,14 +529,14 @@ def test_load_namespace_properties(_bucket_initialize: None, moto_endpoint_url: assert v == test_properties[k] -@mock_glue +@mock_aws def test_load_non_exist_namespace_properties(_bucket_initialize: None, moto_endpoint_url: str, database_name: str) -> None: test_catalog = GlueCatalog("glue", **{"s3.endpoint": moto_endpoint_url}) with pytest.raises(NoSuchNamespaceError): test_catalog.load_namespace_properties(database_name) -@mock_glue +@mock_aws def test_update_namespace_properties(_bucket_initialize: None, moto_endpoint_url: str, database_name: str) -> None: test_properties = { "comment": "this is a test description", @@ -451,7 +561,7 @@ def test_update_namespace_properties(_bucket_initialize: None, moto_endpoint_url test_catalog.drop_namespace(database_name) -@mock_glue +@mock_aws def test_load_empty_namespace_properties(_bucket_initialize: None, moto_endpoint_url: str, database_name: str) -> None: test_catalog = GlueCatalog("glue", **{"s3.endpoint": moto_endpoint_url}) test_catalog.create_namespace(database_name) @@ -459,7 +569,7 @@ def test_load_empty_namespace_properties(_bucket_initialize: None, moto_endpoint assert listed_properties == {} -@mock_glue +@mock_aws def test_load_default_namespace_properties(_glue, _bucket_initialize: None, moto_endpoint_url: str, database_name: str) -> None: # type: ignore # simulate creating database with default settings through AWS Glue Web Console _glue.create_database(DatabaseInput={"Name": database_name}) @@ -468,7 +578,7 @@ def test_load_default_namespace_properties(_glue, _bucket_initialize: None, moto assert listed_properties == {} -@mock_glue +@mock_aws def test_update_namespace_properties_overlap_update_removal( _bucket_initialize: None, moto_endpoint_url: str, database_name: str ) -> None: @@ -489,7 +599,7 @@ def test_update_namespace_properties_overlap_update_removal( assert test_catalog.load_namespace_properties(database_name) == test_properties -@mock_glue +@mock_aws def test_passing_profile_name() -> None: session_properties: Dict[str, Any] = { "aws_access_key_id": "abc", @@ -507,3 +617,78 @@ def test_passing_profile_name() -> None: mock_session.assert_called_with(**session_properties) assert test_catalog.glue is mock_session().client() + + +@mock_aws +def test_commit_table_update_schema( + _glue: boto3.client, + _bucket_initialize: None, + moto_endpoint_url: str, + table_schema_nested: Schema, + database_name: str, + table_name: str, +) -> None: + catalog_name = "glue" + identifier = (database_name, table_name) + test_catalog = GlueCatalog(catalog_name, **{"s3.endpoint": moto_endpoint_url, "warehouse": f"s3://{BUCKET_NAME}"}) + test_catalog.create_namespace(namespace=database_name) + table = test_catalog.create_table(identifier, table_schema_nested) + original_table_metadata = table.metadata + + assert TABLE_METADATA_LOCATION_REGEX.match(table.metadata_location) + assert test_catalog._parse_metadata_version(table.metadata_location) == 0 + 
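# a newly created table starts at metadata version 0 with schema id 0; the schema-update commit below bumps both to 1 +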
assert original_table_metadata.current_schema_id == 0 + + transaction = table.transaction() + update = transaction.update_schema() + update.add_column(path="b", field_type=IntegerType()) + update.commit() + transaction.commit_transaction() + + updated_table_metadata = table.metadata + + assert TABLE_METADATA_LOCATION_REGEX.match(table.metadata_location) + assert test_catalog._parse_metadata_version(table.metadata_location) == 1 + assert updated_table_metadata.current_schema_id == 1 + assert len(updated_table_metadata.schemas) == 2 + new_schema = next(schema for schema in updated_table_metadata.schemas if schema.schema_id == 1) + assert new_schema + assert new_schema == update._apply() + assert new_schema.find_field("b").field_type == IntegerType() + + # Ensure schema is also pushed to Glue + table_info = _glue.get_table( + DatabaseName=database_name, + Name=table_name, + ) + storage_descriptor = table_info["Table"]["StorageDescriptor"] + columns = storage_descriptor["Columns"] + assert len(columns) == len(table_schema_nested.fields) + 1 + assert columns[-1] == { + "Name": "b", + "Type": "int", + "Parameters": {"iceberg.field.id": "18", "iceberg.field.optional": "true", "iceberg.field.current": "true"}, + } + assert storage_descriptor["Location"] == f"s3://{BUCKET_NAME}/{database_name}.db/{table_name}" + + +@mock_aws +def test_commit_table_properties( + _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str +) -> None: + catalog_name = "glue" + identifier = (database_name, table_name) + test_catalog = GlueCatalog(catalog_name, **{"s3.endpoint": moto_endpoint_url, "warehouse": f"s3://{BUCKET_NAME}"}) + test_catalog.create_namespace(namespace=database_name) + table = test_catalog.create_table(identifier=identifier, schema=table_schema_nested, properties={"test_a": "test_a"}) + + assert test_catalog._parse_metadata_version(table.metadata_location) == 0 + + transaction = table.transaction() + transaction.set_properties(test_a="test_aa", test_b="test_b", test_c="test_c") + transaction.remove_properties("test_b") + transaction.commit_transaction() + + updated_table_metadata = table.metadata + assert test_catalog._parse_metadata_version(table.metadata_location) == 1 + assert updated_table_metadata.properties == {"test_a": "test_aa", "test_c": "test_c"} diff --git a/tests/catalog/test_hive.py b/tests/catalog/test_hive.py index 54336786f9..e59b7599bc 100644 --- a/tests/catalog/test_hive.py +++ b/tests/catalog/test_hive.py @@ -43,7 +43,7 @@ ) from pyiceberg.partitioning import PartitionField, PartitionSpec from pyiceberg.schema import Schema -from pyiceberg.table.metadata import TableMetadataUtil, TableMetadataV2 +from pyiceberg.table.metadata import TableMetadataUtil, TableMetadataV1, TableMetadataV2 from pyiceberg.table.refs import SnapshotRef, SnapshotRefType from pyiceberg.table.snapshots import ( MetadataLogEntry, @@ -277,8 +277,8 @@ def test_create_table(table_schema_simple: Schema, hive_database: HiveDatabase, ) ], current_schema_id=0, - last_partition_id=1000, - properties={"owner": "javaberg"}, + last_partition_id=999, + properties={"owner": "javaberg", 'write.parquet.compression-codec': 'zstd'}, partition_specs=[PartitionSpec()], default_spec_id=0, current_snapshot_id=None, @@ -295,6 +295,59 @@ def test_create_table(table_schema_simple: Schema, hive_database: HiveDatabase, assert metadata.model_dump() == expected.model_dump() +@patch("time.time", MagicMock(return_value=12345)) +def test_create_v1_table(table_schema_simple: Schema, 
hive_database: HiveDatabase, hive_table: HiveTable) -> None: + catalog = HiveCatalog(HIVE_CATALOG_NAME, uri=HIVE_METASTORE_FAKE_URL) + + catalog._client = MagicMock() + catalog._client.__enter__().create_table.return_value = None + catalog._client.__enter__().get_table.return_value = hive_table + catalog._client.__enter__().get_database.return_value = hive_database + catalog.create_table( + ("default", "table"), schema=table_schema_simple, properties={"owner": "javaberg", "format-version": "1"} + ) + + # Test creating V1 table + called_v1_table: HiveTable = catalog._client.__enter__().create_table.call_args[0][0] + metadata_location = called_v1_table.parameters["metadata_location"] + with open(metadata_location, encoding=UTF8) as f: + payload = f.read() + + expected_schema = Schema( + NestedField(field_id=1, name="foo", field_type=StringType(), required=False), + NestedField(field_id=2, name="bar", field_type=IntegerType(), required=True), + NestedField(field_id=3, name="baz", field_type=BooleanType(), required=False), + schema_id=0, + identifier_field_ids=[2], + ) + actual_v1_metadata = TableMetadataUtil.parse_raw(payload) + expected_spec = PartitionSpec() + expected_v1_metadata = TableMetadataV1( + location=actual_v1_metadata.location, + table_uuid=actual_v1_metadata.table_uuid, + last_updated_ms=actual_v1_metadata.last_updated_ms, + last_column_id=3, + schema=expected_schema, + schemas=[expected_schema], + current_schema_id=0, + last_partition_id=999, + properties={"owner": "javaberg", "write.parquet.compression-codec": "zstd"}, + partition_spec=[], + partition_specs=[expected_spec], + default_spec_id=0, + current_snapshot_id=None, + snapshots=[], + snapshot_log=[], + metadata_log=[], + sort_orders=[SortOrder(order_id=0)], + default_sort_order_id=0, + refs={}, + format_version=1, + ) + + assert actual_v1_metadata.model_dump() == expected_v1_metadata.model_dump() + + def test_load_table(hive_table: HiveTable) -> None: catalog = HiveCatalog(HIVE_CATALOG_NAME, uri=HIVE_METASTORE_FAKE_URL) diff --git a/tests/catalog/test_rest.py b/tests/catalog/test_rest.py index 79cf25f87a..850e5f0180 100644 --- a/tests/catalog/test_rest.py +++ b/tests/catalog/test_rest.py @@ -16,16 +16,17 @@ # under the License. 
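+# The OAuth2 tests below verify that scope, audience, and resource are sent in the token request body, that a custom auth URL is honored, and that an HTTP 419 response triggers a token refresh followed by a retry.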
# pylint: disable=redefined-outer-name,unused-argument import os -from typing import Any, Dict, cast +from typing import Any, Callable, Dict, cast from unittest import mock import pytest from requests_mock import Mocker import pyiceberg -from pyiceberg.catalog import PropertiesUpdateSummary, Table, load_catalog -from pyiceberg.catalog.rest import RestCatalog +from pyiceberg.catalog import PropertiesUpdateSummary, load_catalog +from pyiceberg.catalog.rest import AUTH_URL, RestCatalog from pyiceberg.exceptions import ( + AuthorizationExpiredError, NamespaceAlreadyExistsError, NoSuchNamespaceError, NoSuchTableError, @@ -35,6 +36,7 @@ from pyiceberg.io import load_file_io from pyiceberg.partitioning import PartitionField, PartitionSpec from pyiceberg.schema import Schema +from pyiceberg.table import Table from pyiceberg.table.metadata import TableMetadataV1 from pyiceberg.table.sorting import SortField, SortOrder from pyiceberg.transforms import IdentityTransform, TruncateTransform @@ -43,7 +45,12 @@ TEST_URI = "https://iceberg-test-catalog/" TEST_CREDENTIALS = "client:secret" +TEST_AUTH_URL = "https://auth-endpoint/" TEST_TOKEN = "some_jwt_token" +TEST_SCOPE = "openid_offline_corpds_ds_profile" +TEST_AUDIENCE = "test_audience" +TEST_RESOURCE = "test_resource" + TEST_HEADERS = { "Content-type": "application/json", "X-Client-Version": "0.14.1", @@ -106,6 +113,24 @@ def test_token_200(rest_mock: Mocker) -> None: "token_type": "Bearer", "expires_in": 86400, "issued_token_type": "urn:ietf:params:oauth:token-type:access_token", + "scope": "openid offline", + "refresh_token": "refresh_token", + }, + status_code=200, + request_headers=OAUTH_TEST_HEADERS, + ) + assert ( + RestCatalog("rest", uri=TEST_URI, credential=TEST_CREDENTIALS)._session.headers["Authorization"] # pylint: disable=W0212 + == f"Bearer {TEST_TOKEN}" + ) + + +def test_token_200_without_optional_fields(rest_mock: Mocker) -> None: + rest_mock.post( + f"{TEST_URI}v1/oauth/tokens", + json={ + "access_token": TEST_TOKEN, + "token_type": "Bearer", }, status_code=200, request_headers=OAUTH_TEST_HEADERS, @@ -116,6 +141,107 @@ def test_token_200(rest_mock: Mocker) -> None: ) +def test_token_with_optional_oauth_params(rest_mock: Mocker) -> None: + mock_request = rest_mock.post( + f"{TEST_URI}v1/oauth/tokens", + json={ + "access_token": TEST_TOKEN, + "token_type": "Bearer", + "expires_in": 86400, + "issued_token_type": "urn:ietf:params:oauth:token-type:access_token", + }, + status_code=200, + request_headers=OAUTH_TEST_HEADERS, + ) + assert ( + RestCatalog( + "rest", uri=TEST_URI, credential=TEST_CREDENTIALS, audience=TEST_AUDIENCE, resource=TEST_RESOURCE + )._session.headers["Authorization"] + == f"Bearer {TEST_TOKEN}" + ) + assert TEST_AUDIENCE in mock_request.last_request.text + assert TEST_RESOURCE in mock_request.last_request.text + + +def test_token_with_optional_oauth_params_as_empty(rest_mock: Mocker) -> None: + mock_request = rest_mock.post( + f"{TEST_URI}v1/oauth/tokens", + json={ + "access_token": TEST_TOKEN, + "token_type": "Bearer", + "expires_in": 86400, + "issued_token_type": "urn:ietf:params:oauth:token-type:access_token", + }, + status_code=200, + request_headers=OAUTH_TEST_HEADERS, + ) + assert ( + RestCatalog("rest", uri=TEST_URI, credential=TEST_CREDENTIALS, audience="", resource="")._session.headers["Authorization"] + == f"Bearer {TEST_TOKEN}" + ) + assert TEST_AUDIENCE not in mock_request.last_request.text + assert TEST_RESOURCE not in mock_request.last_request.text + + +def test_token_with_default_scope(rest_mock: Mocker) 
-> None: + mock_request = rest_mock.post( + f"{TEST_URI}v1/oauth/tokens", + json={ + "access_token": TEST_TOKEN, + "token_type": "Bearer", + "expires_in": 86400, + "issued_token_type": "urn:ietf:params:oauth:token-type:access_token", + }, + status_code=200, + request_headers=OAUTH_TEST_HEADERS, + ) + assert ( + RestCatalog("rest", uri=TEST_URI, credential=TEST_CREDENTIALS)._session.headers["Authorization"] == f"Bearer {TEST_TOKEN}" + ) + assert "catalog" in mock_request.last_request.text + + +def test_token_with_custom_scope(rest_mock: Mocker) -> None: + mock_request = rest_mock.post( + f"{TEST_URI}v1/oauth/tokens", + json={ + "access_token": TEST_TOKEN, + "token_type": "Bearer", + "expires_in": 86400, + "issued_token_type": "urn:ietf:params:oauth:token-type:access_token", + }, + status_code=200, + request_headers=OAUTH_TEST_HEADERS, + ) + assert ( + RestCatalog("rest", uri=TEST_URI, credential=TEST_CREDENTIALS, scope=TEST_SCOPE)._session.headers["Authorization"] + == f"Bearer {TEST_TOKEN}" + ) + assert TEST_SCOPE in mock_request.last_request.text + + +def test_token_200_w_auth_url(rest_mock: Mocker) -> None: + rest_mock.post( + TEST_AUTH_URL, + json={ + "access_token": TEST_TOKEN, + "token_type": "Bearer", + "expires_in": 86400, + "issued_token_type": "urn:ietf:params:oauth:token-type:access_token", + }, + status_code=200, + request_headers=OAUTH_TEST_HEADERS, + ) + # pylint: disable=W0212 + assert ( + RestCatalog("rest", uri=TEST_URI, credential=TEST_CREDENTIALS, **{AUTH_URL: TEST_AUTH_URL})._session.headers[ + "Authorization" + ] + == f"Bearer {TEST_TOKEN}" + ) + # pylint: enable=W0212 + + def test_config_200(requests_mock: Mocker) -> None: requests_mock.get( f"{TEST_URI}v1/config", @@ -143,6 +269,45 @@ def test_config_200(requests_mock: Mocker) -> None: assert history[1].url == "https://iceberg-test-catalog/v1/config?warehouse=s3%3A%2F%2Fsome-bucket" +def test_properties_sets_headers(requests_mock: Mocker) -> None: + requests_mock.get( + f"{TEST_URI}v1/config", + json={"defaults": {}, "overrides": {}}, + status_code=200, + ) + + catalog = RestCatalog( + "rest", uri=TEST_URI, warehouse="s3://some-bucket", **{"header.Content-Type": "application/vnd.api+json"} + ) + + assert ( + catalog._session.headers.get("Content-type") == "application/vnd.api+json" + ), "Expected 'Content-Type' header to be 'application/vnd.api+json'" + + assert ( + requests_mock.last_request.headers["Content-type"] == "application/vnd.api+json" + ), "Config request did not include expected 'Content-Type' header" + + +def test_config_sets_headers(requests_mock: Mocker) -> None: + namespace = "leden" + requests_mock.get( + f"{TEST_URI}v1/config", + json={"defaults": {"header.Content-Type": "application/vnd.api+json"}, "overrides": {}}, + status_code=200, + ) + requests_mock.post(f"{TEST_URI}v1/namespaces", json={"namespace": [namespace], "properties": {}}, status_code=200) + catalog = RestCatalog("rest", uri=TEST_URI, warehouse="s3://some-bucket") + catalog.create_namespace(namespace) + + assert ( + catalog._session.headers.get("Content-type") == "application/vnd.api+json" + ), "Expected 'Content-Type' header to be 'application/vnd.api+json'" + assert ( + requests_mock.last_request.headers["Content-type"] == "application/vnd.api+json" + ), "Create namespace request did not include expected 'Content-Type' header" + + def test_token_400(rest_mock: Mocker) -> None: rest_mock.post( f"{TEST_URI}v1/oauth/tokens", @@ -243,6 +408,68 @@ def test_list_namespace_with_parent_200(rest_mock: Mocker) -> None: ] +def 
test_list_namespaces_token_expired(rest_mock: Mocker) -> None: + new_token = "new_jwt_token" + new_header = dict(TEST_HEADERS) + new_header["Authorization"] = f"Bearer {new_token}" + + namespaces = rest_mock.register_uri( + "GET", + f"{TEST_URI}v1/namespaces", + [ + { + "status_code": 419, + "json": { + "error": { + "message": "Authorization expired.", + "type": "AuthorizationExpiredError", + "code": 419, + } + }, + "headers": TEST_HEADERS, + }, + { + "status_code": 200, + "json": {"namespaces": [["default"], ["examples"], ["fokko"], ["system"]]}, + "headers": new_header, + }, + { + "status_code": 200, + "json": {"namespaces": [["default"], ["examples"], ["fokko"], ["system"]]}, + "headers": new_header, + }, + ], + ) + tokens = rest_mock.post( + f"{TEST_URI}v1/oauth/tokens", + json={ + "access_token": new_token, + "token_type": "Bearer", + "expires_in": 86400, + "issued_token_type": "urn:ietf:params:oauth:token-type:access_token", + }, + status_code=200, + ) + catalog = RestCatalog("rest", uri=TEST_URI, token=TEST_TOKEN, credential=TEST_CREDENTIALS) + assert catalog.list_namespaces() == [ + ("default",), + ("examples",), + ("fokko",), + ("system",), + ] + assert namespaces.call_count == 2 + assert tokens.call_count == 1 + + assert catalog.list_namespaces() == [ + ("default",), + ("examples",), + ("fokko",), + ("system",), + ] + assert namespaces.call_count == 3 + assert tokens.call_count == 1 + + def test_create_namespace_200(rest_mock: Mocker) -> None: namespace = "leden" rest_mock.post( @@ -494,6 +721,93 @@ def test_create_table_409(rest_mock: Mocker, table_schema_simple: Schema) -> Non assert "Table already exists" in str(e.value) +def test_create_table_if_not_exists_200( + rest_mock: Mocker, table_schema_simple: Schema, example_table_metadata_no_snapshot_v1_rest_json: Dict[str, Any] +) -> None: + def json_callback() -> Callable[[Any, Any], Dict[str, Any]]: + call_count = 0 + + def callback(request: Any, context: Any) -> Dict[str, Any]: + nonlocal call_count + call_count += 1 + + if call_count == 1: + context.status_code = 200 + return example_table_metadata_no_snapshot_v1_rest_json + else: + context.status_code = 409 + return { + "error": { + "message": "Table already exists: fokko.already_exists in warehouse 8bcb0838-50fc-472d-9ddb-8feb89ef5f1e", + "type": "AlreadyExistsException", + "code": 409, + } + } + + return callback + + rest_mock.post( + f"{TEST_URI}v1/namespaces/fokko/tables", + json=json_callback(), + request_headers=TEST_HEADERS, + ) + rest_mock.get( + f"{TEST_URI}v1/namespaces/fokko/tables/fokko2", + json=example_table_metadata_no_snapshot_v1_rest_json, + status_code=200, + request_headers=TEST_HEADERS, + ) + catalog = RestCatalog("rest", uri=TEST_URI, token=TEST_TOKEN) + table1 = catalog.create_table( + identifier=("fokko", "fokko2"), + schema=table_schema_simple, + location=None, + partition_spec=PartitionSpec( + PartitionField(source_id=1, field_id=1000, transform=TruncateTransform(width=3), name="id"), spec_id=1 + ), + sort_order=SortOrder(SortField(source_id=2, transform=IdentityTransform())), + properties={"owner": "fokko"}, + ) + table2 = catalog.create_table_if_not_exists( + identifier=("fokko", "fokko2"), + schema=table_schema_simple, + location=None, + partition_spec=PartitionSpec(PartitionField(source_id=1, field_id=1000, transform=TruncateTransform(width=3), name="id")), + sort_order=SortOrder(SortField(source_id=2, transform=IdentityTransform())), + properties={"owner": "fokko"}, + ) + assert table1 == table2 + + +def test_create_table_419(rest_mock: Mocker, 
table_schema_simple: Schema) -> None: + rest_mock.post( + f"{TEST_URI}v1/namespaces/fokko/tables", + json={ + "error": { + "message": "Authorization expired.", + "type": "AuthorizationExpiredError", + "code": 419, + } + }, + status_code=419, + request_headers=TEST_HEADERS, + ) + + with pytest.raises(AuthorizationExpiredError) as e: + RestCatalog("rest", uri=TEST_URI, token=TEST_TOKEN).create_table( + identifier=("fokko", "fokko2"), + schema=table_schema_simple, + location=None, + partition_spec=PartitionSpec( + PartitionField(source_id=1, field_id=1000, transform=TruncateTransform(width=3), name="id") + ), + sort_order=SortOrder(SortField(source_id=2, transform=IdentityTransform())), + properties={"owner": "fokko"}, + ) + assert "Authorization expired" in str(e.value) + assert rest_mock.call_count == 3 + + def test_register_table_200( rest_mock: Mocker, table_schema_simple: Schema, example_table_metadata_no_snapshot_v1_rest_json: Dict[str, Any] ) -> None: diff --git a/tests/catalog/test_sql.py b/tests/catalog/test_sql.py index 95dc24ad15..3a77f8678a 100644 --- a/tests/catalog/test_sql.py +++ b/tests/catalog/test_sql.py @@ -19,14 +19,15 @@ from pathlib import Path from typing import Generator, List +import pyarrow as pa import pytest -from pytest import TempPathFactory +from pydantic_core import ValidationError from pytest_lazyfixture import lazy_fixture from sqlalchemy.exc import ArgumentError, IntegrityError -from pyiceberg.catalog import Identifier from pyiceberg.catalog.sql import SqlCatalog from pyiceberg.exceptions import ( + CommitFailedException, NamespaceAlreadyExistsError, NamespaceNotEmptyError, NoSuchNamespaceError, @@ -34,7 +35,12 @@ NoSuchTableError, TableAlreadyExistsError, ) +from pyiceberg.io import FSSPEC_FILE_IO, PY_IO_IMPL +from pyiceberg.io.pyarrow import schema_to_pyarrow +from pyiceberg.partitioning import UNPARTITIONED_PARTITION_SPEC from pyiceberg.schema import Schema +from pyiceberg.table import _dataframe_to_data_files +from pyiceberg.table.snapshots import Operation from pyiceberg.table.sorting import ( NullOrder, SortDirection, @@ -42,11 +48,8 @@ SortOrder, ) from pyiceberg.transforms import IdentityTransform - - -@pytest.fixture(name="warehouse", scope="session") -def fixture_warehouse(tmp_path_factory: TempPathFactory) -> Path: - return tmp_path_factory.mktemp("test_sql") +from pyiceberg.typedef import Identifier +from pyiceberg.types import IntegerType @pytest.fixture(name="random_identifier") @@ -66,7 +69,7 @@ def fixture_another_random_identifier(warehouse: Path, database_name: str, table @pytest.fixture(scope="module") def catalog_memory(warehouse: Path) -> Generator[SqlCatalog, None, None]: props = { - "uri": "sqlite+pysqlite:///:memory:", + "uri": "sqlite:///:memory:", "warehouse": f"file://{warehouse}", } catalog = SqlCatalog("test_sql_catalog", **props) @@ -78,10 +81,36 @@ def catalog_memory(warehouse: Path) -> Generator[SqlCatalog, None, None]: @pytest.fixture(scope="module") def catalog_sqlite(warehouse: Path) -> Generator[SqlCatalog, None, None]: props = { - "uri": "sqlite:////tmp/sql-catalog.db", + "uri": f"sqlite:////{warehouse}/sql-catalog.db", + "warehouse": f"file://{warehouse}", + } + catalog = SqlCatalog("test_sql_catalog", **props) + catalog.create_tables() + yield catalog + catalog.destroy_tables() + + +@pytest.fixture(scope="module") +def catalog_sqlite_without_rowcount(warehouse: Path) -> Generator[SqlCatalog, None, None]: + props = { + "uri": f"sqlite:////{warehouse}/sql-catalog.db", "warehouse": f"file://{warehouse}", } catalog = 
SqlCatalog("test_sql_catalog", **props) + catalog.engine.dialect.supports_sane_rowcount = False + catalog.create_tables() + yield catalog + catalog.destroy_tables() + + +@pytest.fixture(scope="module") +def catalog_sqlite_fsspec(warehouse: Path) -> Generator[SqlCatalog, None, None]: + props = { + "uri": f"sqlite:////{warehouse}/sql-catalog.db", + "warehouse": f"file://{warehouse}", + PY_IO_IMPL: FSSPEC_FILE_IO, + } + catalog = SqlCatalog("test_sql_catalog", **props) catalog.create_tables() yield catalog catalog.destroy_tables() @@ -126,6 +155,44 @@ def test_create_table_default_sort_order(catalog: SqlCatalog, table_schema_neste catalog.drop_table(random_identifier) +@pytest.mark.parametrize( + 'catalog', + [ + lazy_fixture('catalog_memory'), + lazy_fixture('catalog_sqlite'), + ], +) +def test_create_v1_table(catalog: SqlCatalog, table_schema_nested: Schema, random_identifier: Identifier) -> None: + database_name, _table_name = random_identifier + catalog.create_namespace(database_name) + table = catalog.create_table(random_identifier, table_schema_nested, properties={"format-version": "1"}) + assert table.sort_order().order_id == 0, "Order ID must match" + assert table.sort_order().is_unsorted is True, "Order must be unsorted" + assert table.format_version == 1 + assert table.spec() == UNPARTITIONED_PARTITION_SPEC + catalog.drop_table(random_identifier) + + +@pytest.mark.parametrize( + 'catalog', + [ + lazy_fixture('catalog_memory'), + lazy_fixture('catalog_sqlite'), + ], +) +def test_create_table_with_pyarrow_schema( + catalog: SqlCatalog, + pyarrow_schema_simple_without_ids: pa.Schema, + iceberg_table_schema_simple: Schema, + random_identifier: Identifier, +) -> None: + database_name, _table_name = random_identifier + catalog.create_namespace(database_name) + table = catalog.create_table(random_identifier, pyarrow_schema_simple_without_ids) + assert table.schema() == iceberg_table_schema_simple + catalog.drop_table(random_identifier) + + @pytest.mark.parametrize( 'catalog', [ @@ -182,6 +249,23 @@ def test_create_duplicated_table(catalog: SqlCatalog, table_schema_nested: Schem catalog.create_table(random_identifier, table_schema_nested) +@pytest.mark.parametrize( + 'catalog', + [ + lazy_fixture('catalog_memory'), + lazy_fixture('catalog_sqlite'), + ], +) +def test_create_table_if_not_exists_duplicated_table( + catalog: SqlCatalog, table_schema_nested: Schema, random_identifier: Identifier +) -> None: + database_name, _table_name = random_identifier + catalog.create_namespace(database_name) + table1 = catalog.create_table(random_identifier, table_schema_nested) + table2 = catalog.create_table_if_not_exists(random_identifier, table_schema_nested) + assert table1.identifier == table2.identifier + + @pytest.mark.parametrize( 'catalog', [ @@ -305,6 +389,7 @@ def test_load_table_from_self_identifier(catalog: SqlCatalog, table_schema_neste [ lazy_fixture('catalog_memory'), lazy_fixture('catalog_sqlite'), + lazy_fixture('catalog_sqlite_without_rowcount'), ], ) def test_drop_table(catalog: SqlCatalog, table_schema_nested: Schema, random_identifier: Identifier) -> None: @@ -322,6 +407,7 @@ def test_drop_table(catalog: SqlCatalog, table_schema_nested: Schema, random_ide [ lazy_fixture('catalog_memory'), lazy_fixture('catalog_sqlite'), + lazy_fixture('catalog_sqlite_without_rowcount'), ], ) def test_drop_table_from_self_identifier(catalog: SqlCatalog, table_schema_nested: Schema, random_identifier: Identifier) -> None: @@ -341,6 +427,7 @@ def test_drop_table_from_self_identifier(catalog: SqlCatalog, 
table_schema_neste [ lazy_fixture('catalog_memory'), lazy_fixture('catalog_sqlite'), + lazy_fixture('catalog_sqlite_without_rowcount'), ], ) def test_drop_table_that_does_not_exist(catalog: SqlCatalog, random_identifier: Identifier) -> None: @@ -353,6 +440,7 @@ def test_drop_table_that_does_not_exist(catalog: SqlCatalog, random_identifier: [ lazy_fixture('catalog_memory'), lazy_fixture('catalog_sqlite'), + lazy_fixture('catalog_sqlite_without_rowcount'), ], ) def test_rename_table( @@ -377,6 +465,7 @@ def test_rename_table( [ lazy_fixture('catalog_memory'), lazy_fixture('catalog_sqlite'), + lazy_fixture('catalog_sqlite_without_rowcount'), ], ) def test_rename_table_from_self_identifier( @@ -403,6 +492,7 @@ def test_rename_table_from_self_identifier( [ lazy_fixture('catalog_memory'), lazy_fixture('catalog_sqlite'), + lazy_fixture('catalog_sqlite_without_rowcount'), ], ) def test_rename_table_to_existing_one( @@ -425,6 +515,7 @@ def test_rename_table_to_existing_one( [ lazy_fixture('catalog_memory'), lazy_fixture('catalog_sqlite'), + lazy_fixture('catalog_sqlite_without_rowcount'), ], ) def test_rename_missing_table(catalog: SqlCatalog, random_identifier: Identifier, another_random_identifier: Identifier) -> None: @@ -439,6 +530,7 @@ def test_rename_missing_table(catalog: SqlCatalog, random_identifier: Identifier [ lazy_fixture('catalog_memory'), lazy_fixture('catalog_sqlite'), + lazy_fixture('catalog_sqlite_without_rowcount'), ], ) def test_rename_table_to_missing_namespace( @@ -549,7 +641,7 @@ def test_create_namespace_with_null_properties(catalog: SqlCatalog, database_nam catalog.create_namespace(namespace=database_name, properties={None: "value"}) # type: ignore with pytest.raises(IntegrityError): - catalog.create_namespace(namespace=database_name, properties={"key": None}) # type: ignore + catalog.create_namespace(namespace=database_name, properties={"key": None}) @pytest.mark.parametrize( @@ -636,6 +728,18 @@ def test_load_empty_namespace_properties(catalog: SqlCatalog, database_name: str assert listed_properties == {"exists": "true"} +@pytest.mark.parametrize( + 'catalog', + [ + lazy_fixture('catalog_memory'), + lazy_fixture('catalog_sqlite'), + ], +) +def test_load_namespace_properties_non_existing_namespace(catalog: SqlCatalog) -> None: + with pytest.raises(NoSuchNamespaceError): + catalog.load_namespace_properties("does_not_exist") + + @pytest.mark.parametrize( 'catalog', [ @@ -664,3 +768,187 @@ def test_update_namespace_properties(catalog: SqlCatalog, database_name: str) -> else: assert k in update_report.removed assert "updated test description" == catalog.load_namespace_properties(database_name)["comment"] + + +@pytest.mark.parametrize( + 'catalog', + [ + lazy_fixture('catalog_memory'), + lazy_fixture('catalog_sqlite'), + lazy_fixture('catalog_sqlite_without_rowcount'), + ], +) +def test_commit_table(catalog: SqlCatalog, table_schema_nested: Schema, random_identifier: Identifier) -> None: + database_name, _table_name = random_identifier + catalog.create_namespace(database_name) + table = catalog.create_table(random_identifier, table_schema_nested) + + assert catalog._parse_metadata_version(table.metadata_location) == 0 + assert table.metadata.current_schema_id == 0 + + transaction = table.transaction() + update = transaction.update_schema() + update.add_column(path="b", field_type=IntegerType()) + update.commit() + transaction.commit_transaction() + + updated_table_metadata = table.metadata + + assert catalog._parse_metadata_version(table.metadata_location) == 1 + assert 
updated_table_metadata.current_schema_id == 1 + assert len(updated_table_metadata.schemas) == 2 + new_schema = next(schema for schema in updated_table_metadata.schemas if schema.schema_id == 1) + assert new_schema + assert new_schema == update._apply() + assert new_schema.find_field("b").field_type == IntegerType() + + +@pytest.mark.parametrize( + 'catalog', + [ + lazy_fixture('catalog_memory'), + lazy_fixture('catalog_sqlite'), + lazy_fixture('catalog_sqlite_without_rowcount'), + lazy_fixture('catalog_sqlite_fsspec'), + ], +) +def test_append_table(catalog: SqlCatalog, table_schema_simple: Schema, random_identifier: Identifier) -> None: + database_name, _table_name = random_identifier + catalog.create_namespace(database_name) + table = catalog.create_table(random_identifier, table_schema_simple) + + df = pa.Table.from_pydict( + { + "foo": ["a"], + "bar": [1], + "baz": [True], + }, + schema=schema_to_pyarrow(table_schema_simple), + ) + + table.append(df) + + # new snapshot is written in APPEND mode + assert len(table.metadata.snapshots) == 1 + assert table.metadata.snapshots[0].snapshot_id == table.metadata.current_snapshot_id + assert table.metadata.snapshots[0].parent_snapshot_id is None + assert table.metadata.snapshots[0].sequence_number == 1 + assert table.metadata.snapshots[0].summary is not None + assert table.metadata.snapshots[0].summary.operation == Operation.APPEND + assert table.metadata.snapshots[0].summary['added-data-files'] == '1' + assert table.metadata.snapshots[0].summary['added-records'] == '1' + assert table.metadata.snapshots[0].summary['total-data-files'] == '1' + assert table.metadata.snapshots[0].summary['total-records'] == '1' + + # read back the data + assert df == table.scan().to_arrow() + + +@pytest.mark.parametrize( + 'catalog', + [ + lazy_fixture('catalog_memory'), + lazy_fixture('catalog_sqlite'), + lazy_fixture('catalog_sqlite_without_rowcount'), + ], +) +def test_concurrent_commit_table(catalog: SqlCatalog, table_schema_simple: Schema, random_identifier: Identifier) -> None: + database_name, _table_name = random_identifier + catalog.create_namespace(database_name) + table_a = catalog.create_table(random_identifier, table_schema_simple) + table_b = catalog.load_table(random_identifier) + + with table_a.update_schema() as update: + update.add_column(path="b", field_type=IntegerType()) + + with pytest.raises(CommitFailedException, match="Requirement failed: current schema id has changed: expected 0, found 1"): + # This one should fail since it already has been updated + with table_b.update_schema() as update: + update.add_column(path="c", field_type=IntegerType()) + + +@pytest.mark.parametrize( + 'catalog', + [ + lazy_fixture('catalog_memory'), + lazy_fixture('catalog_sqlite'), + lazy_fixture('catalog_sqlite_without_rowcount'), + ], +) +@pytest.mark.parametrize("format_version", [1, 2]) +def test_write_and_evolve(catalog: SqlCatalog, format_version: int) -> None: + identifier = f"default.arrow_write_data_and_evolve_schema_v{format_version}" + + try: + catalog.create_namespace("default") + except NamespaceAlreadyExistsError: + pass + + try: + catalog.drop_table(identifier=identifier) + except NoSuchTableError: + pass + + pa_table = pa.Table.from_pydict( + { + 'foo': ['a', None, 'z'], + }, + schema=pa.schema([pa.field("foo", pa.string(), nullable=True)]), + ) + + tbl = catalog.create_table(identifier=identifier, schema=pa_table.schema, properties={"format-version": str(format_version)}) + + pa_table_with_column = pa.Table.from_pydict( + { + 'foo': ['a', None, 
'z'], + 'bar': [19, None, 25], + }, + schema=pa.schema([ + pa.field("foo", pa.string(), nullable=True), + pa.field("bar", pa.int32(), nullable=True), + ]), + ) + + with tbl.transaction() as txn: + with txn.update_schema() as schema_txn: + schema_txn.union_by_name(pa_table_with_column.schema) + + with txn.update_snapshot().fast_append() as snapshot_update: + for data_file in _dataframe_to_data_files(table_metadata=txn.table_metadata, df=pa_table_with_column, io=tbl.io): + snapshot_update.append_data_file(data_file) + + +@pytest.mark.parametrize( + 'catalog', + [ + lazy_fixture('catalog_memory'), + lazy_fixture('catalog_sqlite'), + lazy_fixture('catalog_sqlite_without_rowcount'), + ], +) +def test_table_properties_int_value(catalog: SqlCatalog, table_schema_simple: Schema, random_identifier: Identifier) -> None: + # table properties can be set to int, but still serialized to string + database_name, _table_name = random_identifier + catalog.create_namespace(database_name) + property_with_int = {"property_name": 42} + table = catalog.create_table(random_identifier, table_schema_simple, properties=property_with_int) + assert isinstance(table.properties["property_name"], str) + + +@pytest.mark.parametrize( + 'catalog', + [ + lazy_fixture('catalog_memory'), + lazy_fixture('catalog_sqlite'), + lazy_fixture('catalog_sqlite_without_rowcount'), + ], +) +def test_table_properties_raise_for_none_value( + catalog: SqlCatalog, table_schema_simple: Schema, random_identifier: Identifier +) -> None: + database_name, _table_name = random_identifier + catalog.create_namespace(database_name) + property_with_none = {"property_name": None} + with pytest.raises(ValidationError) as exc_info: + _ = catalog.create_table(random_identifier, table_schema_simple, properties=property_with_none) + assert "None type is not a supported value in properties: property_name" in str(exc_info.value) diff --git a/tests/cli/test_console.py b/tests/cli/test_console.py index 5eec231cca..3c208c0ab1 100644 --- a/tests/cli/test_console.py +++ b/tests/cli/test_console.py @@ -14,13 +14,18 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
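+# The console tests below run against an InMemoryCatalog whose warehouse is rooted at pytest's tmp_path (see the catalog fixture), which is why the explicit location=TEST_TABLE_LOCATION arguments are removed throughout this file.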
+import datetime import os +import uuid +from pathlib import PosixPath +from unittest.mock import MagicMock import pytest from click.testing import CliRunner from pytest_mock import MockFixture from pyiceberg.cli.console import run +from pyiceberg.io import WAREHOUSE from pyiceberg.partitioning import PartitionField, PartitionSpec from pyiceberg.schema import Schema from pyiceberg.transforms import IdentityTransform @@ -48,8 +53,10 @@ def env_vars(mocker: MockFixture) -> None: @pytest.fixture(name="catalog") -def fixture_catalog(mocker: MockFixture) -> InMemoryCatalog: - in_memory_catalog = InMemoryCatalog("test.in.memory.catalog", **{"test.key": "test.value"}) +def fixture_catalog(mocker: MockFixture, tmp_path: PosixPath) -> InMemoryCatalog: + in_memory_catalog = InMemoryCatalog( + "test.in_memory.catalog", **{WAREHOUSE: tmp_path.absolute().as_posix(), "test.key": "test.value"} + ) mocker.patch("pyiceberg.cli.console.load_catalog", return_value=in_memory_catalog) return in_memory_catalog @@ -59,6 +66,13 @@ def fixture_namespace_properties() -> Properties: return TEST_NAMESPACE_PROPERTIES.copy() +@pytest.fixture() +def mock_datetime_now(monkeypatch: pytest.MonkeyPatch) -> None: + datetime_mock = MagicMock(wraps=datetime.datetime) + datetime_mock.now.return_value = datetime.datetime.fromtimestamp(TEST_TIMESTAMP / 1000.0).astimezone() + monkeypatch.setattr(datetime, "datetime", datetime_mock) + + TEST_TABLE_IDENTIFIER = ("default", "my_table") TEST_TABLE_NAMESPACE = "default" TEST_NAMESPACE_PROPERTIES = {"location": "s3://warehouse/database/location"} @@ -68,9 +82,10 @@ def fixture_namespace_properties() -> Properties: NestedField(2, "y", LongType(), doc="comment"), NestedField(3, "z", LongType()), ) -TEST_TABLE_LOCATION = "s3://bucket/test/location" TEST_TABLE_PARTITION_SPEC = PartitionSpec(PartitionField(name="x", transform=IdentityTransform(), source_id=1, field_id=1000)) TEST_TABLE_PROPERTIES = {"read.split.target.size": "134217728"} +TEST_TABLE_UUID = uuid.UUID("d20125c8-7284-442c-9aea-15fee620737c") +TEST_TIMESTAMP = 1602638573874 MOCK_ENVIRONMENT = {"PYICEBERG_CATALOG__PRODUCTION__URI": "test://doesnotexist"} @@ -88,7 +103,6 @@ def test_list_namespace(catalog: InMemoryCatalog) -> None: catalog.create_table( identifier=TEST_TABLE_IDENTIFIER, schema=TEST_TABLE_SCHEMA, - location=TEST_TABLE_LOCATION, partition_spec=TEST_TABLE_PARTITION_SPEC, properties=TEST_TABLE_PROPERTIES, ) @@ -120,12 +134,13 @@ def test_describe_namespace_does_not_exists(catalog: InMemoryCatalog) -> None: assert result.output == "Namespace does not exist: ('doesnotexist',)\n" -def test_describe_table(catalog: InMemoryCatalog) -> None: +def test_describe_table(catalog: InMemoryCatalog, mock_datetime_now: None) -> None: catalog.create_table( identifier=TEST_TABLE_IDENTIFIER, schema=TEST_TABLE_SCHEMA, - location=TEST_TABLE_LOCATION, partition_spec=TEST_TABLE_PARTITION_SPEC, + table_uuid=TEST_TABLE_UUID, ) runner = CliRunner() @@ -134,7 +149,7 @@ def test_describe_table(catalog: InMemoryCatalog) -> None: assert ( # Strip the whitespace on the end "\n".join([line.rstrip() for line in result.output.split("\n")]) - == """Table format version 1 + == """Table format version 2 Metadata location s3://warehouse/default/my_table/metadata/metadata.json Table UUID d20125c8-7284-442c-9aea-15fee620737c Last Updated 1602638573874 @@ -167,7 +182,6 @@ def test_schema(catalog: InMemoryCatalog) -> None: catalog.create_table( identifier=TEST_TABLE_IDENTIFIER, schema=TEST_TABLE_SCHEMA, - location=TEST_TABLE_LOCATION,
partition_spec=TEST_TABLE_PARTITION_SPEC, ) @@ -196,7 +210,6 @@ def test_spec(catalog: InMemoryCatalog) -> None: catalog.create_table( identifier=TEST_TABLE_IDENTIFIER, schema=TEST_TABLE_SCHEMA, - location=TEST_TABLE_LOCATION, partition_spec=TEST_TABLE_PARTITION_SPEC, ) @@ -225,8 +238,8 @@ def test_uuid(catalog: InMemoryCatalog) -> None: catalog.create_table( identifier=TEST_TABLE_IDENTIFIER, schema=TEST_TABLE_SCHEMA, - location=TEST_TABLE_LOCATION, partition_spec=TEST_TABLE_PARTITION_SPEC, + table_uuid=TEST_TABLE_UUID, ) runner = CliRunner() @@ -248,14 +261,12 @@ def test_location(catalog: InMemoryCatalog) -> None: catalog.create_table( identifier=TEST_TABLE_IDENTIFIER, schema=TEST_TABLE_SCHEMA, - location=TEST_TABLE_LOCATION, partition_spec=TEST_TABLE_PARTITION_SPEC, ) - runner = CliRunner() result = runner.invoke(run, ["location", "default.my_table"]) assert result.exit_code == 0 - assert result.output == """s3://bucket/test/location\n""" + assert result.output == f"""{catalog._warehouse_location}/default/my_table\n""" def test_location_does_not_exists(catalog: InMemoryCatalog) -> None: @@ -271,7 +282,6 @@ def test_drop_table(catalog: InMemoryCatalog) -> None: catalog.create_table( identifier=TEST_TABLE_IDENTIFIER, schema=TEST_TABLE_SCHEMA, - location=TEST_TABLE_LOCATION, partition_spec=TEST_TABLE_PARTITION_SPEC, ) @@ -308,11 +318,26 @@ def test_drop_namespace_does_not_exists(catalog: InMemoryCatalog) -> None: assert result.output == "Namespace does not exist: ('doesnotexist',)\n" +def test_create_namespace(catalog: InMemoryCatalog) -> None: + runner = CliRunner() + result = runner.invoke(run, ["create", "namespace", TEST_TABLE_NAMESPACE]) + assert result.exit_code == 0 + assert result.output == """Created namespace: default\n""" + + +def test_create_namespace_already_exists(catalog: InMemoryCatalog) -> None: + catalog.create_namespace(TEST_TABLE_NAMESPACE) + + runner = CliRunner() + result = runner.invoke(run, ["create", "namespace", TEST_TABLE_NAMESPACE]) + assert result.exit_code == 1 + assert result.output == "Namespace already exists: ('default',)\n" + + def test_rename_table(catalog: InMemoryCatalog) -> None: catalog.create_table( identifier=TEST_TABLE_IDENTIFIER, schema=TEST_TABLE_SCHEMA, - location=TEST_TABLE_LOCATION, partition_spec=TEST_TABLE_PARTITION_SPEC, ) @@ -335,7 +360,6 @@ def test_properties_get_table(catalog: InMemoryCatalog) -> None: catalog.create_table( identifier=TEST_TABLE_IDENTIFIER, schema=TEST_TABLE_SCHEMA, - location=TEST_TABLE_LOCATION, partition_spec=TEST_TABLE_PARTITION_SPEC, properties=TEST_TABLE_PROPERTIES, ) @@ -350,7 +374,6 @@ def test_properties_get_table_specific_property(catalog: InMemoryCatalog) -> Non catalog.create_table( identifier=TEST_TABLE_IDENTIFIER, schema=TEST_TABLE_SCHEMA, - location=TEST_TABLE_LOCATION, partition_spec=TEST_TABLE_PARTITION_SPEC, properties=TEST_TABLE_PROPERTIES, ) @@ -365,7 +388,6 @@ def test_properties_get_table_specific_property_that_doesnt_exist(catalog: InMem catalog.create_table( identifier=TEST_TABLE_IDENTIFIER, schema=TEST_TABLE_SCHEMA, - location=TEST_TABLE_LOCATION, partition_spec=TEST_TABLE_PARTITION_SPEC, properties=TEST_TABLE_PROPERTIES, ) @@ -434,7 +456,6 @@ def test_properties_set_table(catalog: InMemoryCatalog) -> None: catalog.create_table( identifier=TEST_TABLE_IDENTIFIER, schema=TEST_TABLE_SCHEMA, - location=TEST_TABLE_LOCATION, partition_spec=TEST_TABLE_PARTITION_SPEC, ) @@ -475,7 +496,6 @@ def test_properties_remove_table(catalog: InMemoryCatalog) -> None: catalog.create_table( 
identifier=TEST_TABLE_IDENTIFIER, schema=TEST_TABLE_SCHEMA, - location=TEST_TABLE_LOCATION, partition_spec=TEST_TABLE_PARTITION_SPEC, properties=TEST_TABLE_PROPERTIES, ) @@ -490,7 +510,6 @@ def test_properties_remove_table_property_does_not_exists(catalog: InMemoryCatal catalog.create_table( identifier=TEST_TABLE_IDENTIFIER, schema=TEST_TABLE_SCHEMA, - location=TEST_TABLE_LOCATION, partition_spec=TEST_TABLE_PARTITION_SPEC, ) @@ -522,7 +541,6 @@ def test_json_list_namespace(catalog: InMemoryCatalog) -> None: catalog.create_table( identifier=TEST_TABLE_IDENTIFIER, schema=TEST_TABLE_SCHEMA, - location=TEST_TABLE_LOCATION, partition_spec=TEST_TABLE_PARTITION_SPEC, ) @@ -550,12 +568,13 @@ def test_json_describe_namespace_does_not_exists(catalog: InMemoryCatalog) -> No assert result.output == """{"type": "NoSuchNamespaceError", "message": "Namespace does not exist: ('doesnotexist',)"}\n""" -def test_json_describe_table(catalog: InMemoryCatalog) -> None: +def test_json_describe_table(catalog: InMemoryCatalog, mock_datetime_now: None) -> None: catalog.create_table( identifier=TEST_TABLE_IDENTIFIER, schema=TEST_TABLE_SCHEMA, - location=TEST_TABLE_LOCATION, partition_spec=TEST_TABLE_PARTITION_SPEC, + table_uuid=TEST_TABLE_UUID, ) runner = CliRunner() @@ -563,7 +582,7 @@ def test_json_describe_table(catalog: InMemoryCatalog) -> None: assert result.exit_code == 0 assert ( result.output - == """{"identifier":["default","my_table"],"metadata_location":"s3://warehouse/default/my_table/metadata/metadata.json","metadata":{"location":"s3://bucket/test/location","table-uuid":"d20125c8-7284-442c-9aea-15fee620737c","last-updated-ms":1602638573874,"last-column-id":3,"schemas":[{"type":"struct","fields":[{"id":1,"name":"x","type":"long","required":true},{"id":2,"name":"y","type":"long","required":true,"doc":"comment"},{"id":3,"name":"z","type":"long","required":true}],"schema-id":0,"identifier-field-ids":[]}],"current-schema-id":0,"partition-specs":[{"spec-id":0,"fields":[{"source-id":1,"field-id":1000,"transform":"identity","name":"x"}]}],"default-spec-id":0,"last-partition-id":1000,"properties":{},"snapshots":[{"snapshot-id":1925,"timestamp-ms":1602638573822}],"snapshot-log":[],"metadata-log":[],"sort-orders":[{"order-id":0,"fields":[]}],"default-sort-order-id":0,"refs":{},"format-version":1,"schema":{"type":"struct","fields":[{"id":1,"name":"x","type":"long","required":true},{"id":2,"name":"y","type":"long","required":true,"doc":"comment"},{"id":3,"name":"z","type":"long","required":true}],"schema-id":0,"identifier-field-ids":[]},"partition-spec":[{"source-id":1,"field-id":1000,"transform":"identity","name":"x"}]}}\n""" + ==
"""{"identifier":["default","my_table"],"metadata_location":"s3://warehouse/default/my_table/metadata/metadata.json","metadata":{"location":"s3://bucket/test/location","table-uuid":"d20125c8-7284-442c-9aea-15fee620737c","last-updated-ms":1602638573874,"last-column-id":3,"schemas":[{"type":"struct","fields":[{"id":1,"name":"x","type":"long","required":true},{"id":2,"name":"y","type":"long","required":true,"doc":"comment"},{"id":3,"name":"z","type":"long","required":true}],"schema-id":0,"identifier-field-ids":[]}],"current-schema-id":0,"partition-specs":[{"spec-id":0,"fields":[{"source-id":1,"field-id":1000,"transform":"identity","name":"x"}]}],"default-spec-id":0,"last-partition-id":1000,"properties":{},"snapshots":[{"snapshot-id":1925,"timestamp-ms":1602638573822}],"snapshot-log":[],"metadata-log":[],"sort-orders":[{"order-id":0,"fields":[]}],"default-sort-order-id":0,"refs":{},"format-version":2,"schema":{"type":"struct","fields":[{"id":1,"name":"x","type":"long","required":true},{"id":2,"name":"y","type":"long","required":true,"doc":"comment"},{"id":3,"name":"z","type":"long","required":true}],"schema-id":0,"identifier-field-ids":[]},"partition-spec":[{"source-id":1,"field-id":1000,"transform":"identity","name":"x"}]}}\n""" ) @@ -583,7 +602,6 @@ def test_json_schema(catalog: InMemoryCatalog) -> None: catalog.create_table( identifier=TEST_TABLE_IDENTIFIER, schema=TEST_TABLE_SCHEMA, - location=TEST_TABLE_LOCATION, partition_spec=TEST_TABLE_PARTITION_SPEC, ) @@ -609,7 +627,6 @@ def test_json_spec(catalog: InMemoryCatalog) -> None: catalog.create_table( identifier=TEST_TABLE_IDENTIFIER, schema=TEST_TABLE_SCHEMA, - location=TEST_TABLE_LOCATION, partition_spec=TEST_TABLE_PARTITION_SPEC, ) @@ -632,8 +649,8 @@ def test_json_uuid(catalog: InMemoryCatalog) -> None: catalog.create_table( identifier=TEST_TABLE_IDENTIFIER, schema=TEST_TABLE_SCHEMA, - location=TEST_TABLE_LOCATION, partition_spec=TEST_TABLE_PARTITION_SPEC, + table_uuid=TEST_TABLE_UUID, ) runner = CliRunner() @@ -655,14 +672,13 @@ def test_json_location(catalog: InMemoryCatalog) -> None: catalog.create_table( identifier=TEST_TABLE_IDENTIFIER, schema=TEST_TABLE_SCHEMA, - location=TEST_TABLE_LOCATION, partition_spec=TEST_TABLE_PARTITION_SPEC, ) runner = CliRunner() result = runner.invoke(run, ["--output=json", "location", "default.my_table"]) assert result.exit_code == 0 - assert result.output == """"s3://bucket/test/location"\n""" + assert result.output == f'"{catalog._warehouse_location}/default/my_table"\n' def test_json_location_does_not_exists(catalog: InMemoryCatalog) -> None: @@ -678,7 +694,6 @@ def test_json_drop_table(catalog: InMemoryCatalog) -> None: catalog.create_table( identifier=TEST_TABLE_IDENTIFIER, schema=TEST_TABLE_SCHEMA, - location=TEST_TABLE_LOCATION, partition_spec=TEST_TABLE_PARTITION_SPEC, ) @@ -719,7 +734,6 @@ def test_json_rename_table(catalog: InMemoryCatalog) -> None: catalog.create_table( identifier=TEST_TABLE_IDENTIFIER, schema=TEST_TABLE_SCHEMA, - location=TEST_TABLE_LOCATION, partition_spec=TEST_TABLE_PARTITION_SPEC, ) @@ -742,7 +756,6 @@ def test_json_properties_get_table(catalog: InMemoryCatalog) -> None: catalog.create_table( identifier=TEST_TABLE_IDENTIFIER, schema=TEST_TABLE_SCHEMA, - location=TEST_TABLE_LOCATION, partition_spec=TEST_TABLE_PARTITION_SPEC, properties=TEST_TABLE_PROPERTIES, ) @@ -757,7 +770,6 @@ def test_json_properties_get_table_specific_property(catalog: InMemoryCatalog) - catalog.create_table( identifier=TEST_TABLE_IDENTIFIER, schema=TEST_TABLE_SCHEMA, - location=TEST_TABLE_LOCATION, 
partition_spec=TEST_TABLE_PARTITION_SPEC, properties=TEST_TABLE_PROPERTIES, ) @@ -772,7 +784,6 @@ def test_json_properties_get_table_specific_property_that_doesnt_exist(catalog: catalog.create_table( identifier=TEST_TABLE_IDENTIFIER, schema=TEST_TABLE_SCHEMA, - location=TEST_TABLE_LOCATION, partition_spec=TEST_TABLE_PARTITION_SPEC, properties=TEST_TABLE_PROPERTIES, ) @@ -846,7 +857,6 @@ def test_json_properties_set_table(catalog: InMemoryCatalog) -> None: catalog.create_table( identifier=TEST_TABLE_IDENTIFIER, schema=TEST_TABLE_SCHEMA, - location=TEST_TABLE_LOCATION, partition_spec=TEST_TABLE_PARTITION_SPEC, properties=TEST_TABLE_PROPERTIES, ) @@ -892,7 +902,6 @@ def test_json_properties_remove_table(catalog: InMemoryCatalog) -> None: catalog.create_table( identifier=TEST_TABLE_IDENTIFIER, schema=TEST_TABLE_SCHEMA, - location=TEST_TABLE_LOCATION, partition_spec=TEST_TABLE_PARTITION_SPEC, properties=TEST_TABLE_PROPERTIES, ) @@ -907,7 +916,6 @@ def test_json_properties_remove_table_property_does_not_exists(catalog: InMemory catalog.create_table( identifier=TEST_TABLE_IDENTIFIER, schema=TEST_TABLE_SCHEMA, - location=TEST_TABLE_LOCATION, partition_spec=TEST_TABLE_PARTITION_SPEC, properties=TEST_TABLE_PROPERTIES, ) diff --git a/tests/conftest.py b/tests/conftest.py index 1197bf2feb..e090e7c020 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -24,11 +24,14 @@ and the built-in pytest fixture request should be used as an additional argument in the function. The fixture can then be retrieved using `request.getfixturevalue(fixture_name)`. """ + import os import re +import socket import string import uuid from datetime import datetime +from pathlib import Path from random import choice from tempfile import TemporaryDirectory from typing import ( @@ -38,17 +41,15 @@ Generator, List, Optional, - Union, ) -from urllib.parse import urlparse import boto3 import pytest -from moto import mock_dynamodb, mock_glue -from moto.server import ThreadedMotoServer # type: ignore +from moto import mock_aws +from pyspark.sql import SparkSession from pyiceberg import schema -from pyiceberg.catalog import Catalog +from pyiceberg.catalog import Catalog, load_catalog from pyiceberg.catalog.noop import NoopCatalog from pyiceberg.expressions import BoundReference from pyiceberg.io import ( @@ -56,8 +57,6 @@ GCS_PROJECT_ID, GCS_TOKEN, GCS_TOKEN_EXPIRES_AT_MS, - OutputFile, - OutputStream, fsspec, load_file_io, ) @@ -84,9 +83,10 @@ from pyiceberg.utils.datetime import datetime_to_millis if TYPE_CHECKING: - from pytest import ExitCode, Session + import pyarrow as pa + from moto.server import ThreadedMotoServer # type: ignore - from pyiceberg.io.pyarrow import PyArrowFile, PyArrowFileIO + from pyiceberg.io.pyarrow import PyArrowFileIO def pytest_collection_modifyitems(items: List[pytest.Item]) -> None: @@ -265,6 +265,182 @@ def table_schema_nested_with_struct_key_map() -> Schema: ) +@pytest.fixture(scope="session") +def pyarrow_schema_simple_without_ids() -> "pa.Schema": + import pyarrow as pa + + return pa.schema([ + pa.field('foo', pa.string(), nullable=True), + pa.field('bar', pa.int32(), nullable=False), + pa.field('baz', pa.bool_(), nullable=True), + ]) + + +@pytest.fixture(scope="session") +def pyarrow_schema_nested_without_ids() -> "pa.Schema": + import pyarrow as pa + + return pa.schema([ + pa.field('foo', pa.string(), nullable=False), + pa.field('bar', pa.int32(), nullable=False), + pa.field('baz', pa.bool_(), nullable=True), + pa.field('qux', pa.list_(pa.string()), nullable=False), + pa.field( + 'quux', + 
pa.map_( + pa.string(), + pa.map_(pa.string(), pa.int32()), + ), + nullable=False, + ), + pa.field( + 'location', + pa.list_( + pa.struct([ + pa.field('latitude', pa.float32(), nullable=False), + pa.field('longitude', pa.float32(), nullable=False), + ]), + ), + nullable=False, + ), + pa.field( + 'person', + pa.struct([ + pa.field('name', pa.string(), nullable=True), + pa.field('age', pa.int32(), nullable=False), + ]), + nullable=True, + ), + ]) + + +@pytest.fixture(scope="session") +def iceberg_schema_simple() -> Schema: + return Schema( + NestedField(field_id=1, name="foo", field_type=StringType(), required=False), + NestedField(field_id=2, name="bar", field_type=IntegerType(), required=True), + NestedField(field_id=3, name="baz", field_type=BooleanType(), required=False), + ) + + +@pytest.fixture(scope="session") +def iceberg_schema_simple_no_ids() -> Schema: + return Schema( + NestedField(field_id=-1, name="foo", field_type=StringType(), required=False), + NestedField(field_id=-1, name="bar", field_type=IntegerType(), required=True), + NestedField(field_id=-1, name="baz", field_type=BooleanType(), required=False), + ) + + +@pytest.fixture(scope="session") +def iceberg_table_schema_simple() -> Schema: + return Schema( + NestedField(field_id=1, name="foo", field_type=StringType(), required=False), + NestedField(field_id=2, name="bar", field_type=IntegerType(), required=True), + NestedField(field_id=3, name="baz", field_type=BooleanType(), required=False), + schema_id=0, + identifier_field_ids=[], + ) + + +@pytest.fixture(scope="session") +def iceberg_schema_nested() -> Schema: + return Schema( + NestedField(field_id=1, name="foo", field_type=StringType(), required=True), + NestedField(field_id=2, name="bar", field_type=IntegerType(), required=True), + NestedField(field_id=3, name="baz", field_type=BooleanType(), required=False), + NestedField( + field_id=4, + name="qux", + field_type=ListType(element_id=5, element_type=StringType(), element_required=False), + required=True, + ), + NestedField( + field_id=6, + name="quux", + field_type=MapType( + key_id=7, + key_type=StringType(), + value_id=8, + value_type=MapType(key_id=9, key_type=StringType(), value_id=10, value_type=IntegerType(), value_required=False), + value_required=False, + ), + required=True, + ), + NestedField( + field_id=11, + name="location", + field_type=ListType( + element_id=12, + element_type=StructType( + NestedField(field_id=13, name="latitude", field_type=FloatType(), required=True), + NestedField(field_id=14, name="longitude", field_type=FloatType(), required=True), + ), + element_required=False, + ), + required=True, + ), + NestedField( + field_id=15, + name="person", + field_type=StructType( + NestedField(field_id=16, name="name", field_type=StringType(), required=False), + NestedField(field_id=17, name="age", field_type=IntegerType(), required=True), + ), + required=False, + ), + ) + + +@pytest.fixture(scope="session") +def iceberg_schema_nested_no_ids() -> Schema: + return Schema( + NestedField(field_id=-1, name="foo", field_type=StringType(), required=True), + NestedField(field_id=-1, name="bar", field_type=IntegerType(), required=True), + NestedField(field_id=-1, name="baz", field_type=BooleanType(), required=False), + NestedField( + field_id=-1, + name="qux", + field_type=ListType(element_id=-1, element_type=StringType(), element_required=False), + required=True, + ), + NestedField( + field_id=-1, + name="quux", + field_type=MapType( + key_id=-1, + key_type=StringType(), + value_id=-1, + 
value_type=MapType(key_id=-1, key_type=StringType(), value_id=-1, value_type=IntegerType(), value_required=False), + value_required=False, + ), + required=True, + ), + NestedField( + field_id=-1, + name="location", + field_type=ListType( + element_id=-1, + element_type=StructType( + NestedField(field_id=-1, name="latitude", field_type=FloatType(), required=True), + NestedField(field_id=-1, name="longitude", field_type=FloatType(), required=True), + ), + element_required=False, + ), + required=True, + ), + NestedField( + field_id=-1, + name="person", + field_type=StructType( + NestedField(field_id=-1, name="name", field_type=StringType(), required=False), + NestedField(field_id=-1, name="age", field_type=IntegerType(), required=True), + ), + required=False, + ), + ) + + @pytest.fixture(scope="session") def all_avro_types() -> Dict[str, Any]: return { @@ -716,7 +892,7 @@ def metadata_location_gz(tmp_path_factory: pytest.TempPathFactory) -> str: "data_file": { "file_path": "/home/iceberg/warehouse/nyc/taxis_partitioned/data/VendorID=1/00000-633-d8a4223e-dc97-45a1-86e1-adaba6e8abd7-00002.parquet", "file_format": "PARQUET", - "partition": {"VendorID": 1, "tpep_pickup_datetime": 1925}, + "partition": {"VendorID": 1, "tpep_pickup_datetime": None}, "record_count": 95050, "file_size_in_bytes": 1265950, "block_size_in_bytes": 67108864, @@ -1278,40 +1454,6 @@ def simple_map() -> MapType: return MapType(key_id=19, key_type=StringType(), value_id=25, value_type=DoubleType(), value_required=False) -class LocalOutputFile(OutputFile): - """An OutputFile implementation for local files (for test use only).""" - - def __init__(self, location: str) -> None: - parsed_location = urlparse(location) # Create a ParseResult from the uri - if ( - parsed_location.scheme and parsed_location.scheme != "file" - ): # Validate that an uri is provided with a scheme of `file` - raise ValueError("LocalOutputFile location must have a scheme of `file`") - elif parsed_location.netloc: - raise ValueError(f"Network location is not allowed for LocalOutputFile: {parsed_location.netloc}") - - super().__init__(location=location) - self._path = parsed_location.path - - def __len__(self) -> int: - """Return the length of an instance of the LocalOutputFile class.""" - return os.path.getsize(self._path) - - def exists(self) -> bool: - return os.path.exists(self._path) - - def to_input_file(self) -> "PyArrowFile": - from pyiceberg.io.pyarrow import PyArrowFileIO - - return PyArrowFileIO().new_input(location=self.location) - - def create(self, overwrite: bool = False) -> OutputStream: - output_file = open(self._path, "wb" if overwrite else "xb") - if not issubclass(type(output_file), OutputStream): - raise TypeError("Object returned from LocalOutputFile.create(...) 
does not match the OutputStream protocol.") - return output_file - - @pytest.fixture(scope="session") def generated_manifest_entry_file(avro_schema_manifest_entry: Dict[str, Any]) -> Generator[str, None, None]: from fastavro import parse_schema, writer @@ -1586,46 +1728,45 @@ def fixture_aws_credentials() -> Generator[None, None, None]: os.environ.pop("AWS_DEFAULT_REGION") -MOTO_SERVER = ThreadedMotoServer(port=5000) - +@pytest.fixture(scope="session") +def moto_server() -> "ThreadedMotoServer": + from moto.server import ThreadedMotoServer -def pytest_sessionfinish( - session: "Session", - exitstatus: Union[int, "ExitCode"], -) -> None: - if MOTO_SERVER._server_ready: - MOTO_SERVER.stop() + server = ThreadedMotoServer(ip_address="localhost", port=5001) + # this will throw an exception if the port is already in use + with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: + s.bind((server._ip_address, server._port)) -@pytest.fixture(scope="session") -def moto_server() -> ThreadedMotoServer: - MOTO_SERVER.start() - return MOTO_SERVER + server.start() + yield server + server.stop() @pytest.fixture(scope="session") -def moto_endpoint_url(moto_server: ThreadedMotoServer) -> str: +def moto_endpoint_url(moto_server: "ThreadedMotoServer") -> str: _url = f"http://{moto_server._ip_address}:{moto_server._port}" return _url -@pytest.fixture(name="_s3") +@pytest.fixture(name="_s3", scope="function") def fixture_s3(_aws_credentials: None, moto_endpoint_url: str) -> Generator[boto3.client, None, None]: """Yield a mocked S3 client.""" - yield boto3.client("s3", region_name="us-east-1", endpoint_url=moto_endpoint_url) + with mock_aws(): + yield boto3.client("s3", region_name="us-east-1", endpoint_url=moto_endpoint_url) @pytest.fixture(name="_glue") def fixture_glue(_aws_credentials: None) -> Generator[boto3.client, None, None]: """Yield a mocked glue client.""" - with mock_glue(): + with mock_aws(): yield boto3.client("glue", region_name="us-east-1") @pytest.fixture(name="_dynamodb") def fixture_dynamodb(_aws_credentials: None) -> Generator[boto3.client, None, None]: """Yield a mocked DynamoDB client.""" - with mock_dynamodb(): + with mock_aws(): yield boto3.client("dynamodb", region_name="us-east-1") @@ -1687,7 +1828,7 @@ def database_list(database_name: str) -> List[str]: TABLE_METADATA_LOCATION_REGEX = re.compile( r"""s3://test_bucket/my_iceberg_database-[a-z]{20}.db/ my_iceberg_table-[a-z]{20}/metadata/ - 00000-[a-f0-9]{8}-?[a-f0-9]{4}-?4[a-f0-9]{3}-?[89ab][a-f0-9]{3}-?[a-f0-9]{12}.metadata.json""", + [0-9]{5}-[a-f0-9]{8}-?[a-f0-9]{4}-?4[a-f0-9]{3}-?[89ab][a-f0-9]{3}-?[a-f0-9]{12}.metadata.json""", re.X, ) @@ -1717,7 +1858,8 @@ def get_s3_path(bucket_name: str, database_name: Optional[str] = None, table_nam @pytest.fixture(name="s3", scope="module") def fixture_s3_client() -> boto3.client: - yield boto3.client("s3") + with mock_aws(): + yield boto3.client("s3") def clean_up(test_catalog: Catalog) -> None: @@ -1752,6 +1894,11 @@ def example_task(data_file: str) -> FileScanTask: ) +@pytest.fixture(scope="session") +def warehouse(tmp_path_factory: pytest.TempPathFactory) -> Path: + return tmp_path_factory.mktemp("test_sql") + + @pytest.fixture def table_v1(example_table_metadata_v1: Dict[str, Any]) -> Table: table_metadata = TableMetadataV1(**example_table_metadata_v1) @@ -1779,3 +1926,53 @@ def table_v2(example_table_metadata_v2: Dict[str, Any]) -> Table: @pytest.fixture def bound_reference_str() -> BoundReference[str]: return BoundReference(field=NestedField(1, "field", StringType(), 
required=False), accessor=Accessor(position=0, inner=None)) + + +@pytest.fixture(scope="session") +def session_catalog() -> Catalog: + return load_catalog( + "local", + **{ + "type": "rest", + "uri": "http://localhost:8181", + "s3.endpoint": "http://localhost:9000", + "s3.access-key-id": "admin", + "s3.secret-access-key": "password", + }, + ) + + +@pytest.fixture(scope="session") +def spark() -> SparkSession: + import importlib.metadata + import os + + spark_version = ".".join(importlib.metadata.version("pyspark").split(".")[:2]) + scala_version = "2.12" + iceberg_version = "1.4.3" + + os.environ["PYSPARK_SUBMIT_ARGS"] = ( + f"--packages org.apache.iceberg:iceberg-spark-runtime-{spark_version}_{scala_version}:{iceberg_version}," + f"org.apache.iceberg:iceberg-aws-bundle:{iceberg_version} pyspark-shell" + ) + os.environ["AWS_REGION"] = "us-east-1" + os.environ["AWS_ACCESS_KEY_ID"] = "admin" + os.environ["AWS_SECRET_ACCESS_KEY"] = "password" + + spark = ( + SparkSession.builder.appName("PyIceberg integration test") + .config("spark.sql.session.timeZone", "UTC") + .config("spark.sql.extensions", "org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions") + .config("spark.sql.catalog.integration", "org.apache.iceberg.spark.SparkCatalog") + .config("spark.sql.catalog.integration.catalog-impl", "org.apache.iceberg.rest.RESTCatalog") + .config("spark.sql.catalog.integration.cache-enabled", "false") + .config("spark.sql.catalog.integration.uri", "http://localhost:8181") + .config("spark.sql.catalog.integration.io-impl", "org.apache.iceberg.aws.s3.S3FileIO") + .config("spark.sql.catalog.integration.warehouse", "s3://warehouse/wh/") + .config("spark.sql.catalog.integration.s3.endpoint", "http://localhost:9000") + .config("spark.sql.catalog.integration.s3.path-style-access", "true") + .config("spark.sql.defaultCatalog", "integration") + .getOrCreate() + ) + + return spark diff --git a/tests/expressions/test_expressions.py b/tests/expressions/test_expressions.py index bce23bf39c..f277672d87 100644 --- a/tests/expressions/test_expressions.py +++ b/tests/expressions/test_expressions.py @@ -476,7 +476,8 @@ def test_less_than_or_equal_invert() -> None: ) def test_bind(pred: UnboundPredicate[Any], table_schema_simple: Schema) -> None: assert pred.bind(table_schema_simple, case_sensitive=True).term.field == table_schema_simple.find_field( # type: ignore - pred.term.name, case_sensitive=True # type: ignore + pred.term.name, # type: ignore + case_sensitive=True, ) @@ -495,7 +496,8 @@ def test_bind(pred: UnboundPredicate[Any], table_schema_simple: Schema) -> None: ) def test_bind_case_insensitive(pred: UnboundPredicate[Any], table_schema_simple: Schema) -> None: assert pred.bind(table_schema_simple, case_sensitive=False).term.field == table_schema_simple.find_field( # type: ignore - pred.term.name, case_sensitive=False # type: ignore + pred.term.name, # type: ignore + case_sensitive=False, ) diff --git a/tests/expressions/test_parser.py b/tests/expressions/test_parser.py index 3ce2f2226c..0bccc9b80f 100644 --- a/tests/expressions/test_parser.py +++ b/tests/expressions/test_parser.py @@ -17,6 +17,7 @@ import pytest from pyparsing import ParseException +import pyiceberg.expressions.parser as parser from pyiceberg.expressions import ( AlwaysFalse, AlwaysTrue, @@ -37,7 +38,6 @@ NotStartsWith, Or, StartsWith, - parser, ) diff --git a/tests/expressions/test_visitors.py b/tests/expressions/test_visitors.py index 50db90ceac..94bfcf076c 100644 --- a/tests/expressions/test_visitors.py +++ 
b/tests/expressions/test_visitors.py @@ -169,15 +169,11 @@ def visit_not_equal(self, term: BoundTerm[Any], literal: Literal[Any]) -> List[s self.visit_history.append("NOT_EQUAL") return self.visit_history - def visit_greater_than_or_equal( - self, term: BoundTerm[Any], literal: Literal[Any] - ) -> List[str]: # pylint: disable=redefined-outer-name + def visit_greater_than_or_equal(self, term: BoundTerm[Any], literal: Literal[Any]) -> List[str]: # pylint: disable=redefined-outer-name self.visit_history.append("GREATER_THAN_OR_EQUAL") return self.visit_history - def visit_greater_than( - self, term: BoundTerm[Any], literal: Literal[Any] - ) -> List[str]: # pylint: disable=redefined-outer-name + def visit_greater_than(self, term: BoundTerm[Any], literal: Literal[Any]) -> List[str]: # pylint: disable=redefined-outer-name self.visit_history.append("GREATER_THAN") return self.visit_history @@ -185,9 +181,7 @@ def visit_less_than(self, term: BoundTerm[Any], literal: Literal[Any]) -> List[s self.visit_history.append("LESS_THAN") return self.visit_history - def visit_less_than_or_equal( - self, term: BoundTerm[Any], literal: Literal[Any] - ) -> List[str]: # pylint: disable=redefined-outer-name + def visit_less_than_or_equal(self, term: BoundTerm[Any], literal: Literal[Any]) -> List[str]: # pylint: disable=redefined-outer-name self.visit_history.append("LESS_THAN_OR_EQUAL") return self.visit_history diff --git a/tests/integration/test_partition_evolution.py b/tests/integration/test_partition_evolution.py new file mode 100644 index 0000000000..85ae32374d --- /dev/null +++ b/tests/integration/test_partition_evolution.py @@ -0,0 +1,490 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
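+# Integration tests for partition spec evolution through UpdateSpec; every test below is parametrized (via pytest.lazy_fixture) over both the REST catalog on localhost:8181 and the Hive metastore catalog on localhost:9083.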
+# pylint:disable=redefined-outer-name + +import pytest + +from pyiceberg.catalog import Catalog, load_catalog +from pyiceberg.exceptions import NoSuchTableError +from pyiceberg.partitioning import PartitionField, PartitionSpec +from pyiceberg.schema import Schema +from pyiceberg.table import Table +from pyiceberg.transforms import ( + BucketTransform, + DayTransform, + HourTransform, + IdentityTransform, + MonthTransform, + TruncateTransform, + VoidTransform, + YearTransform, +) +from pyiceberg.types import ( + LongType, + NestedField, + StringType, + TimestampType, +) + + +@pytest.fixture() +def catalog_rest() -> Catalog: + return load_catalog( + "local", + **{ + "type": "rest", + "uri": "http://localhost:8181", + "s3.endpoint": "http://localhost:9000", + "s3.access-key-id": "admin", + "s3.secret-access-key": "password", + }, + ) + + +@pytest.fixture() +def catalog_hive() -> Catalog: + return load_catalog( + "local", + **{ + "type": "hive", + "uri": "http://localhost:9083", + "s3.endpoint": "http://localhost:9000", + "s3.access-key-id": "admin", + "s3.secret-access-key": "password", + }, + ) + + +def _simple_table(catalog: Catalog, table_schema_simple: Schema) -> Table: + return _create_table_with_schema(catalog, table_schema_simple, "1") + + +def _table(catalog: Catalog) -> Table: + schema_with_timestamp = Schema( + NestedField(1, "id", LongType(), required=False), + NestedField(2, "event_ts", TimestampType(), required=False), + NestedField(3, "str", StringType(), required=False), + ) + return _create_table_with_schema(catalog, schema_with_timestamp, "1") + + +def _table_v2(catalog: Catalog) -> Table: + schema_with_timestamp = Schema( + NestedField(1, "id", LongType(), required=False), + NestedField(2, "event_ts", TimestampType(), required=False), + NestedField(3, "str", StringType(), required=False), + ) + return _create_table_with_schema(catalog, schema_with_timestamp, "2") + + +def _create_table_with_schema(catalog: Catalog, schema: Schema, format_version: str) -> Table: + tbl_name = "default.test_schema_evolution" + try: + catalog.drop_table(tbl_name) + except NoSuchTableError: + pass + return catalog.create_table(identifier=tbl_name, schema=schema, properties={"format-version": format_version}) + + +@pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_add_identity_partition(catalog: Catalog, table_schema_simple: Schema) -> None: + simple_table = _simple_table(catalog, table_schema_simple) + simple_table.update_spec().add_identity("foo").commit() + specs = simple_table.specs() + assert len(specs) == 2 + spec = simple_table.spec() + assert spec.spec_id == 1 + assert spec.last_assigned_field_id == 1000 + + +@pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_add_year(catalog: Catalog) -> None: + table = _table(catalog) + table.update_spec().add_field("event_ts", YearTransform(), "year_transform").commit() + _validate_new_partition_fields(table, 1000, 1, 1000, PartitionField(2, 1000, YearTransform(), "year_transform")) + + +@pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_add_month(catalog: Catalog) -> None: + table = _table(catalog) + table.update_spec().add_field("event_ts", MonthTransform(), "month_transform").commit() + _validate_new_partition_fields(table, 1000, 1, 1000, PartitionField(2, 1000, 
MonthTransform(), "month_transform")) + + +@pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_add_day(catalog: Catalog) -> None: + table = _table(catalog) + table.update_spec().add_field("event_ts", DayTransform(), "day_transform").commit() + _validate_new_partition_fields(table, 1000, 1, 1000, PartitionField(2, 1000, DayTransform(), "day_transform")) + + +@pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_add_hour(catalog: Catalog) -> None: + table = _table(catalog) + table.update_spec().add_field("event_ts", HourTransform(), "hour_transform").commit() + _validate_new_partition_fields(table, 1000, 1, 1000, PartitionField(2, 1000, HourTransform(), "hour_transform")) + + +@pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_add_bucket(catalog: Catalog, table_schema_simple: Schema) -> None: + simple_table = _create_table_with_schema(catalog, table_schema_simple, "1") + simple_table.update_spec().add_field("foo", BucketTransform(12), "bucket_transform").commit() + _validate_new_partition_fields(simple_table, 1000, 1, 1000, PartitionField(1, 1000, BucketTransform(12), "bucket_transform")) + + +@pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_add_truncate(catalog: Catalog, table_schema_simple: Schema) -> None: + simple_table = _create_table_with_schema(catalog, table_schema_simple, "1") + simple_table.update_spec().add_field("foo", TruncateTransform(1), "truncate_transform").commit() + _validate_new_partition_fields( + simple_table, 1000, 1, 1000, PartitionField(1, 1000, TruncateTransform(1), "truncate_transform") + ) + + +@pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_multiple_adds(catalog: Catalog) -> None: + table = _table(catalog) + table.update_spec().add_identity("id").add_field("event_ts", HourTransform(), "hourly_partitioned").add_field( + "str", TruncateTransform(2), "truncate_str" + ).commit() + _validate_new_partition_fields( + table, + 1002, + 1, + 1002, + PartitionField(1, 1000, IdentityTransform(), "id"), + PartitionField(2, 1001, HourTransform(), "hourly_partitioned"), + PartitionField(3, 1002, TruncateTransform(2), "truncate_str"), + ) + + +@pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_add_hour_to_day(catalog: Catalog) -> None: + table = _table(catalog) + table.update_spec().add_field("event_ts", DayTransform(), "daily_partitioned").commit() + table.update_spec().add_field("event_ts", HourTransform(), "hourly_partitioned").commit() + _validate_new_partition_fields( + table, + 1001, + 2, + 1001, + PartitionField(2, 1000, DayTransform(), "daily_partitioned"), + PartitionField(2, 1001, HourTransform(), "hourly_partitioned"), + ) + + +@pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_add_multiple_buckets(catalog: Catalog) -> None: + table = _table(catalog) + table.update_spec().add_field("id", BucketTransform(16)).add_field("id", BucketTransform(4)).commit() + _validate_new_partition_fields( + table, + 
1001, + 1, + 1001, + PartitionField(1, 1000, BucketTransform(16), "id_bucket_16"), + PartitionField(1, 1001, BucketTransform(4), "id_bucket_4"), + ) + + +@pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_remove_identity(catalog: Catalog) -> None: + table = _table(catalog) + table.update_spec().add_identity("id").commit() + table.update_spec().remove_field("id").commit() + assert len(table.specs()) == 3 + assert table.spec().spec_id == 2 + assert table.spec() == PartitionSpec( + PartitionField(source_id=1, field_id=1000, transform=VoidTransform(), name='id'), spec_id=2 + ) + + +@pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_remove_identity_v2(catalog: Catalog) -> None: + table_v2 = _table_v2(catalog) + table_v2.update_spec().add_identity("id").commit() + table_v2.update_spec().remove_field("id").commit() + assert len(table_v2.specs()) == 2 + assert table_v2.spec().spec_id == 0 + assert table_v2.spec() == PartitionSpec(spec_id=0) + + +@pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_remove_bucket(catalog: Catalog) -> None: + table = _table(catalog) + with table.update_spec() as update: + update.add_field("id", BucketTransform(16), "bucketed_id") + update.add_field("event_ts", DayTransform(), "day_ts") + with table.update_spec() as remove: + remove.remove_field("bucketed_id") + + assert len(table.specs()) == 3 + _validate_new_partition_fields( + table, + 1001, + 2, + 1001, + PartitionField(source_id=1, field_id=1000, transform=VoidTransform(), name='bucketed_id'), + PartitionField(source_id=2, field_id=1001, transform=DayTransform(), name='day_ts'), + ) + + +@pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_remove_bucket_v2(catalog: Catalog) -> None: + table_v2 = _table_v2(catalog) + with table_v2.update_spec() as update: + update.add_field("id", BucketTransform(16), "bucketed_id") + update.add_field("event_ts", DayTransform(), "day_ts") + with table_v2.update_spec() as remove: + remove.remove_field("bucketed_id") + assert len(table_v2.specs()) == 3 + _validate_new_partition_fields( + table_v2, 1001, 2, 1001, PartitionField(source_id=2, field_id=1001, transform=DayTransform(), name='day_ts') + ) + + +@pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_remove_day(catalog: Catalog) -> None: + table = _table(catalog) + with table.update_spec() as update: + update.add_field("id", BucketTransform(16), "bucketed_id") + update.add_field("event_ts", DayTransform(), "day_ts") + with table.update_spec() as remove: + remove.remove_field("day_ts") + + assert len(table.specs()) == 3 + _validate_new_partition_fields( + table, + 1001, + 2, + 1001, + PartitionField(source_id=1, field_id=1000, transform=BucketTransform(16), name='bucketed_id'), + PartitionField(source_id=2, field_id=1001, transform=VoidTransform(), name='day_ts'), + ) + + +@pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_remove_day_v2(catalog: Catalog) -> None: + table_v2 = _table_v2(catalog) + with table_v2.update_spec() as update: + update.add_field("id", 
BucketTransform(16), "bucketed_id")
+        update.add_field("event_ts", DayTransform(), "day_ts")
+    with table_v2.update_spec() as remove:
+        remove.remove_field("day_ts")
+    assert len(table_v2.specs()) == 3
+    _validate_new_partition_fields(
+        table_v2, 1000, 2, 1001, PartitionField(source_id=1, field_id=1000, transform=BucketTransform(16), name='bucketed_id')
+    )
+
+
+@pytest.mark.integration
+@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')])
+def test_rename(catalog: Catalog) -> None:
+    table = _table(catalog)
+    table.update_spec().add_identity("id").commit()
+    table.update_spec().rename_field("id", "sharded_id").commit()
+    assert len(table.specs()) == 3
+    assert table.spec().spec_id == 2
+    _validate_new_partition_fields(table, 1000, 2, 1000, PartitionField(1, 1000, IdentityTransform(), "sharded_id"))
+
+
+@pytest.mark.integration
+@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')])
+def test_cannot_add_and_remove(catalog: Catalog) -> None:
+    table = _table(catalog)
+    with pytest.raises(ValueError) as exc_info:
+        table.update_spec().add_identity("id").remove_field("id").commit()
+    assert "Cannot delete newly added field id" in str(exc_info.value)
+
+
+@pytest.mark.integration
+@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')])
+def test_cannot_add_redundant_time_partition(catalog: Catalog) -> None:
+    table = _table(catalog)
+    with pytest.raises(ValueError) as exc_info:
+        table.update_spec().add_field("event_ts", YearTransform(), "year_transform").add_field(
+            "event_ts", HourTransform(), "hour_transform"
+        ).commit()
+    assert "Cannot add time partition field: hour_transform conflicts with year_transform" in str(exc_info.value)
+
+
+@pytest.mark.integration
+@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')])
+def test_cannot_delete_and_rename(catalog: Catalog) -> None:
+    table = _table(catalog)
+    with pytest.raises(ValueError) as exc_info:
+        table.update_spec().add_identity("id").commit()
+        table.update_spec().remove_field("id").rename_field("id", "sharded_id").commit()
+    assert "Cannot delete and rename partition field id" in str(exc_info.value)
+
+
+@pytest.mark.integration
+@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')])
+def test_cannot_rename_and_delete(catalog: Catalog) -> None:
+    table = _table(catalog)
+    with pytest.raises(ValueError) as exc_info:
+        table.update_spec().add_identity("id").commit()
+        table.update_spec().rename_field("id", "sharded_id").remove_field("id").commit()
+    assert "Cannot rename and delete field id" in str(exc_info.value)
+
+
+@pytest.mark.integration
+@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')])
+def test_cannot_add_same_transform_for_same_field(catalog: Catalog) -> None:
+    table = _table(catalog)
+    with pytest.raises(ValueError) as exc_info:
+        table.update_spec().add_field("str", TruncateTransform(4), "truncated_str").add_field(
+            "str", TruncateTransform(4)
+        ).commit()
+    assert "Already added partition" in str(exc_info.value)
+
+
+@pytest.mark.integration
+@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')])
+def test_cannot_add_same_field_multiple_times(catalog: Catalog) -> None:
+    table = _table(catalog)
+    with
pytest.raises(ValueError) as exc_info: + table.update_spec().add_field("id", IdentityTransform(), "duplicate").add_field( + "id", IdentityTransform(), "duplicate" + ).commit() + assert "Already added partition" in str(exc_info.value) + + +@pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_cannot_add_multiple_specs_same_name(catalog: Catalog) -> None: + table = _table(catalog) + with pytest.raises(ValueError) as exc_info: + table.update_spec().add_field("id", IdentityTransform(), "duplicate").add_field( + "event_ts", IdentityTransform(), "duplicate" + ).commit() + assert "Already added partition" in str(exc_info.value) + + +@pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_change_specs_and_schema_transaction(catalog: Catalog) -> None: + table = _table(catalog) + with table.transaction() as transaction: + with transaction.update_spec() as update_spec: + update_spec.add_identity("id").add_field("event_ts", HourTransform(), "hourly_partitioned").add_field( + "str", TruncateTransform(2), "truncate_str" + ) + + with transaction.update_schema() as update_schema: + update_schema.add_column("col_string", StringType()) + + _validate_new_partition_fields( + table, + 1002, + 1, + 1002, + PartitionField(1, 1000, IdentityTransform(), "id"), + PartitionField(2, 1001, HourTransform(), "hourly_partitioned"), + PartitionField(3, 1002, TruncateTransform(2), "truncate_str"), + ) + + assert table.schema() == Schema( + NestedField(field_id=1, name='id', field_type=LongType(), required=False), + NestedField(field_id=2, name='event_ts', field_type=TimestampType(), required=False), + NestedField(field_id=3, name='str', field_type=StringType(), required=False), + NestedField(field_id=4, name='col_string', field_type=StringType(), required=False), + identifier_field_ids=[], + ) + assert table.schema().schema_id == 1 + + +@pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_multiple_adds_and_remove_v1(catalog: Catalog) -> None: + table = _table(catalog) + with table.update_spec() as update: + update.add_field("id", BucketTransform(16), "bucketed_id") + update.add_field("event_ts", DayTransform(), "day_ts") + with table.update_spec() as update: + update.remove_field("day_ts").remove_field("bucketed_id") + with table.update_spec() as update: + update.add_field("str", TruncateTransform(2), "truncated_str") + _validate_new_partition_fields( + table, + 1002, + 3, + 1002, + PartitionField(1, 1000, VoidTransform(), "bucketed_id"), + PartitionField(2, 1001, VoidTransform(), "day_ts"), + PartitionField(3, 1002, TruncateTransform(2), "truncated_str"), + ) + + +@pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_multiple_adds_and_remove_v2(catalog: Catalog) -> None: + table_v2 = _table_v2(catalog) + with table_v2.update_spec() as update: + update.add_field("id", BucketTransform(16), "bucketed_id") + update.add_field("event_ts", DayTransform(), "day_ts") + with table_v2.update_spec() as update: + update.remove_field("day_ts").remove_field("bucketed_id") + with table_v2.update_spec() as update: + update.add_field("str", TruncateTransform(2), "truncated_str") + _validate_new_partition_fields(table_v2, 1002, 2, 1002, PartitionField(3, 1002, 
TruncateTransform(2), "truncated_str")) + + +@pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_multiple_remove_and_add_reuses_v2(catalog: Catalog) -> None: + table_v2 = _table_v2(catalog) + with table_v2.update_spec() as update: + update.add_field("id", BucketTransform(16), "bucketed_id") + update.add_field("event_ts", DayTransform(), "day_ts") + with table_v2.update_spec() as update: + update.remove_field("day_ts").remove_field("bucketed_id") + with table_v2.update_spec() as update: + update.add_field("id", BucketTransform(16), "bucketed_id") + _validate_new_partition_fields(table_v2, 1000, 2, 1001, PartitionField(1, 1000, BucketTransform(16), "bucketed_id")) + + +def _validate_new_partition_fields( + table: Table, + expected_spec_last_assigned_field_id: int, + expected_spec_id: int, + expected_metadata_last_assigned_field_id: int, + *expected_partition_fields: PartitionField, +) -> None: + spec = table.spec() + assert spec.spec_id == expected_spec_id + assert spec.last_assigned_field_id == expected_spec_last_assigned_field_id + assert table.last_partition_id() == expected_metadata_last_assigned_field_id + assert len(spec.fields) == len(expected_partition_fields) + for i in range(len(spec.fields)): + assert spec.fields[i] == expected_partition_fields[i] diff --git a/tests/integration/test_partitioning_key.py b/tests/integration/test_partitioning_key.py new file mode 100644 index 0000000000..12056bac1e --- /dev/null +++ b/tests/integration/test_partitioning_key.py @@ -0,0 +1,772 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
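+#
+# These tests feed raw Python values through each partition transform and then
+# check two things: key.partition, the Record of transformed values that ends up
+# in the metadata, and key.to_path(), the URL-quoted hive-style "name=value"
+# path segment. The time-based expectations are ordinals relative to the Unix
+# epoch: 2023-01-01 is day 19358 (53 * 365 + 13 leap days), month
+# (2023 - 1970) * 12 = 636, and 2023-01-01T11 is hour 19358 * 24 + 11 = 464603.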
+# pylint:disable=redefined-outer-name +import uuid +from datetime import date, datetime, timedelta, timezone +from decimal import Decimal +from typing import Any, List + +import pytest +from pyspark.sql import SparkSession +from pyspark.sql.utils import AnalysisException + +from pyiceberg.catalog import Catalog +from pyiceberg.partitioning import PartitionField, PartitionFieldValue, PartitionKey, PartitionSpec +from pyiceberg.schema import Schema +from pyiceberg.transforms import ( + BucketTransform, + DayTransform, + HourTransform, + IdentityTransform, + MonthTransform, + TruncateTransform, + YearTransform, +) +from pyiceberg.typedef import Record +from pyiceberg.types import ( + BinaryType, + BooleanType, + DateType, + DecimalType, + DoubleType, + FixedType, + FloatType, + IntegerType, + LongType, + NestedField, + StringType, + TimestampType, + TimestamptzType, + UUIDType, +) + +TABLE_SCHEMA = Schema( + NestedField(field_id=1, name="boolean_field", field_type=BooleanType(), required=False), + NestedField(field_id=2, name="string_field", field_type=StringType(), required=False), + NestedField(field_id=3, name="string_long_field", field_type=StringType(), required=False), + NestedField(field_id=4, name="int_field", field_type=IntegerType(), required=False), + NestedField(field_id=5, name="long_field", field_type=LongType(), required=False), + NestedField(field_id=6, name="float_field", field_type=FloatType(), required=False), + NestedField(field_id=7, name="double_field", field_type=DoubleType(), required=False), + NestedField(field_id=8, name="timestamp_field", field_type=TimestampType(), required=False), + NestedField(field_id=9, name="timestamptz_field", field_type=TimestamptzType(), required=False), + NestedField(field_id=10, name="date_field", field_type=DateType(), required=False), + # NestedField(field_id=11, name="time", field_type=TimeType(), required=False), + NestedField(field_id=11, name="binary_field", field_type=BinaryType(), required=False), + NestedField(field_id=12, name="fixed_field", field_type=FixedType(16), required=False), + NestedField(field_id=13, name="decimal_field", field_type=DecimalType(5, 2), required=False), + NestedField(field_id=14, name="uuid_field", field_type=UUIDType(), required=False), +) + + +identifier = "default.test_table" + + +@pytest.mark.parametrize( + "partition_fields, partition_values, expected_partition_record, expected_hive_partition_path_slice, spark_create_table_sql_for_justification, spark_data_insert_sql_for_justification", + [ + # # Identity Transform + ( + [PartitionField(source_id=1, field_id=1001, transform=IdentityTransform(), name="boolean_field")], + [False], + Record(boolean_field=False), + "boolean_field=false", + f"""CREATE TABLE {identifier} ( + boolean_field boolean, + string_field string + ) + USING iceberg + PARTITIONED BY ( + identity(boolean_field) -- Partitioning by 'boolean_field' + ) + """, + f"""INSERT INTO {identifier} + VALUES + (false, 'Boolean field set to false'); + """, + ), + ( + [PartitionField(source_id=2, field_id=1001, transform=IdentityTransform(), name="string_field")], + ["sample_string"], + Record(string_field="sample_string"), + "string_field=sample_string", + f"""CREATE TABLE {identifier} ( + string_field string, + another_string_field string + ) + USING iceberg + PARTITIONED BY ( + identity(string_field) + ) + """, + f"""INSERT INTO {identifier} + VALUES + ('sample_string', 'Another string value') + """, + ), + ( + [PartitionField(source_id=4, field_id=1001, transform=IdentityTransform(), 
name="int_field")], + [42], + Record(int_field=42), + "int_field=42", + f"""CREATE TABLE {identifier} ( + int_field int, + string_field string + ) + USING iceberg + PARTITIONED BY ( + identity(int_field) + ) + """, + f"""INSERT INTO {identifier} + VALUES + (42, 'Associated string value for int 42') + """, + ), + ( + [PartitionField(source_id=5, field_id=1001, transform=IdentityTransform(), name="long_field")], + [1234567890123456789], + Record(long_field=1234567890123456789), + "long_field=1234567890123456789", + f"""CREATE TABLE {identifier} ( + long_field bigint, + string_field string + ) + USING iceberg + PARTITIONED BY ( + identity(long_field) + ) + """, + f"""INSERT INTO {identifier} + VALUES + (1234567890123456789, 'Associated string value for long 1234567890123456789') + """, + ), + ( + [PartitionField(source_id=6, field_id=1001, transform=IdentityTransform(), name="float_field")], + [3.14], + Record(float_field=3.14), + "float_field=3.14", + # spark writes differently as pyiceberg, Record[float_field=3.140000104904175], path:float_field=3.14 (Record has difference) + # so justification (compare expected value with spark behavior) would fail. + None, + None, + # f"""CREATE TABLE {identifier} ( + # float_field float, + # string_field string + # ) + # USING iceberg + # PARTITIONED BY ( + # identity(float_field) + # ) + # """, + # f"""INSERT INTO {identifier} + # VALUES + # (3.14, 'Associated string value for float 3.14') + # """ + ), + ( + [PartitionField(source_id=7, field_id=1001, transform=IdentityTransform(), name="double_field")], + [6.282], + Record(double_field=6.282), + "double_field=6.282", + # spark writes differently as pyiceberg, Record[double_field=6.2820000648498535] path:double_field=6.282 (Record has difference) + # so justification (compare expected value with spark behavior) would fail. 
+            None,
+            None,
+            # f"""CREATE TABLE {identifier} (
+            #     double_field double,
+            #     string_field string
+            # )
+            # USING iceberg
+            # PARTITIONED BY (
+            #     identity(double_field)
+            # )
+            # """,
+            # f"""INSERT INTO {identifier}
+            # VALUES
+            # (6.282, 'Associated string value for double 6.282')
+            # """
+        ),
+        (
+            [PartitionField(source_id=8, field_id=1001, transform=IdentityTransform(), name="timestamp_field")],
+            [datetime(2023, 1, 1, 12, 0, 1, 999)],
+            Record(timestamp_field=1672574401000999),
+            "timestamp_field=2023-01-01T12%3A00%3A01.000999",
+            f"""CREATE TABLE {identifier} (
+                timestamp_field timestamp_ntz,
+                string_field string
+            )
+            USING iceberg
+            PARTITIONED BY (
+                identity(timestamp_field)
+            )
+            """,
+            f"""INSERT INTO {identifier}
+            VALUES
+            (CAST('2023-01-01 12:00:01.000999' AS TIMESTAMP_NTZ), 'Associated string value for timestamp 2023-01-01T12:00:00')
+            """,
+        ),
+        (
+            [PartitionField(source_id=8, field_id=1001, transform=IdentityTransform(), name="timestamp_field")],
+            [datetime(2023, 1, 1, 12, 0, 1)],
+            Record(timestamp_field=1672574401000000),
+            "timestamp_field=2023-01-01T12%3A00%3A01",
+            f"""CREATE TABLE {identifier} (
+                timestamp_field timestamp_ntz,
+                string_field string
+            )
+            USING iceberg
+            PARTITIONED BY (
+                identity(timestamp_field)
+            )
+            """,
+            f"""INSERT INTO {identifier}
+            VALUES
+            (CAST('2023-01-01 12:00:01' AS TIMESTAMP_NTZ), 'Associated string value for timestamp 2023-01-01T12:00:00')
+            """,
+        ),
+        (
+            [PartitionField(source_id=8, field_id=1001, transform=IdentityTransform(), name="timestamp_field")],
+            [datetime(2023, 1, 1, 12, 0, 0)],
+            Record(timestamp_field=1672574400000000),
+            "timestamp_field=2023-01-01T12%3A00%3A00",
+            # Spark writes this differently from pyiceberg, so the justification step (comparing the expected value
+            # with Spark's behavior) would fail:
+            # AssertionError: assert 'timestamp_field=2023-01-01T12%3A00%3A00' in 's3://warehouse/default/test_table/data/timestamp_field=2023-01-01T12%3A00/00000-5-f9dca69a-9fb7-4830-9ef6-62d3d7afc09e-00001.parquet'
+            # TLDR: CAST('2023-01-01 12:00:00' AS TIMESTAMP_NTZ) becomes 2023-01-01T12:00 in the hive partition path when Spark writes it (without the seconds).
+            None,
+            None,
+            # f"""CREATE TABLE {identifier} (
+            #     timestamp_field timestamp_ntz,
+            #     string_field string
+            # )
+            # USING iceberg
+            # PARTITIONED BY (
+            #     identity(timestamp_field)
+            # )
+            # """,
+            # f"""INSERT INTO {identifier}
+            # VALUES
+            # (CAST('2023-01-01 12:00:00' AS TIMESTAMP_NTZ), 'Associated string value for timestamp 2023-01-01T12:00:00')
+            # """
+        ),
+        (
+            [PartitionField(source_id=9, field_id=1001, transform=IdentityTransform(), name="timestamptz_field")],
+            [datetime(2023, 1, 1, 12, 0, 1, 999, tzinfo=timezone(timedelta(hours=3)))],
+            Record(timestamptz_field=1672563601000999),
+            "timestamptz_field=2023-01-01T09%3A00%3A01.000999%2B00%3A00",
+            # Spark writes this differently from pyiceberg, so the justification step (comparing the expected value
+            # with Spark's behavior) would fail:
+            # AssertionError: assert 'timestamptz_field=2023-01-01T09%3A00%3A01.000999%2B00%3A00' in 's3://warehouse/default/test_table/data/timestamptz_field=2023-01-01T09%3A00%3A01.000999Z/00000-5-b710fc4d-66b6-47f1-b8ae-6208f8aaa2d4-00001.parquet'
+            # TLDR: CAST('2023-01-01 12:00:01.000999+03:00' AS TIMESTAMP) becomes 2023-01-01T09:00:01.000999Z in the hive partition path when Spark writes it, while pyiceberg renders timestamptz_field=2023-01-01T09:00:01.000999+00:00.
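+            # Note: both writers store the same UTC instant (1672563601000999 micros,
+            # i.e. 2023-01-01T09:00:01.000999 UTC once the +03:00 offset is normalized);
+            # they differ only in rendering the zero offset as 'Z' versus '+00:00', so it
+            # is the path comparison that would fail, not the Record comparison.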
+ None, + None, + # f"""CREATE TABLE {identifier} ( + # timestamptz_field timestamp, + # string_field string + # ) + # USING iceberg + # PARTITIONED BY ( + # identity(timestamptz_field) + # ) + # """, + # f"""INSERT INTO {identifier} + # VALUES + # (CAST('2023-01-01 12:00:01.000999+03:00' AS TIMESTAMP), 'Associated string value for timestamp 2023-01-01 12:00:01.000999+03:00') + # """ + ), + ( + [PartitionField(source_id=10, field_id=1001, transform=IdentityTransform(), name="date_field")], + [date(2023, 1, 1)], + Record(date_field=19358), + "date_field=2023-01-01", + f"""CREATE TABLE {identifier} ( + date_field date, + string_field string + ) + USING iceberg + PARTITIONED BY ( + identity(date_field) + ) + """, + f"""INSERT INTO {identifier} + VALUES + (CAST('2023-01-01' AS DATE), 'Associated string value for date 2023-01-01') + """, + ), + ( + [PartitionField(source_id=14, field_id=1001, transform=IdentityTransform(), name="uuid_field")], + [uuid.UUID("f47ac10b-58cc-4372-a567-0e02b2c3d479")], + Record(uuid_field="f47ac10b-58cc-4372-a567-0e02b2c3d479"), + "uuid_field=f47ac10b-58cc-4372-a567-0e02b2c3d479", + f"""CREATE TABLE {identifier} ( + uuid_field string, + string_field string + ) + USING iceberg + PARTITIONED BY ( + identity(uuid_field) + ) + """, + f"""INSERT INTO {identifier} + VALUES + ('f47ac10b-58cc-4372-a567-0e02b2c3d479', 'Associated string value for UUID f47ac10b-58cc-4372-a567-0e02b2c3d479') + """, + ), + ( + [PartitionField(source_id=11, field_id=1001, transform=IdentityTransform(), name="binary_field")], + [b'example'], + Record(binary_field=b'example'), + "binary_field=ZXhhbXBsZQ%3D%3D", + f"""CREATE TABLE {identifier} ( + binary_field binary, + string_field string + ) + USING iceberg + PARTITIONED BY ( + identity(binary_field) + ) + """, + f"""INSERT INTO {identifier} + VALUES + (CAST('example' AS BINARY), 'Associated string value for binary `example`') + """, + ), + ( + [PartitionField(source_id=13, field_id=1001, transform=IdentityTransform(), name="decimal_field")], + [Decimal('123.45')], + Record(decimal_field=Decimal('123.45')), + "decimal_field=123.45", + f"""CREATE TABLE {identifier} ( + decimal_field decimal(5,2), + string_field string + ) + USING iceberg + PARTITIONED BY ( + identity(decimal_field) + ) + """, + f"""INSERT INTO {identifier} + VALUES + (123.45, 'Associated string value for decimal 123.45') + """, + ), + # # Year Month Day Hour Transform + # Month Transform + ( + [PartitionField(source_id=8, field_id=1001, transform=MonthTransform(), name="timestamp_field_month")], + [datetime(2023, 1, 1, 11, 55, 59, 999999)], + Record(timestamp_field_month=((2023 - 1970) * 12)), + "timestamp_field_month=2023-01", + f"""CREATE TABLE {identifier} ( + timestamp_field timestamp_ntz, + string_field string + ) + USING iceberg + PARTITIONED BY ( + month(timestamp_field) -- Partitioning by month from 'timestamp_field' + ) + """, + f"""INSERT INTO {identifier} + VALUES + (CAST('2023-01-01 11:55:59.999999' AS TIMESTAMP_NTZ), 'Event at 2023-01-01 11:55:59.999999'); + """, + ), + ( + [PartitionField(source_id=9, field_id=1001, transform=MonthTransform(), name="timestamptz_field_month")], + [datetime(2023, 1, 1, 12, 0, 1, 999, tzinfo=timezone(timedelta(hours=3)))], + Record(timestamptz_field_month=((2023 - 1970) * 12 + 1 - 1)), + "timestamptz_field_month=2023-01", + f"""CREATE TABLE {identifier} ( + timestamptz_field timestamp, + string_field string + ) + USING iceberg + PARTITIONED BY ( + month(timestamptz_field) + ) + """, + f"""INSERT INTO {identifier} + VALUES + 
(CAST('2023-01-01 12:00:01.000999+03:00' AS TIMESTAMP), 'Event at 2023-01-01 12:00:01.000999+03:00'); + """, + ), + ( + [PartitionField(source_id=10, field_id=1001, transform=MonthTransform(), name="date_field_month")], + [date(2023, 1, 1)], + Record(date_field_month=((2023 - 1970) * 12)), + "date_field_month=2023-01", + f"""CREATE TABLE {identifier} ( + date_field date, + string_field string + ) + USING iceberg + PARTITIONED BY ( + month(date_field) -- Partitioning by month from 'date_field' + ) + """, + f"""INSERT INTO {identifier} + VALUES + (CAST('2023-01-01' AS DATE), 'Event on 2023-01-01'); + """, + ), + # Year Transform + ( + [PartitionField(source_id=8, field_id=1001, transform=YearTransform(), name="timestamp_field_year")], + [datetime(2023, 1, 1, 11, 55, 59, 999999)], + Record(timestamp_field_year=(2023 - 1970)), + "timestamp_field_year=2023", + f"""CREATE TABLE {identifier} ( + timestamp_field timestamp, + string_field string + ) + USING iceberg + PARTITIONED BY ( + year(timestamp_field) -- Partitioning by year from 'timestamp_field' + ) + """, + f"""INSERT INTO {identifier} + VALUES + (CAST('2023-01-01 11:55:59.999999' AS TIMESTAMP), 'Event at 2023-01-01 11:55:59.999999'); + """, + ), + ( + [PartitionField(source_id=9, field_id=1001, transform=YearTransform(), name="timestamptz_field_year")], + [datetime(2023, 1, 1, 12, 0, 1, 999, tzinfo=timezone(timedelta(hours=3)))], + Record(timestamptz_field_year=53), + "timestamptz_field_year=2023", + f"""CREATE TABLE {identifier} ( + timestamptz_field timestamp, + string_field string + ) + USING iceberg + PARTITIONED BY ( + year(timestamptz_field) + ) + """, + f"""INSERT INTO {identifier} + VALUES + (CAST('2023-01-01 12:00:01.000999+03:00' AS TIMESTAMP), 'Event at 2023-01-01 12:00:01.000999+03:00'); + """, + ), + ( + [PartitionField(source_id=10, field_id=1001, transform=YearTransform(), name="date_field_year")], + [date(2023, 1, 1)], + Record(date_field_year=(2023 - 1970)), + "date_field_year=2023", + f"""CREATE TABLE {identifier} ( + date_field date, + string_field string + ) + USING iceberg + PARTITIONED BY ( + year(date_field) -- Partitioning by year from 'date_field' + ) + """, + f"""INSERT INTO {identifier} + VALUES + (CAST('2023-01-01' AS DATE), 'Event on 2023-01-01'); + """, + ), + # # Day Transform + ( + [PartitionField(source_id=8, field_id=1001, transform=DayTransform(), name="timestamp_field_day")], + [datetime(2023, 1, 1, 11, 55, 59, 999999)], + Record(timestamp_field_day=19358), + "timestamp_field_day=2023-01-01", + f"""CREATE TABLE {identifier} ( + timestamp_field timestamp, + string_field string + ) + USING iceberg + PARTITIONED BY ( + day(timestamp_field) -- Partitioning by day from 'timestamp_field' + ) + """, + f"""INSERT INTO {identifier} + VALUES + (CAST('2023-01-01' AS DATE), 'Event on 2023-01-01'); + """, + ), + ( + [PartitionField(source_id=9, field_id=1001, transform=DayTransform(), name="timestamptz_field_day")], + [datetime(2023, 1, 1, 12, 0, 1, 999, tzinfo=timezone(timedelta(hours=3)))], + Record(timestamptz_field_day=19358), + "timestamptz_field_day=2023-01-01", + f"""CREATE TABLE {identifier} ( + timestamptz_field timestamp, + string_field string + ) + USING iceberg + PARTITIONED BY ( + day(timestamptz_field) + ) + """, + f"""INSERT INTO {identifier} + VALUES + (CAST('2023-01-01 12:00:01.000999+03:00' AS TIMESTAMP), 'Event at 2023-01-01 12:00:01.000999+03:00'); + """, + ), + ( + [PartitionField(source_id=10, field_id=1001, transform=DayTransform(), name="date_field_day")], + [date(2023, 1, 1)], + 
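+            # The Record below stores the raw ordinal (19358 days since 1970-01-01),
+            # while to_path() renders it back in human-readable form as "2023-01-01".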
Record(date_field_day=19358),
+            "date_field_day=2023-01-01",
+            f"""CREATE TABLE {identifier} (
+                date_field date,
+                string_field string
+            )
+            USING iceberg
+            PARTITIONED BY (
+                day(date_field)  -- Partitioning by day from 'date_field'
+            )
+            """,
+            f"""INSERT INTO {identifier}
+            VALUES
+            (CAST('2023-01-01' AS DATE), 'Event on 2023-01-01');
+            """,
+        ),
+        # Hour Transform
+        (
+            [PartitionField(source_id=8, field_id=1001, transform=HourTransform(), name="timestamp_field_hour")],
+            [datetime(2023, 1, 1, 11, 55, 59, 999999)],
+            Record(timestamp_field_hour=464603),
+            "timestamp_field_hour=2023-01-01-11",
+            f"""CREATE TABLE {identifier} (
+                timestamp_field timestamp,
+                string_field string
+            )
+            USING iceberg
+            PARTITIONED BY (
+                hour(timestamp_field)  -- Partitioning by hour from 'timestamp_field'
+            )
+            """,
+            f"""INSERT INTO {identifier}
+            VALUES
+            (CAST('2023-01-01 11:55:59.999999' AS TIMESTAMP), 'Event within the 11th hour of 2023-01-01');
+            """,
+        ),
+        (
+            [PartitionField(source_id=9, field_id=1001, transform=HourTransform(), name="timestamptz_field_hour")],
+            [datetime(2023, 1, 1, 12, 0, 1, 999, tzinfo=timezone(timedelta(hours=3)))],
+            Record(timestamptz_field_hour=464601),
+            "timestamptz_field_hour=2023-01-01-09",
+            f"""CREATE TABLE {identifier} (
+                timestamptz_field timestamp,
+                string_field string
+            )
+            USING iceberg
+            PARTITIONED BY (
+                hour(timestamptz_field)
+            )
+            """,
+            f"""INSERT INTO {identifier}
+            VALUES
+            (CAST('2023-01-01 12:00:01.000999+03:00' AS TIMESTAMP), 'Event at 2023-01-01 12:00:01.000999+03:00');
+            """,
+        ),
+        # Truncate Transform
+        (
+            [PartitionField(source_id=4, field_id=1001, transform=TruncateTransform(10), name="int_field_trunc")],
+            [12345],
+            Record(int_field_trunc=12340),
+            "int_field_trunc=12340",
+            f"""CREATE TABLE {identifier} (
+                int_field int,
+                string_field string
+            )
+            USING iceberg
+            PARTITIONED BY (
+                truncate(int_field, 10)  -- Truncating 'int_field' integer column to a width of 10
+            )
+            """,
+            f"""INSERT INTO {identifier}
+            VALUES
+            (12345, 'Sample data for int');
+            """,
+        ),
+        (
+            [PartitionField(source_id=5, field_id=1001, transform=TruncateTransform(2), name="bigint_field_trunc")],
+            [2**32 + 1],
+            Record(bigint_field_trunc=2**32),  # 4294967296
+            "bigint_field_trunc=4294967296",
+            f"""CREATE TABLE {identifier} (
+                bigint_field bigint,
+                string_field string
+            )
+            USING iceberg
+            PARTITIONED BY (
+                truncate(bigint_field, 2)  -- Truncating 'bigint_field' long column to a width of 2
+            )
+            """,
+            f"""INSERT INTO {identifier}
+            VALUES
+            (4294967297, 'Sample data for long');
+            """,
+        ),
+        (
+            [PartitionField(source_id=2, field_id=1001, transform=TruncateTransform(3), name="string_field_trunc")],
+            ["abcdefg"],
+            Record(string_field_trunc="abc"),
+            "string_field_trunc=abc",
+            f"""CREATE TABLE {identifier} (
+                string_field string,
+                another_string_field string
+            )
+            USING iceberg
+            PARTITIONED BY (
+                truncate(string_field, 3)  -- Truncating 'string_field' string column to a length of 3 characters
+            )
+            """,
+            f"""INSERT INTO {identifier}
+            VALUES
+            ('abcdefg', 'Another sample for string');
+            """,
+        ),
+        (
+            [PartitionField(source_id=13, field_id=1001, transform=TruncateTransform(width=5), name="decimal_field_trunc")],
+            [Decimal('678.93')],
+            Record(decimal_field_trunc=Decimal('678.90')),
+            "decimal_field_trunc=678.90",  # truncate(width=5) rounds the unscaled value (67893) down to a multiple of 5, giving 678.90
+            f"""CREATE TABLE {identifier} (
+                decimal_field decimal(5,2),
+                string_field string
+            )
+            USING iceberg
+            PARTITIONED BY (
+                truncate(decimal_field, 2)
+            )
+            """,
+            f"""INSERT INTO {identifier}
+            VALUES
+            (678.90, 'Associated string value for decimal 678.90')
+            """,
+        ),
+        (
+            [PartitionField(source_id=11, field_id=1001, transform=TruncateTransform(10), name="binary_field_trunc")],
+            [b'HELLOICEBERG'],
+            Record(binary_field_trunc=b'HELLOICEBE'),
+            "binary_field_trunc=SEVMTE9JQ0VCRQ%3D%3D",
+            f"""CREATE TABLE {identifier} (
+                binary_field binary,
+                string_field string
+            )
+            USING iceberg
+            PARTITIONED BY (
+                truncate(binary_field, 10)  -- Truncating 'binary_field' binary column to a length of 10 bytes
+            )
+            """,
+            f"""INSERT INTO {identifier}
+            VALUES
+            (binary('HELLOICEBERG'), 'Sample data for binary');
+            """,
+        ),
+        # Bucket Transform
+        (
+            [PartitionField(source_id=4, field_id=1001, transform=BucketTransform(2), name="int_field_bucket")],
+            [10],
+            Record(int_field_bucket=0),
+            "int_field_bucket=0",
+            f"""CREATE TABLE {identifier} (
+                int_field int,
+                string_field string
+            )
+            USING iceberg
+            PARTITIONED BY (
+                bucket(2, int_field)  -- Distributing 'int_field' across 2 buckets
+            )
+            """,
+            f"""INSERT INTO {identifier}
+            VALUES
+            (10, 'Integer with value 10');
+            """,
+        ),
+        # Test that multiple field combinations generate the partition Record and the hive partition path correctly
+        (
+            [
+                PartitionField(source_id=8, field_id=1001, transform=YearTransform(), name="timestamp_field_year"),
+                PartitionField(source_id=10, field_id=1002, transform=DayTransform(), name="date_field_day"),
+            ],
+            [
+                datetime(2023, 1, 1, 11, 55, 59, 999999),
+                date(2023, 1, 1),
+            ],
+            Record(timestamp_field_year=53, date_field_day=19358),
+            "timestamp_field_year=2023/date_field_day=2023-01-01",
+            f"""CREATE TABLE {identifier} (
+                timestamp_field timestamp,
+                date_field date,
+                string_field string
+            )
+            USING iceberg
+            PARTITIONED BY (
+                year(timestamp_field),
+                day(date_field)
+            )
+            """,
+            f"""INSERT INTO {identifier}
+            VALUES
+            (CAST('2023-01-01 11:55:59.999999' AS TIMESTAMP), CAST('2023-01-01' AS DATE), 'some data');
+            """,
+        ),
+    ],
+)
+@pytest.mark.integration
+def test_partition_key(
+    session_catalog: Catalog,
+    spark: SparkSession,
+    partition_fields: List[PartitionField],
+    partition_values: List[Any],
+    expected_partition_record: Record,
+    expected_hive_partition_path_slice: str,
+    spark_create_table_sql_for_justification: str,
+    spark_data_insert_sql_for_justification: str,
+) -> None:
+    partition_field_values = [PartitionFieldValue(field, value) for field, value in zip(partition_fields, partition_values)]
+    spec = PartitionSpec(*partition_fields)
+
+    key = PartitionKey(
+        raw_partition_field_values=partition_field_values,
+        partition_spec=spec,
+        schema=TABLE_SCHEMA,
+    )
+
+    # key.partition is the Record written into the metadata of the DataFile, the ManifestFile, and all layers above
+    assert key.partition == expected_partition_record
+    # key.to_path() generates the hive-partitioning portion of the path of the Parquet file to be written
+    assert key.to_path() == expected_hive_partition_path_slice
+
+    # Verify that the expected values are not made up but conform to Spark's behavior
+    if spark_create_table_sql_for_justification is not None and spark_data_insert_sql_for_justification is not None:
+        try:
+            spark.sql(f"drop table {identifier}")
+        except AnalysisException:
+            pass
+
+        spark.sql(spark_create_table_sql_for_justification)
+        spark.sql(spark_data_insert_sql_for_justification)
+
+        iceberg_table = session_catalog.load_table(identifier=identifier)
+        snapshot = iceberg_table.current_snapshot()
+        assert snapshot
+        spark_partition_for_justification = (
snapshot.manifests(iceberg_table.io)[0].fetch_manifest_entry(iceberg_table.io)[0].data_file.partition + ) + spark_path_for_justification = ( + snapshot.manifests(iceberg_table.io)[0].fetch_manifest_entry(iceberg_table.io)[0].data_file.file_path + ) + assert spark_partition_for_justification == expected_partition_record + assert expected_hive_partition_path_slice in spark_path_for_justification diff --git a/tests/test_integration.py b/tests/integration/test_reads.py similarity index 53% rename from tests/test_integration.py rename to tests/integration/test_reads.py index 7cf017def9..fdc13ae752 100644 --- a/tests/test_integration.py +++ b/tests/integration/test_reads.py @@ -22,10 +22,13 @@ import pyarrow.parquet as pq import pytest +from hive_metastore.ttypes import LockRequest, LockResponse, LockState, UnlockRequest from pyarrow.fs import S3FileSystem +from pydantic_core import ValidationError from pyiceberg.catalog import Catalog, load_catalog -from pyiceberg.exceptions import NoSuchTableError +from pyiceberg.catalog.hive import HiveCatalog, _HiveClient +from pyiceberg.exceptions import CommitFailedException, NoSuchTableError from pyiceberg.expressions import ( And, EqualTo, @@ -50,7 +53,7 @@ @pytest.fixture() -def catalog() -> Catalog: +def catalog_rest() -> Catalog: return load_catalog( "local", **{ @@ -64,40 +67,23 @@ def catalog() -> Catalog: @pytest.fixture() -def table_test_null_nan(catalog: Catalog) -> Table: - return catalog.load_table("default.test_null_nan") - - -@pytest.fixture() -def table_test_null_nan_rewritten(catalog: Catalog) -> Table: - return catalog.load_table("default.test_null_nan_rewritten") - - -@pytest.fixture() -def table_test_limit(catalog: Catalog) -> Table: - return catalog.load_table("default.test_limit") - - -@pytest.fixture() -def table_test_all_types(catalog: Catalog) -> Table: - return catalog.load_table("default.test_all_types") - - -@pytest.fixture() -def table_test_table_version(catalog: Catalog) -> Table: - return catalog.load_table("default.test_table_version") - - -@pytest.fixture() -def table_test_table_sanitized_character(catalog: Catalog) -> Table: - return catalog.load_table("default.test_table_sanitized_character") +def catalog_hive() -> Catalog: + return load_catalog( + "local", + **{ + "type": "hive", + "uri": "http://localhost:9083", + "s3.endpoint": "http://localhost:9000", + "s3.access-key-id": "admin", + "s3.secret-access-key": "password", + }, + ) TABLE_NAME = ("default", "t1") -@pytest.fixture() -def table(catalog: Catalog) -> Table: +def create_table(catalog: Catalog) -> Table: try: catalog.drop_table(TABLE_NAME) except NoSuchTableError: @@ -108,49 +94,85 @@ def table(catalog: Catalog) -> Table: NestedField(field_id=2, name="int", field_type=IntegerType(), required=True), NestedField(field_id=3, name="bool", field_type=BooleanType(), required=False), NestedField(field_id=4, name="datetime", field_type=TimestampType(), required=False), - schema_id=1, ) return catalog.create_table(identifier=TABLE_NAME, schema=schema) @pytest.mark.integration -def test_table_properties(table: Table) -> None: +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_table_properties(catalog: Catalog) -> None: + table = create_table(catalog) + assert table.properties == DEFAULT_PROPERTIES with table.transaction() as transaction: transaction.set_properties(abc="🤪") - assert table.properties == dict(abc="🤪", **DEFAULT_PROPERTIES) with table.transaction() as transaction: 
transaction.remove_properties("abc") - assert table.properties == DEFAULT_PROPERTIES table = table.transaction().set_properties(abc="def").commit_transaction() - assert table.properties == dict(abc="def", **DEFAULT_PROPERTIES) table = table.transaction().remove_properties("abc").commit_transaction() + assert table.properties == DEFAULT_PROPERTIES + + table = table.transaction().set_properties(abc=123).commit_transaction() + # properties are stored as strings in the iceberg spec + assert table.properties == dict(abc="123", **DEFAULT_PROPERTIES) + + with pytest.raises(ValidationError) as exc_info: + table.transaction().set_properties(property_name=None).commit_transaction() + assert "None type is not a supported value in properties: property_name" in str(exc_info.value) + + +@pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_table_properties_dict(catalog: Catalog) -> None: + table = create_table(catalog) assert table.properties == DEFAULT_PROPERTIES + with table.transaction() as transaction: + transaction.set_properties({"abc": "🤪"}) + assert table.properties == dict({"abc": "🤪"}, **DEFAULT_PROPERTIES) -@pytest.fixture() -def test_positional_mor_deletes(catalog: Catalog) -> Table: - """Table that has positional deletes""" - return catalog.load_table("default.test_positional_mor_deletes") + with table.transaction() as transaction: + transaction.remove_properties("abc") + assert table.properties == DEFAULT_PROPERTIES + table = table.transaction().set_properties({"abc": "def"}).commit_transaction() + assert table.properties == dict({"abc": "def"}, **DEFAULT_PROPERTIES) -@pytest.fixture() -def test_positional_mor_double_deletes(catalog: Catalog) -> Table: - """Table that has multiple positional deletes""" - return catalog.load_table("default.test_positional_mor_double_deletes") + table = table.transaction().remove_properties("abc").commit_transaction() + assert table.properties == DEFAULT_PROPERTIES + + table = table.transaction().set_properties({"abc": 123}).commit_transaction() + # properties are stored as strings in the iceberg spec + assert table.properties == dict({"abc": "123"}, **DEFAULT_PROPERTIES) + + with pytest.raises(ValidationError) as exc_info: + table.transaction().set_properties({"property_name": None}).commit_transaction() + assert "None type is not a supported value in properties: property_name" in str(exc_info.value) + + +@pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_table_properties_error(catalog: Catalog) -> None: + table = create_table(catalog) + properties = {"abc": "def"} + with pytest.raises(ValueError) as e: + table.transaction().set_properties(properties, abc="def").commit_transaction() + assert "Cannot pass both properties and kwargs" in str(e.value) @pytest.mark.integration -def test_pyarrow_nan(table_test_null_nan: Table) -> None: +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_pyarrow_nan(catalog: Catalog) -> None: + table_test_null_nan = catalog.load_table("default.test_null_nan") arrow_table = table_test_null_nan.scan(row_filter=IsNaN("col_numeric"), selected_fields=("idx", "col_numeric")).to_arrow() assert len(arrow_table) == 1 assert arrow_table["idx"][0].as_py() == 1 @@ -158,7 +180,9 @@ def test_pyarrow_nan(table_test_null_nan: Table) -> None: @pytest.mark.integration -def 
test_pyarrow_nan_rewritten(table_test_null_nan_rewritten: Table) -> None: +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_pyarrow_nan_rewritten(catalog: Catalog) -> None: + table_test_null_nan_rewritten = catalog.load_table("default.test_null_nan_rewritten") arrow_table = table_test_null_nan_rewritten.scan( row_filter=IsNaN("col_numeric"), selected_fields=("idx", "col_numeric") ).to_arrow() @@ -168,14 +192,18 @@ def test_pyarrow_nan_rewritten(table_test_null_nan_rewritten: Table) -> None: @pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) @pytest.mark.skip(reason="Fixing issues with NaN's: https://github.com/apache/arrow/issues/34162") -def test_pyarrow_not_nan_count(table_test_null_nan: Table) -> None: +def test_pyarrow_not_nan_count(catalog: Catalog) -> None: + table_test_null_nan = catalog.load_table("default.test_null_nan") not_nan = table_test_null_nan.scan(row_filter=NotNaN("col_numeric"), selected_fields=("idx",)).to_arrow() assert len(not_nan) == 2 @pytest.mark.integration -def test_duckdb_nan(table_test_null_nan_rewritten: Table) -> None: +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_duckdb_nan(catalog: Catalog) -> None: + table_test_null_nan_rewritten = catalog.load_table("default.test_null_nan_rewritten") con = table_test_null_nan_rewritten.scan().to_duckdb("table_test_null_nan") result = con.query("SELECT idx, col_numeric FROM table_test_null_nan WHERE isnan(col_numeric)").fetchone() assert result[0] == 1 @@ -183,7 +211,9 @@ def test_duckdb_nan(table_test_null_nan_rewritten: Table) -> None: @pytest.mark.integration -def test_pyarrow_limit(table_test_limit: Table) -> None: +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_pyarrow_limit(catalog: Catalog) -> None: + table_test_limit = catalog.load_table("default.test_limit") limited_result = table_test_limit.scan(selected_fields=("idx",), limit=1).to_arrow() assert len(limited_result) == 1 @@ -194,16 +224,42 @@ def test_pyarrow_limit(table_test_limit: Table) -> None: assert len(full_result) == 10 +@pytest.mark.integration @pytest.mark.filterwarnings("ignore") +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_daft_nan(catalog: Catalog) -> None: + table_test_null_nan_rewritten = catalog.load_table("default.test_null_nan_rewritten") + df = table_test_null_nan_rewritten.to_daft() + assert df.count_rows() == 3 + assert math.isnan(df.to_pydict()["col_numeric"][0]) + + @pytest.mark.integration -def test_ray_nan(table_test_null_nan_rewritten: Table) -> None: +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_daft_nan_rewritten(catalog: Catalog) -> None: + table_test_null_nan_rewritten = catalog.load_table("default.test_null_nan_rewritten") + df = table_test_null_nan_rewritten.to_daft() + df = df.where(df["col_numeric"].float.is_nan()) + df = df.select("idx", "col_numeric") + assert df.count_rows() == 1 + assert df.to_pydict()["idx"][0] == 1 + assert math.isnan(df.to_pydict()["col_numeric"][0]) + + +@pytest.mark.integration +@pytest.mark.filterwarnings("ignore") +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def 
test_ray_nan(catalog: Catalog) -> None: + table_test_null_nan_rewritten = catalog.load_table("default.test_null_nan_rewritten") ray_dataset = table_test_null_nan_rewritten.scan().to_ray() assert ray_dataset.count() == 3 assert math.isnan(ray_dataset.take()[0]["col_numeric"]) @pytest.mark.integration -def test_ray_nan_rewritten(table_test_null_nan_rewritten: Table) -> None: +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_ray_nan_rewritten(catalog: Catalog) -> None: + table_test_null_nan_rewritten = catalog.load_table("default.test_null_nan_rewritten") ray_dataset = table_test_null_nan_rewritten.scan( row_filter=IsNaN("col_numeric"), selected_fields=("idx", "col_numeric") ).to_ray() @@ -213,15 +269,19 @@ def test_ray_nan_rewritten(table_test_null_nan_rewritten: Table) -> None: @pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) @pytest.mark.skip(reason="Fixing issues with NaN's: https://github.com/apache/arrow/issues/34162") -def test_ray_not_nan_count(table_test_null_nan_rewritten: Table) -> None: +def test_ray_not_nan_count(catalog: Catalog) -> None: + table_test_null_nan_rewritten = catalog.load_table("default.test_null_nan_rewritten") ray_dataset = table_test_null_nan_rewritten.scan(row_filter=NotNaN("col_numeric"), selected_fields=("idx",)).to_ray() print(ray_dataset.take()) assert ray_dataset.count() == 2 @pytest.mark.integration -def test_ray_all_types(table_test_all_types: Table) -> None: +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_ray_all_types(catalog: Catalog) -> None: + table_test_all_types = catalog.load_table("default.test_all_types") ray_dataset = table_test_all_types.scan().to_ray() pandas_dataframe = table_test_all_types.scan().to_pandas() assert ray_dataset.count() == pandas_dataframe.shape[0] @@ -229,8 +289,14 @@ def test_ray_all_types(table_test_all_types: Table) -> None: @pytest.mark.integration -def test_pyarrow_to_iceberg_all_types(table_test_all_types: Table) -> None: - fs = S3FileSystem(endpoint_override="http://localhost:9000", access_key="admin", secret_key="password") +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_pyarrow_to_iceberg_all_types(catalog: Catalog) -> None: + table_test_all_types = catalog.load_table("default.test_all_types") + fs = S3FileSystem( + endpoint_override=catalog.properties["s3.endpoint"], + access_key=catalog.properties["s3.access-key-id"], + secret_key=catalog.properties["s3.secret-access-key"], + ) data_file_paths = [task.file.file_path for task in table_test_all_types.scan().plan_files()] for data_file_path in data_file_paths: uri = urlparse(data_file_path) @@ -242,7 +308,8 @@ def test_pyarrow_to_iceberg_all_types(table_test_all_types: Table) -> None: @pytest.mark.integration -def test_pyarrow_deletes(test_positional_mor_deletes: Table) -> None: +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_pyarrow_deletes(catalog: Catalog) -> None: # number, letter # (1, 'a'), # (2, 'b'), @@ -256,6 +323,7 @@ def test_pyarrow_deletes(test_positional_mor_deletes: Table) -> None: # (10, 'j'), # (11, 'k'), # (12, 'l') + test_positional_mor_deletes = catalog.load_table("default.test_positional_mor_deletes") arrow_table = test_positional_mor_deletes.scan().to_arrow() assert 
arrow_table["number"].to_pylist() == [1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12] @@ -277,7 +345,8 @@ def test_pyarrow_deletes(test_positional_mor_deletes: Table) -> None: @pytest.mark.integration -def test_pyarrow_deletes_double(test_positional_mor_double_deletes: Table) -> None: +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_pyarrow_deletes_double(catalog: Catalog) -> None: # number, letter # (1, 'a'), # (2, 'b'), @@ -291,6 +360,7 @@ def test_pyarrow_deletes_double(test_positional_mor_double_deletes: Table) -> No # (10, 'j'), # (11, 'k'), # (12, 'l') + test_positional_mor_double_deletes = catalog.load_table("default.test_positional_mor_double_deletes") arrow_table = test_positional_mor_double_deletes.scan().to_arrow() assert arrow_table["number"].to_pylist() == [1, 2, 3, 4, 5, 7, 8, 10, 11, 12] @@ -312,6 +382,7 @@ def test_pyarrow_deletes_double(test_positional_mor_double_deletes: Table) -> No @pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) def test_partitioned_tables(catalog: Catalog) -> None: for table_name, predicate in [ ("test_partitioned_by_identity", "ts >= '2023-03-05T00:00:00+00:00'"), @@ -328,6 +399,7 @@ def test_partitioned_tables(catalog: Catalog) -> None: @pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) def test_unpartitioned_uuid_table(catalog: Catalog) -> None: unpartitioned_uuid = catalog.load_table("default.test_uuid_and_fixed_unpartitioned") arrow_table_eq = unpartitioned_uuid.scan(row_filter="uuid_col == '102cb62f-e6f8-4eb0-9973-d9b012ff0967'").to_arrow() @@ -344,6 +416,7 @@ def test_unpartitioned_uuid_table(catalog: Catalog) -> None: @pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) def test_unpartitioned_fixed_table(catalog: Catalog) -> None: fixed_table = catalog.load_table("default.test_uuid_and_fixed_unpartitioned") arrow_table_eq = fixed_table.scan(row_filter=EqualTo("fixed_col", b"1234567890123456789012345")).to_arrow() @@ -362,19 +435,40 @@ def test_unpartitioned_fixed_table(catalog: Catalog) -> None: @pytest.mark.integration -def test_scan_tag(test_positional_mor_deletes: Table) -> None: +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_scan_tag(catalog: Catalog) -> None: + test_positional_mor_deletes = catalog.load_table("default.test_positional_mor_deletes") arrow_table = test_positional_mor_deletes.scan().use_ref("tag_12").to_arrow() assert arrow_table["number"].to_pylist() == [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12] @pytest.mark.integration -def test_scan_branch(test_positional_mor_deletes: Table) -> None: +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_scan_branch(catalog: Catalog) -> None: + test_positional_mor_deletes = catalog.load_table("default.test_positional_mor_deletes") arrow_table = test_positional_mor_deletes.scan().use_ref("without_5").to_arrow() assert arrow_table["number"].to_pylist() == [1, 2, 3, 4, 6, 7, 8, 9, 10, 11, 12] @pytest.mark.integration -def test_upgrade_table_version(table_test_table_version: Table) -> None: +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def 
test_filter_on_new_column(catalog: Catalog) -> None: + test_table_add_column = catalog.load_table("default.test_table_add_column") + arrow_table = test_table_add_column.scan(row_filter="b == '2'").to_arrow() + assert arrow_table["b"].to_pylist() == ['2'] + + arrow_table = test_table_add_column.scan(row_filter="b is not null").to_arrow() + assert arrow_table["b"].to_pylist() == ['2'] + + arrow_table = test_table_add_column.scan(row_filter="b is null").to_arrow() + assert arrow_table["b"].to_pylist() == [None] + + +@pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_upgrade_table_version(catalog: Catalog) -> None: + table_test_table_version = catalog.load_table("default.test_table_version") + assert table_test_table_version.format_version == 1 with table_test_table_version.transaction() as transaction: @@ -399,8 +493,45 @@ def test_upgrade_table_version(table_test_table_version: Table) -> None: @pytest.mark.integration -def test_reproduce_issue(table_test_table_sanitized_character: Table) -> None: +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_sanitize_character(catalog: Catalog) -> None: + table_test_table_sanitized_character = catalog.load_table("default.test_table_sanitized_character") arrow_table = table_test_table_sanitized_character.scan().to_arrow() assert len(arrow_table.schema.names), 1 assert len(table_test_table_sanitized_character.schema().fields), 1 assert arrow_table.schema.names[0] == table_test_table_sanitized_character.schema().fields[0].name + + +@pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_null_list_and_map(catalog: Catalog) -> None: + table_test_empty_list_and_map = catalog.load_table("default.test_table_empty_list_and_map") + arrow_table = table_test_empty_list_and_map.scan().to_arrow() + assert arrow_table["col_list"].to_pylist() == [None, []] + assert arrow_table["col_map"].to_pylist() == [None, []] + # This should be: + # assert arrow_table["col_list_with_struct"].to_pylist() == [None, [{'test': 1}]] + # Once https://github.com/apache/arrow/issues/38809 has been fixed + assert arrow_table["col_list_with_struct"].to_pylist() == [[], [{'test': 1}]] + + +@pytest.mark.integration +def test_hive_locking(catalog_hive: HiveCatalog) -> None: + table = create_table(catalog_hive) + + database_name: str + table_name: str + _, database_name, table_name = table.identifier + + hive_client: _HiveClient = _HiveClient(catalog_hive.properties["uri"]) + blocking_lock_request: LockRequest = catalog_hive._create_lock_request(database_name, table_name) + + with hive_client as open_client: + # Force a lock on the test table + lock: LockResponse = open_client.lock(blocking_lock_request) + assert lock.state == LockState.ACQUIRED + try: + with pytest.raises(CommitFailedException, match="(Failed to acquire lock for).*"): + table.transaction().set_properties(lock="fail").commit_transaction() + finally: + open_client.unlock(UnlockRequest(lock.lockid)) diff --git a/tests/test_integration_manifest.py b/tests/integration/test_rest_manifest.py similarity index 100% rename from tests/test_integration_manifest.py rename to tests/integration/test_rest_manifest.py diff --git a/tests/test_integration_schema.py b/tests/integration/test_rest_schema.py similarity index 96% rename from tests/test_integration_schema.py rename to 
tests/integration/test_rest_schema.py index d844e6d6c0..7aeb1bccdc 100644 --- a/tests/test_integration_schema.py +++ b/tests/integration/test_rest_schema.py @@ -20,8 +20,12 @@ from pyiceberg.catalog import Catalog, load_catalog from pyiceberg.exceptions import CommitFailedException, NoSuchTableError, ValidationError +from pyiceberg.partitioning import PartitionField, PartitionSpec from pyiceberg.schema import Schema, prune_columns -from pyiceberg.table import Table, UpdateSchema +from pyiceberg.table import Table, TableProperties, UpdateSchema +from pyiceberg.table.name_mapping import MappedField, NameMapping, create_mapping_from_schema +from pyiceberg.table.sorting import SortField, SortOrder +from pyiceberg.transforms import IdentityTransform from pyiceberg.types import ( BinaryType, BooleanType, @@ -70,13 +74,17 @@ def _create_table_with_schema(catalog: Catalog, schema: Schema) -> Table: catalog.drop_table(tbl_name) except NoSuchTableError: pass - return catalog.create_table(identifier=tbl_name, schema=schema) + return catalog.create_table( + identifier=tbl_name, + schema=schema, + properties={TableProperties.DEFAULT_NAME_MAPPING: create_mapping_from_schema(schema).model_dump_json()}, + ) @pytest.mark.integration def test_add_already_exists(catalog: Catalog, table_schema_nested: Schema) -> None: table = _create_table_with_schema(catalog, table_schema_nested) - update = UpdateSchema(table) + update = table.update_schema() with pytest.raises(ValueError) as exc_info: update.add_column("foo", IntegerType()) @@ -90,7 +98,7 @@ def test_add_already_exists(catalog: Catalog, table_schema_nested: Schema) -> No @pytest.mark.integration def test_add_to_non_struct_type(catalog: Catalog, table_schema_simple: Schema) -> None: table = _create_table_with_schema(catalog, table_schema_simple) - update = UpdateSchema(table) + update = table.update_schema() with pytest.raises(ValueError) as exc_info: update.add_column(path=("foo", "lat"), field_type=IntegerType()) assert "Cannot add column 'lat' to non-struct type: foo" in str(exc_info.value) @@ -353,7 +361,6 @@ def test_revert_changes(simple_table: Table, table_schema_simple: Schema) -> Non NestedField(field_id=1, name='foo', field_type=StringType(), required=False), NestedField(field_id=2, name='bar', field_type=IntegerType(), required=True), NestedField(field_id=3, name='baz', field_type=BooleanType(), required=False), - schema_id=0, identifier_field_ids=[2], ), 1: Schema( @@ -361,11 +368,12 @@ def test_revert_changes(simple_table: Table, table_schema_simple: Schema) -> Non NestedField(field_id=2, name='bar', field_type=IntegerType(), required=True), NestedField(field_id=3, name='baz', field_type=BooleanType(), required=False), NestedField(field_id=4, name='data', field_type=IntegerType(), required=False), - schema_id=1, identifier_field_ids=[2], ), } assert simple_table.schema().schema_id == 0 + assert simple_table.schemas()[0].schema_id == 0 + assert simple_table.schemas()[1].schema_id == 1 @pytest.mark.integration @@ -664,13 +672,24 @@ def test_rename_simple(simple_table: Table) -> None: with simple_table.update_schema() as schema_update: schema_update.rename_column("foo", "vo") + with simple_table.transaction() as txn: + with txn.update_schema() as schema_update: + schema_update.rename_column("bar", "var") + assert simple_table.schema() == Schema( NestedField(field_id=1, name="vo", field_type=StringType(), required=False), - NestedField(field_id=2, name="bar", field_type=IntegerType(), required=True), + NestedField(field_id=2, name="var", 
field_type=IntegerType(), required=True), NestedField(field_id=3, name="baz", field_type=BooleanType(), required=False), identifier_field_ids=[2], ) + # Check that the name mapping gets updated + assert simple_table.name_mapping() == NameMapping([ + MappedField(field_id=1, names=['foo', 'vo']), + MappedField(field_id=2, names=['bar', 'var']), + MappedField(field_id=3, names=['baz']), + ]) + @pytest.mark.integration def test_rename_simple_nested(catalog: Catalog) -> None: @@ -698,6 +717,11 @@ def test_rename_simple_nested(catalog: Catalog) -> None: ), ) + # Check that the name mapping gets updated + assert tbl.name_mapping() == NameMapping([ + MappedField(field_id=1, names=['foo'], fields=[MappedField(field_id=2, names=['bar', 'vo'])]), + ]) + @pytest.mark.integration def test_rename_simple_nested_with_dots(catalog: Catalog) -> None: @@ -1046,13 +1070,13 @@ def test_add_nested_list_of_structs(catalog: Catalog) -> None: def test_add_required_column(catalog: Catalog) -> None: schema_ = Schema(NestedField(field_id=1, name="a", field_type=BooleanType(), required=False)) table = _create_table_with_schema(catalog, schema_) - update = UpdateSchema(table) + update = table.update_schema() with pytest.raises(ValueError) as exc_info: update.add_column(path="data", field_type=IntegerType(), required=True) assert "Incompatible change: cannot add required column: data" in str(exc_info.value) new_schema = ( - UpdateSchema(table, allow_incompatible_changes=True) # pylint: disable=W0212 + UpdateSchema(transaction=table.transaction(), allow_incompatible_changes=True) .add_column(path="data", field_type=IntegerType(), required=True) ._apply() ) @@ -1068,12 +1092,13 @@ def test_add_required_column_case_insensitive(catalog: Catalog) -> None: table = _create_table_with_schema(catalog, schema_) with pytest.raises(ValueError) as exc_info: - with UpdateSchema(table, allow_incompatible_changes=True) as update: - update.case_sensitive(False).add_column(path="ID", field_type=IntegerType(), required=True) + with table.transaction() as txn: + with txn.update_schema(allow_incompatible_changes=True) as update: + update.case_sensitive(False).add_column(path="ID", field_type=IntegerType(), required=True) assert "already exists: ID" in str(exc_info.value) new_schema = ( - UpdateSchema(table, allow_incompatible_changes=True) # pylint: disable=W0212 + UpdateSchema(transaction=table.transaction(), allow_incompatible_changes=True) .add_column(path="ID", field_type=IntegerType(), required=True) ._apply() ) @@ -1244,7 +1269,7 @@ def test_mixed_changes(catalog: Catalog) -> None: @pytest.mark.integration def test_ambiguous_column(catalog: Catalog, table_schema_nested: Schema) -> None: table = _create_table_with_schema(catalog, table_schema_nested) - update = UpdateSchema(table) + update = UpdateSchema(transaction=table.transaction()) with pytest.raises(ValueError) as exc_info: update.add_column(path="location.latitude", field_type=IntegerType()) @@ -2487,13 +2512,40 @@ def test_two_add_schemas_in_a_single_transaction(catalog: Catalog) -> None: ), ) - with pytest.raises(ValueError) as exc_info: + with pytest.raises(CommitFailedException) as exc_info: with tbl.transaction() as tr: with tr.update_schema() as update: update.add_column("bar", field_type=StringType()) with tr.update_schema() as update: update.add_column("baz", field_type=StringType()) - assert "Updates in a single commit need to be unique, duplicate: " in str( - exc_info.value + assert "CommitFailedException: Requirement failed: current schema changed: expected id 1 != 0" 
in str(exc_info.value) + + +@pytest.mark.integration +def test_create_table_integrity_after_fresh_assignment(catalog: Catalog) -> None: + schema = Schema( + NestedField(field_id=5, name="col_uuid", field_type=UUIDType(), required=False), + NestedField(field_id=4, name="col_fixed", field_type=FixedType(25), required=False), + ) + partition_spec = PartitionSpec( + PartitionField(source_id=5, field_id=1000, transform=IdentityTransform(), name="col_uuid"), spec_id=0 + ) + sort_order = SortOrder(SortField(source_id=4, transform=IdentityTransform())) + tbl_name = "default.test_create_integrity" + try: + catalog.drop_table(tbl_name) + except NoSuchTableError: + pass + tbl = catalog.create_table(identifier=tbl_name, schema=schema, partition_spec=partition_spec, sort_order=sort_order) + expected_schema = Schema( + NestedField(field_id=1, name="col_uuid", field_type=UUIDType(), required=False), + NestedField(field_id=2, name="col_fixed", field_type=FixedType(25), required=False), + ) + expected_spec = PartitionSpec( + PartitionField(source_id=1, field_id=1000, transform=IdentityTransform(), name="col_uuid"), spec_id=0 ) + expected_sort_order = SortOrder(SortField(source_id=2, transform=IdentityTransform())) + assert tbl.schema() == expected_schema + assert tbl.spec() == expected_spec + assert tbl.sort_order() == expected_sort_order diff --git a/tests/integration/test_writes.py b/tests/integration/test_writes.py new file mode 100644 index 0000000000..82c4ace711 --- /dev/null +++ b/tests/integration/test_writes.py @@ -0,0 +1,666 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
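
Note: the new test module below exercises the write path end to end. As orientation, the API surface under test reduces to roughly this sketch; the catalog and table names are illustrative, and a configured catalog such as the REST one from the compose file is assumed:

    import pyarrow as pa
    from pyiceberg.catalog import load_catalog

    catalog = load_catalog("default")  # assumes a catalog named "default" is configured
    df = pa.Table.from_pydict({"n": [1, 2, 3]})
    tbl = catalog.create_table("default.demo", schema=df.schema)  # a pa.Schema is accepted directly
    tbl.append(df)     # commits a new append snapshot
    tbl.overwrite(df)  # deletes the current data files, then appends
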
+# pylint:disable=redefined-outer-name +import os +import time +import uuid +from datetime import date, datetime +from pathlib import Path +from typing import Any, Dict, List +from urllib.parse import urlparse + +import pyarrow as pa +import pyarrow.parquet as pq +import pytest +import pytz +from pyarrow.fs import S3FileSystem +from pydantic_core import ValidationError +from pyspark.sql import SparkSession +from pytest_mock.plugin import MockerFixture + +from pyiceberg.catalog import Catalog +from pyiceberg.catalog.sql import SqlCatalog +from pyiceberg.exceptions import NoSuchTableError +from pyiceberg.schema import Schema +from pyiceberg.table import Table, _dataframe_to_data_files +from pyiceberg.typedef import Properties +from pyiceberg.types import ( + BinaryType, + BooleanType, + DateType, + DoubleType, + FixedType, + FloatType, + IntegerType, + LongType, + NestedField, + StringType, + TimestampType, + TimestamptzType, +) + +TEST_DATA_WITH_NULL = { + 'bool': [False, None, True], + 'string': ['a', None, 'z'], + # Go over the 16 bytes to kick in truncation + 'string_long': ['a' * 22, None, 'z' * 22], + 'int': [1, None, 9], + 'long': [1, None, 9], + 'float': [0.0, None, 0.9], + 'double': [0.0, None, 0.9], + 'timestamp': [datetime(2023, 1, 1, 19, 25, 00), None, datetime(2023, 3, 1, 19, 25, 00)], + 'timestamptz': [datetime(2023, 1, 1, 19, 25, 00), None, datetime(2023, 3, 1, 19, 25, 00)], + 'date': [date(2023, 1, 1), None, date(2023, 3, 1)], + # Not supported by Spark + # 'time': [time(1, 22, 0), None, time(19, 25, 0)], + # Not natively supported by Arrow + # 'uuid': [uuid.UUID('00000000-0000-0000-0000-000000000000').bytes, None, uuid.UUID('11111111-1111-1111-1111-111111111111').bytes], + 'binary': [b'\01', None, b'\22'], + 'fixed': [ + uuid.UUID('00000000-0000-0000-0000-000000000000').bytes, + None, + uuid.UUID('11111111-1111-1111-1111-111111111111').bytes, + ], +} + +TABLE_SCHEMA = Schema( + NestedField(field_id=1, name="bool", field_type=BooleanType(), required=False), + NestedField(field_id=2, name="string", field_type=StringType(), required=False), + NestedField(field_id=3, name="string_long", field_type=StringType(), required=False), + NestedField(field_id=4, name="int", field_type=IntegerType(), required=False), + NestedField(field_id=5, name="long", field_type=LongType(), required=False), + NestedField(field_id=6, name="float", field_type=FloatType(), required=False), + NestedField(field_id=7, name="double", field_type=DoubleType(), required=False), + NestedField(field_id=8, name="timestamp", field_type=TimestampType(), required=False), + NestedField(field_id=9, name="timestamptz", field_type=TimestamptzType(), required=False), + NestedField(field_id=10, name="date", field_type=DateType(), required=False), + # NestedField(field_id=11, name="time", field_type=TimeType(), required=False), + # NestedField(field_id=12, name="uuid", field_type=UuidType(), required=False), + NestedField(field_id=12, name="binary", field_type=BinaryType(), required=False), + NestedField(field_id=13, name="fixed", field_type=FixedType(16), required=False), +) + + +@pytest.fixture(scope="session") +def pa_schema() -> pa.Schema: + return pa.schema([ + ("bool", pa.bool_()), + ("string", pa.string()), + ("string_long", pa.string()), + ("int", pa.int32()), + ("long", pa.int64()), + ("float", pa.float32()), + ("double", pa.float64()), + ("timestamp", pa.timestamp(unit="us")), + ("timestamptz", pa.timestamp(unit="us", tz="UTC")), + ("date", pa.date32()), + # Not supported by Spark + # ("time", pa.time64("us")), + 
# Not natively supported by Arrow
+        # ("uuid", pa.fixed(16)),
+        ("binary", pa.large_binary()),
+        ("fixed", pa.binary(16)),
+    ])
+
+
+@pytest.fixture(scope="session")
+def arrow_table_with_null(pa_schema: pa.Schema) -> pa.Table:
+    """PyArrow table with all kinds of columns"""
+    return pa.Table.from_pydict(TEST_DATA_WITH_NULL, schema=pa_schema)
+
+
+@pytest.fixture(scope="session")
+def arrow_table_without_data(pa_schema: pa.Schema) -> pa.Table:
+    """PyArrow table with all kinds of columns, but no rows"""
+    return pa.Table.from_pylist([], schema=pa_schema)
+
+
+@pytest.fixture(scope="session")
+def arrow_table_with_only_nulls(pa_schema: pa.Schema) -> pa.Table:
+    """PyArrow table with all kinds of columns, holding only null values"""
+    return pa.Table.from_pylist([{}, {}], schema=pa_schema)
+
+
+def _create_table(session_catalog: Catalog, identifier: str, properties: Properties, data: List[pa.Table]) -> Table:
+    try:
+        session_catalog.drop_table(identifier=identifier)
+    except NoSuchTableError:
+        pass
+
+    tbl = session_catalog.create_table(identifier=identifier, schema=TABLE_SCHEMA, properties=properties)
+    for d in data:
+        tbl.append(d)
+
+    return tbl
+
+
+@pytest.fixture(scope="session", autouse=True)
+def table_v1_with_null(session_catalog: Catalog, arrow_table_with_null: pa.Table) -> None:
+    identifier = "default.arrow_table_v1_with_null"
+    tbl = _create_table(session_catalog, identifier, {"format-version": "1"}, [arrow_table_with_null])
+    assert tbl.format_version == 1, f"Expected v1, got: v{tbl.format_version}"
+
+
+@pytest.fixture(scope="session", autouse=True)
+def table_v1_without_data(session_catalog: Catalog, arrow_table_without_data: pa.Table) -> None:
+    identifier = "default.arrow_table_v1_without_data"
+    tbl = _create_table(session_catalog, identifier, {"format-version": "1"}, [arrow_table_without_data])
+    assert tbl.format_version == 1, f"Expected v1, got: v{tbl.format_version}"
+
+
+@pytest.fixture(scope="session", autouse=True)
+def table_v1_with_only_nulls(session_catalog: Catalog, arrow_table_with_only_nulls: pa.Table) -> None:
+    identifier = "default.arrow_table_v1_with_only_nulls"
+    tbl = _create_table(session_catalog, identifier, {"format-version": "1"}, [arrow_table_with_only_nulls])
+    assert tbl.format_version == 1, f"Expected v1, got: v{tbl.format_version}"
+
+
+@pytest.fixture(scope="session", autouse=True)
+def table_v1_appended_with_null(session_catalog: Catalog, arrow_table_with_null: pa.Table) -> None:
+    identifier = "default.arrow_table_v1_appended_with_null"
+    tbl = _create_table(session_catalog, identifier, {"format-version": "1"}, 2 * [arrow_table_with_null])
+    assert tbl.format_version == 1, f"Expected v1, got: v{tbl.format_version}"
+
+
+@pytest.fixture(scope="session", autouse=True)
+def table_v2_with_null(session_catalog: Catalog, arrow_table_with_null: pa.Table) -> None:
+    identifier = "default.arrow_table_v2_with_null"
+    tbl = _create_table(session_catalog, identifier, {"format-version": "2"}, [arrow_table_with_null])
+    assert tbl.format_version == 2, f"Expected v2, got: v{tbl.format_version}"
+
+
+@pytest.fixture(scope="session", autouse=True)
+def table_v2_without_data(session_catalog: Catalog, arrow_table_without_data: pa.Table) -> None:
+    identifier = "default.arrow_table_v2_without_data"
+    tbl = _create_table(session_catalog, identifier, {"format-version": "2"}, [arrow_table_without_data])
+    assert tbl.format_version == 2, f"Expected v2, got: v{tbl.format_version}"
+
+
+@pytest.fixture(scope="session", autouse=True)
+def table_v2_with_only_nulls(session_catalog: Catalog, arrow_table_with_only_nulls: pa.Table) -> None:
+    identifier = "default.arrow_table_v2_with_only_nulls"
+    tbl = _create_table(session_catalog, identifier, {"format-version": "2"}, [arrow_table_with_only_nulls])
+    assert tbl.format_version == 2, f"Expected v2, got: v{tbl.format_version}"
+
+
+@pytest.fixture(scope="session", autouse=True)
+def table_v2_appended_with_null(session_catalog: Catalog, arrow_table_with_null: pa.Table) -> None:
+    identifier = "default.arrow_table_v2_appended_with_null"
+    tbl = _create_table(session_catalog, identifier, {"format-version": "2"}, 2 * [arrow_table_with_null])
+    assert tbl.format_version == 2, f"Expected v2, got: v{tbl.format_version}"
+
+
+@pytest.fixture(scope="session", autouse=True)
+def table_v1_v2_appended_with_null(session_catalog: Catalog, arrow_table_with_null: pa.Table) -> None:
+    identifier = "default.arrow_table_v1_v2_appended_with_null"
+    tbl = _create_table(session_catalog, identifier, {"format-version": "1"}, [arrow_table_with_null])
+    assert tbl.format_version == 1, f"Expected v1, got: v{tbl.format_version}"
+
+    with tbl.transaction() as tx:
+        tx.upgrade_table_version(format_version=2)
+
+    tbl.append(arrow_table_with_null)
+
+    assert tbl.format_version == 2, f"Expected v2, got: v{tbl.format_version}"
+
+
+@pytest.mark.integration
+@pytest.mark.parametrize("format_version", [1, 2])
+def test_query_count(spark: SparkSession, format_version: int) -> None:
+    df = spark.table(f"default.arrow_table_v{format_version}_with_null")
+    assert df.count() == 3, "Expected 3 rows"
+
+
+@pytest.mark.integration
+@pytest.mark.parametrize("col", TEST_DATA_WITH_NULL.keys())
+@pytest.mark.parametrize("format_version", [1, 2])
+def test_query_filter_null(spark: SparkSession, col: str, format_version: int) -> None:
+    identifier = f"default.arrow_table_v{format_version}_with_null"
+    df = spark.table(identifier)
+    assert df.where(f"{col} is null").count() == 1, f"Expected 1 row for {col}"
+    assert df.where(f"{col} is not null").count() == 2, f"Expected 2 rows for {col}"
+
+
+@pytest.mark.integration
+@pytest.mark.parametrize("col", TEST_DATA_WITH_NULL.keys())
+@pytest.mark.parametrize("format_version", [1, 2])
+def test_query_filter_without_data(spark: SparkSession, col: str, format_version: int) -> None:
+    identifier = f"default.arrow_table_v{format_version}_without_data"
+    df = spark.table(identifier)
+    assert df.where(f"{col} is null").count() == 0, f"Expected 0 rows for {col}"
+    assert df.where(f"{col} is not null").count() == 0, f"Expected 0 rows for {col}"
+
+
+@pytest.mark.integration
+@pytest.mark.parametrize("col", TEST_DATA_WITH_NULL.keys())
+@pytest.mark.parametrize("format_version", [1, 2])
+def test_query_filter_only_nulls(spark: SparkSession, col: str, format_version: int) -> None:
+    identifier = f"default.arrow_table_v{format_version}_with_only_nulls"
+    df = spark.table(identifier)
+    assert df.where(f"{col} is null").count() == 2, f"Expected 2 rows for {col}"
+    assert df.where(f"{col} is not null").count() == 0, f"Expected 0 rows for {col}"
+
+
+@pytest.mark.integration
+@pytest.mark.parametrize("col", TEST_DATA_WITH_NULL.keys())
+@pytest.mark.parametrize("format_version", [1, 2])
+def test_query_filter_appended_null(spark: SparkSession, col: str, format_version: int) -> None:
+    identifier = f"default.arrow_table_v{format_version}_appended_with_null"
+    df = spark.table(identifier)
+    assert df.where(f"{col} is null").count() == 2, f"Expected 2 rows for {col}"
+    assert df.where(f"{col} is not null").count() == 4, f"Expected 4 rows for {col}"
+
+
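The doubled expectations in the appended-table test above and the v1/v2 test below follow directly from the fixture arithmetic: the same 3-row batch, with one null and two values per column, is appended twice. A quick sanity check of those numbers:

    batches, rows_per_batch, nulls_per_batch = 2, 3, 1
    assert batches * nulls_per_batch == 2  # rows matching "is null"
    assert batches * (rows_per_batch - nulls_per_batch) == 4  # rows matching "is not null"
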
+@pytest.mark.integration
+@pytest.mark.parametrize("col", TEST_DATA_WITH_NULL.keys())
+def test_query_filter_v1_v2_append_null(spark: SparkSession, col: str) -> None:
+    identifier = "default.arrow_table_v1_v2_appended_with_null"
+    df = spark.table(identifier)
+    assert df.where(f"{col} is null").count() == 2, f"Expected 2 rows for {col}"
+    assert df.where(f"{col} is not null").count() == 4, f"Expected 4 rows for {col}"
+
+
+@pytest.mark.integration
+def test_summaries(spark: SparkSession, session_catalog: Catalog, arrow_table_with_null: pa.Table) -> None:
+    identifier = "default.arrow_table_summaries"
+    tbl = _create_table(session_catalog, identifier, {"format-version": "1"}, 2 * [arrow_table_with_null])
+    tbl.overwrite(arrow_table_with_null)
+
+    rows = spark.sql(
+        f"""
+        SELECT operation, summary
+        FROM {identifier}.snapshots
+        ORDER BY committed_at ASC
+    """
+    ).collect()
+
+    operations = [row.operation for row in rows]
+    assert operations == ['append', 'append', 'overwrite']
+
+    summaries = [row.summary for row in rows]
+
+    assert summaries[0] == {
+        'added-data-files': '1',
+        'added-files-size': '5459',
+        'added-records': '3',
+        'total-data-files': '1',
+        'total-delete-files': '0',
+        'total-equality-deletes': '0',
+        'total-files-size': '5459',
+        'total-position-deletes': '0',
+        'total-records': '3',
+    }
+
+    assert summaries[1] == {
+        'added-data-files': '1',
+        'added-files-size': '5459',
+        'added-records': '3',
+        'total-data-files': '2',
+        'total-delete-files': '0',
+        'total-equality-deletes': '0',
+        'total-files-size': '10918',
+        'total-position-deletes': '0',
+        'total-records': '6',
+    }
+
+    assert summaries[2] == {
+        'added-data-files': '1',
+        'added-files-size': '5459',
+        'added-records': '3',
+        'deleted-data-files': '2',
+        'deleted-records': '6',
+        'removed-files-size': '10918',
+        'total-data-files': '1',
+        'total-delete-files': '0',
+        'total-equality-deletes': '0',
+        'total-files-size': '5459',
+        'total-position-deletes': '0',
+        'total-records': '3',
+    }
+
+
+@pytest.mark.integration
+def test_data_files(spark: SparkSession, session_catalog: Catalog, arrow_table_with_null: pa.Table) -> None:
+    identifier = "default.arrow_data_files"
+    tbl = _create_table(session_catalog, identifier, {"format-version": "1"}, [])
+
+    tbl.overwrite(arrow_table_with_null)
+    # should produce a DELETE entry
+    tbl.overwrite(arrow_table_with_null)
+    # Since we don't rewrite, this should produce a new manifest with an ADDED entry
+    tbl.append(arrow_table_with_null)
+
+    rows = spark.sql(
+        f"""
+        SELECT added_data_files_count, existing_data_files_count, deleted_data_files_count
+        FROM {identifier}.all_manifests
+    """
+    ).collect()
+
+    assert [row.added_data_files_count for row in rows] == [1, 1, 0, 1, 1]
+    assert [row.existing_data_files_count for row in rows] == [0, 0, 0, 0, 0]
+    assert [row.deleted_data_files_count for row in rows] == [0, 0, 1, 0, 0]
+
+
+@pytest.mark.integration
+def test_python_writes_with_spark_snapshot_reads(
+    spark: SparkSession, session_catalog: Catalog, arrow_table_with_null: pa.Table
+) -> None:
+    identifier = "default.python_writes_with_spark_snapshot_reads"
+    tbl = _create_table(session_catalog, identifier, {"format-version": "1"}, [])
+
+    def get_current_snapshot_id(identifier: str) -> int:
+        return (
+            spark.sql(f"SELECT snapshot_id FROM {identifier}.snapshots order by committed_at desc limit 1")
+            .collect()[0]
+            .snapshot_id
+        )
+
+    tbl.overwrite(arrow_table_with_null)
+    assert tbl.current_snapshot().snapshot_id == get_current_snapshot_id(identifier)  # type: ignore
+    tbl.overwrite(arrow_table_with_null)
+    assert tbl.current_snapshot().snapshot_id == get_current_snapshot_id(identifier)  # type: ignore
+    tbl.append(arrow_table_with_null)
+    assert tbl.current_snapshot().snapshot_id == get_current_snapshot_id(identifier)  # type: ignore
+
+
+@pytest.mark.integration
+@pytest.mark.parametrize("format_version", [1, 2])
+@pytest.mark.parametrize(
+    "properties, expected_compression_name",
+    [
+        # REST catalog uses Zstandard by default: https://github.com/apache/iceberg/pull/8593
+        ({}, "ZSTD"),
+        ({"write.parquet.compression-codec": "uncompressed"}, "UNCOMPRESSED"),
+        ({"write.parquet.compression-codec": "gzip", "write.parquet.compression-level": "1"}, "GZIP"),
+        ({"write.parquet.compression-codec": "zstd", "write.parquet.compression-level": "1"}, "ZSTD"),
+        ({"write.parquet.compression-codec": "snappy"}, "SNAPPY"),
+    ],
+)
+def test_write_parquet_compression_properties(
+    spark: SparkSession,
+    session_catalog: Catalog,
+    arrow_table_with_null: pa.Table,
+    format_version: int,
+    properties: Dict[str, Any],
+    expected_compression_name: str,
+) -> None:
+    identifier = "default.write_parquet_compression_properties"
+
+    tbl = _create_table(session_catalog, identifier, {"format-version": format_version, **properties}, [arrow_table_with_null])
+
+    data_file_paths = [task.file.file_path for task in tbl.scan().plan_files()]
+
+    fs = S3FileSystem(
+        endpoint_override=session_catalog.properties["s3.endpoint"],
+        access_key=session_catalog.properties["s3.access-key-id"],
+        secret_key=session_catalog.properties["s3.secret-access-key"],
+    )
+    uri = urlparse(data_file_paths[0])
+    with fs.open_input_file(f"{uri.netloc}{uri.path}") as f:
+        parquet_metadata = pq.read_metadata(f)
+        compression = parquet_metadata.row_group(0).column(0).compression
+
+    assert compression == expected_compression_name
+
+
+@pytest.mark.integration
+@pytest.mark.parametrize(
+    "properties, expected_kwargs",
+    [
+        ({"write.parquet.page-size-bytes": "42"}, {"data_page_size": 42}),
+        ({"write.parquet.dict-size-bytes": "42"}, {"dictionary_pagesize_limit": 42}),
+    ],
+)
+def test_write_parquet_other_properties(
+    mocker: MockerFixture,
+    spark: SparkSession,
+    session_catalog: Catalog,
+    arrow_table_with_null: pa.Table,
+    properties: Dict[str, Any],
+    expected_kwargs: Dict[str, Any],
+) -> None:
+    identifier = "default.test_write_parquet_other_properties"
+
+    # The properties we test cannot be checked on the resulting Parquet file, so we spy on the ParquetWriter call instead
+    ParquetWriter = mocker.spy(pq, "ParquetWriter")
+    _create_table(session_catalog, identifier, properties, [arrow_table_with_null])
+
+    call_kwargs = ParquetWriter.call_args[1]
+    for key, value in expected_kwargs.items():
+        assert call_kwargs.get(key) == value
+
+
+@pytest.mark.integration
+@pytest.mark.parametrize(
+    "properties",
+    [
+        {"write.parquet.row-group-size-bytes": "42"},
+        {"write.parquet.page-row-limit": "42"},
+        {"write.parquet.bloom-filter-enabled.column.bool": "42"},
+        {"write.parquet.bloom-filter-max-bytes": "42"},
+    ],
+)
+def test_write_parquet_unsupported_properties(
+    spark: SparkSession,
+    session_catalog: Catalog,
+    arrow_table_with_null: pa.Table,
+    properties: Dict[str, str],
+) -> None:
+    identifier = "default.write_parquet_unsupported_properties"
+
+    tbl = _create_table(session_catalog, identifier, properties, [])
+    with pytest.raises(NotImplementedError):
+        tbl.append(arrow_table_with_null)
+
+
+@pytest.mark.integration
+def test_invalid_arguments(spark: SparkSession, session_catalog: Catalog, arrow_table_with_null: pa.Table) -> None:
+    identifier = "default.arrow_data_files"
+    tbl = _create_table(session_catalog, identifier, {'format-version': '1'}, [])
+
+    with pytest.raises(ValueError, match="Expected PyArrow table, got: not a df"):
+        tbl.overwrite("not a df")
+
+    with pytest.raises(ValueError, match="Expected PyArrow table, got: not a df"):
+        tbl.append("not a df")
+
+
+@pytest.mark.integration
+def test_summaries_with_only_nulls(
+    spark: SparkSession, session_catalog: Catalog, arrow_table_without_data: pa.Table, arrow_table_with_only_nulls: pa.Table
+) -> None:
+    identifier = "default.arrow_table_summaries_with_only_nulls"
+    tbl = _create_table(
+        session_catalog, identifier, {'format-version': '1'}, [arrow_table_without_data, arrow_table_with_only_nulls]
+    )
+    tbl.overwrite(arrow_table_without_data)
+
+    rows = spark.sql(
+        f"""
+        SELECT operation, summary
+        FROM {identifier}.snapshots
+        ORDER BY committed_at ASC
+    """
+    ).collect()
+
+    operations = [row.operation for row in rows]
+    assert operations == ['append', 'append', 'overwrite']
+
+    summaries = [row.summary for row in rows]
+
+    assert summaries[0] == {
+        'total-data-files': '0',
+        'total-delete-files': '0',
+        'total-equality-deletes': '0',
+        'total-files-size': '0',
+        'total-position-deletes': '0',
+        'total-records': '0',
+    }
+
+    assert summaries[1] == {
+        'added-data-files': '1',
+        'added-files-size': '4239',
+        'added-records': '2',
+        'total-data-files': '1',
+        'total-delete-files': '0',
+        'total-equality-deletes': '0',
+        'total-files-size': '4239',
+        'total-position-deletes': '0',
+        'total-records': '2',
+    }
+
+    # The final overwrite with an empty table should delete the single data file written above
+    assert summaries[2] == {
+        'deleted-data-files': '1',
+        'deleted-records': '2',
+        'removed-files-size': '4239',
+        'total-data-files': '0',
+        'total-delete-files': '0',
+        'total-equality-deletes': '0',
+        'total-files-size': '0',
+        'total-position-deletes': '0',
+        'total-records': '0',
+    }
+
+
+@pytest.mark.integration
+def test_duckdb_url_import(warehouse: Path, arrow_table_with_null: pa.Table) -> None:
+    os.environ['TZ'] = 'Etc/UTC'
+    time.tzset()
+    tz = pytz.timezone(os.environ['TZ'])
+
+    catalog = SqlCatalog("test_sql_catalog", uri="sqlite:///:memory:", warehouse=f"/{warehouse}")
+    catalog.create_namespace("default")
+
+    identifier = "default.arrow_table_v1_with_null"
+    tbl = _create_table(catalog, identifier, {}, [arrow_table_with_null])
+    location = tbl.metadata_location
+
+    import duckdb
+
+    duckdb.sql('INSTALL iceberg; LOAD iceberg;')
+    result = duckdb.sql(
+        f"""
+        SELECT *
+        FROM iceberg_scan('{location}')
+    """
+    ).fetchall()
+
+    assert result == [
+        (
+            False,
+            'a',
+            'aaaaaaaaaaaaaaaaaaaaaa',
+            1,
+            1,
+            0.0,
+            0.0,
+            datetime(2023, 1, 1, 19, 25),
+            datetime(2023, 1, 1, 19, 25, tzinfo=tz),
+            date(2023, 1, 1),
+            b'\x01',
+            b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00',
+        ),
+        (None, None, None, None, None, None, None, None, None, None, None, None),
+        (
+            True,
+            'z',
+            'zzzzzzzzzzzzzzzzzzzzzz',
+            9,
+            9,
+            0.8999999761581421,
+            0.9,
+            datetime(2023, 3, 1, 19, 25),
+            datetime(2023, 3, 1, 19, 25, tzinfo=tz),
+            date(2023, 3, 1),
+            b'\x12',
+            b'\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11',
+        ),
+    ]
+
+
+@pytest.mark.integration
+@pytest.mark.parametrize("format_version", [1, 2])
+def test_write_and_evolve(session_catalog: Catalog, format_version: int) -> None:
+    identifier = f"default.arrow_write_data_and_evolve_schema_v{format_version}"
+
+    try:
+        session_catalog.drop_table(identifier=identifier)
+    except NoSuchTableError:
+        pass
+
+    pa_table = pa.Table.from_pydict(
+        {
+            'foo': ['a', None, 'z'],
+        },
+
schema=pa.schema([pa.field("foo", pa.string(), nullable=True)]), + ) + + tbl = session_catalog.create_table( + identifier=identifier, schema=pa_table.schema, properties={"format-version": str(format_version)} + ) + + pa_table_with_column = pa.Table.from_pydict( + { + 'foo': ['a', None, 'z'], + 'bar': [19, None, 25], + }, + schema=pa.schema([ + pa.field("foo", pa.string(), nullable=True), + pa.field("bar", pa.int32(), nullable=True), + ]), + ) + + with tbl.transaction() as txn: + with txn.update_schema() as schema_txn: + schema_txn.union_by_name(pa_table_with_column.schema) + + with txn.update_snapshot().fast_append() as snapshot_update: + for data_file in _dataframe_to_data_files(table_metadata=txn.table_metadata, df=pa_table_with_column, io=tbl.io): + snapshot_update.append_data_file(data_file) + + +@pytest.mark.integration +@pytest.mark.parametrize("format_version", [1, 2]) +def test_table_properties_int_value( + session_catalog: Catalog, + arrow_table_with_null: pa.Table, + format_version: int, +) -> None: + # table properties can be set to int, but still serialized to string + property_with_int = {"property_name": 42} + identifier = "default.test_table_properties_int_value" + + tbl = _create_table( + session_catalog, identifier, {"format-version": format_version, **property_with_int}, [arrow_table_with_null] + ) + assert isinstance(tbl.properties["property_name"], str) + + +@pytest.mark.integration +@pytest.mark.parametrize("format_version", [1, 2]) +def test_table_properties_raise_for_none_value( + session_catalog: Catalog, + arrow_table_with_null: pa.Table, + format_version: int, +) -> None: + property_with_none = {"property_name": None} + identifier = "default.test_table_properties_raise_for_none_value" + + with pytest.raises(ValidationError) as exc_info: + _ = _create_table( + session_catalog, identifier, {"format-version": format_version, **property_with_none}, [arrow_table_with_null] + ) + assert "None type is not a supported value in properties: property_name" in str(exc_info.value) diff --git a/tests/io/test_fsspec.py b/tests/io/test_fsspec.py index f83268b56f..9f044454b4 100644 --- a/tests/io/test_fsspec.py +++ b/tests/io/test_fsspec.py @@ -15,10 +15,13 @@ # specific language governing permissions and limitations # under the License. 
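
The FileIO tests added below pin down two local-filesystem behaviours: a path with no scheme resolves to the local filesystem, and writing an output file creates missing parent directories on demand. A minimal sketch of the same flow through the pyarrow implementation, with an illustrative path:

    from pyiceberg.io.pyarrow import PyArrowFileIO

    io = PyArrowFileIO()
    out = io.new_output("/tmp/warehouse/foo/bar/baz.txt")  # no scheme, so the local FS is inferred
    with out.create() as f:  # parent directories are created as needed
        f.write(b"foo")
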
+import os +import tempfile import uuid import pytest from botocore.awsrequest import AWSRequest +from fsspec.implementations.local import LocalFileSystem from requests_mock import Mocker from pyiceberg.exceptions import SignError @@ -27,6 +30,26 @@ from pyiceberg.io.pyarrow import PyArrowFileIO +def test_fsspec_infer_local_fs_from_path(fsspec_fileio: FsspecFileIO) -> None: + """Test path with `file` scheme and no scheme both use LocalFileSystem""" + assert isinstance(fsspec_fileio.new_output("file://tmp/warehouse")._fs, LocalFileSystem) + assert isinstance(fsspec_fileio.new_output("/tmp/warehouse")._fs, LocalFileSystem) + + +def test_fsspec_local_fs_can_create_path_without_parent_dir(fsspec_fileio: FsspecFileIO) -> None: + """Test LocalFileSystem can create path without first creating the parent directories""" + with tempfile.TemporaryDirectory() as tmpdirname: + file_path = f"{tmpdirname}/foo/bar/baz.txt" + output_file = fsspec_fileio.new_output(file_path) + parent_path = os.path.dirname(file_path) + assert output_file._fs.exists(parent_path) is False + try: + with output_file.create() as f: + f.write(b"foo") + except Exception: + pytest.fail("Failed to write to file without parent directory") + + @pytest.mark.s3 def test_fsspec_new_input_file(fsspec_fileio: FsspecFileIO) -> None: """Test creating a new input file from a fsspec file-io""" diff --git a/tests/io/test_pyarrow.py b/tests/io/test_pyarrow.py index be5f68e429..c2d8f7bd12 100644 --- a/tests/io/test_pyarrow.py +++ b/tests/io/test_pyarrow.py @@ -18,6 +18,7 @@ import os import tempfile +from datetime import date from typing import Any, List, Optional from unittest.mock import MagicMock, patch from uuid import uuid4 @@ -27,8 +28,8 @@ import pytest from pyarrow.fs import FileType, LocalFileSystem -from pyiceberg.avro.resolver import ResolveError from pyiceberg.catalog.noop import NoopCatalog +from pyiceberg.exceptions import ResolveError from pyiceberg.expressions import ( AlwaysFalse, AlwaysTrue, @@ -52,14 +53,16 @@ GreaterThan, Not, Or, - literal, ) +from pyiceberg.expressions.literals import literal from pyiceberg.io import InputStream, OutputStream, load_file_io from pyiceberg.io.pyarrow import ( ICEBERG_SCHEMA, PyArrowFile, PyArrowFileIO, + StatsAggregator, _ConvertToArrowSchema, + _primitive_to_physical, _read_deletes, expression_to_pyarrow, project_table, @@ -84,6 +87,7 @@ LongType, MapType, NestedField, + PrimitiveType, StringType, StructType, TimestampType, @@ -92,6 +96,26 @@ ) +def test_pyarrow_infer_local_fs_from_path() -> None: + """Test path with `file` scheme and no scheme both use LocalFileSystem""" + assert isinstance(PyArrowFileIO().new_output("file://tmp/warehouse")._filesystem, LocalFileSystem) + assert isinstance(PyArrowFileIO().new_output("/tmp/warehouse")._filesystem, LocalFileSystem) + + +def test_pyarrow_local_fs_can_create_path_without_parent_dir() -> None: + """Test LocalFileSystem can create path without first creating the parent directories""" + with tempfile.TemporaryDirectory() as tmpdirname: + file_path = f"{tmpdirname}/foo/bar/baz.txt" + output_file = PyArrowFileIO().new_output(file_path) + parent_path = os.path.dirname(file_path) + assert output_file._filesystem.get_file_info(parent_path).type == FileType.NotFound + try: + with output_file.create() as f: + f.write(b"foo") + except Exception: + pytest.fail("Failed to write to file without parent directory") + + def test_pyarrow_input_file() -> None: """Test reading a file using PyArrowFile""" @@ -324,57 +348,57 @@ def 
test_schema_to_pyarrow_schema(table_schema_nested: Schema) -> None: actual = schema_to_pyarrow(table_schema_nested) expected = """foo: string -- field metadata -- - field_id: '1' + PARQUET:field_id: '1' bar: int32 not null -- field metadata -- - field_id: '2' + PARQUET:field_id: '2' baz: bool -- field metadata -- - field_id: '3' + PARQUET:field_id: '3' qux: list not null child 0, element: string not null -- field metadata -- - field_id: '5' + PARQUET:field_id: '5' -- field metadata -- - field_id: '4' + PARQUET:field_id: '4' quux: map> not null child 0, entries: struct not null> not null child 0, key: string not null -- field metadata -- - field_id: '7' + PARQUET:field_id: '7' child 1, value: map not null child 0, entries: struct not null child 0, key: string not null -- field metadata -- - field_id: '9' + PARQUET:field_id: '9' child 1, value: int32 not null -- field metadata -- - field_id: '10' + PARQUET:field_id: '10' -- field metadata -- - field_id: '8' + PARQUET:field_id: '8' -- field metadata -- - field_id: '6' + PARQUET:field_id: '6' location: list not null> not null child 0, element: struct not null child 0, latitude: float -- field metadata -- - field_id: '13' + PARQUET:field_id: '13' child 1, longitude: float -- field metadata -- - field_id: '14' + PARQUET:field_id: '14' -- field metadata -- - field_id: '12' + PARQUET:field_id: '12' -- field metadata -- - field_id: '11' + PARQUET:field_id: '11' person: struct child 0, name: string -- field metadata -- - field_id: '16' + PARQUET:field_id: '16' child 1, age: int32 not null -- field metadata -- - field_id: '17' + PARQUET:field_id: '17' -- field metadata -- - field_id: '15'""" + PARQUET:field_id: '15'""" assert repr(actual) == expected @@ -443,17 +467,15 @@ def test_string_type_to_pyarrow() -> None: def test_binary_type_to_pyarrow() -> None: iceberg_type = BinaryType() - assert visit(iceberg_type, _ConvertToArrowSchema()) == pa.binary() + assert visit(iceberg_type, _ConvertToArrowSchema()) == pa.large_binary() def test_struct_type_to_pyarrow(table_schema_simple: Schema) -> None: - expected = pa.struct( - [ - pa.field("foo", pa.string(), nullable=True, metadata={"field_id": "1"}), - pa.field("bar", pa.int32(), nullable=False, metadata={"field_id": "2"}), - pa.field("baz", pa.bool_(), nullable=True, metadata={"field_id": "3"}), - ] - ) + expected = pa.struct([ + pa.field("foo", pa.string(), nullable=True, metadata={"field_id": "1"}), + pa.field("bar", pa.int32(), nullable=False, metadata={"field_id": "2"}), + pa.field("baz", pa.bool_(), nullable=True, metadata={"field_id": "3"}), + ]) assert visit(table_schema_simple.as_struct(), _ConvertToArrowSchema()) == expected @@ -684,6 +706,24 @@ def schema_list_of_structs() -> Schema: ) +@pytest.fixture +def schema_map_of_structs() -> Schema: + return Schema( + NestedField( + 5, + "locations", + MapType( + key_id=51, + value_id=52, + key_type=StringType(), + value_type=StructType(NestedField(511, "lat", DoubleType()), NestedField(512, "long", DoubleType())), + element_required=False, + ), + required=False, + ), + ) + + @pytest.fixture def schema_map() -> Schema: return Schema( @@ -795,6 +835,25 @@ def file_list_of_structs(schema_list_of_structs: Schema, tmpdir: str) -> str: ) +@pytest.fixture +def file_map_of_structs(schema_map_of_structs: Schema, tmpdir: str) -> str: + pyarrow_schema = schema_to_pyarrow( + schema_map_of_structs, metadata={ICEBERG_SCHEMA: bytes(schema_map_of_structs.model_dump_json(), UTF8)} + ) + return _write_table_to_file( + f"file:{tmpdir}/e.parquet", + pyarrow_schema, + 
pa.Table.from_pylist( + [ + {"locations": {"1": {"lat": 52.371807, "long": 4.896029}, "2": {"lat": 52.387386, "long": 4.646219}}}, + {"locations": {}}, + {"locations": {"3": {"lat": 52.078663, "long": 4.288788}, "4": {"lat": 52.387386, "long": 4.646219}}}, + ], + schema=pyarrow_schema, + ), + ) + + @pytest.fixture def file_map(schema_map: Schema, tmpdir: str) -> str: pyarrow_schema = schema_to_pyarrow(schema_map, metadata={ICEBERG_SCHEMA: bytes(schema_map.model_dump_json(), UTF8)}) @@ -890,22 +949,22 @@ def test_projection_add_column(file_int: str) -> None: list: list child 0, element: int32 -- field metadata -- - field_id: '21' + PARQUET:field_id: '21' map: map child 0, entries: struct not null child 0, key: int32 not null -- field metadata -- - field_id: '31' + PARQUET:field_id: '31' child 1, value: string -- field metadata -- - field_id: '32' + PARQUET:field_id: '32' location: struct child 0, lat: double -- field metadata -- - field_id: '41' + PARQUET:field_id: '41' child 1, lon: double -- field metadata -- - field_id: '42'""" + PARQUET:field_id: '42'""" ) @@ -916,7 +975,11 @@ def test_read_list(schema_list: Schema, file_list: str) -> None: for actual, expected in zip(result_table.columns[0], [list(range(1, 10)), list(range(2, 20)), list(range(3, 30))]): assert actual.as_py() == expected - assert repr(result_table.schema) == "ids: list\n child 0, item: int32" + assert ( + repr(result_table.schema) + == """ids: list + child 0, element: int32""" + ) def test_read_map(schema_map: Schema, file_map: str) -> None: @@ -929,9 +992,9 @@ def test_read_map(schema_map: Schema, file_map: str) -> None: assert ( repr(result_table.schema) == """properties: map - child 0, entries: struct not null + child 0, entries: struct not null child 0, key: string not null - child 1, value: string""" + child 1, value: string not null""" ) @@ -955,10 +1018,10 @@ def test_projection_add_column_struct(schema_int: Schema, file_int: str) -> None child 0, entries: struct not null child 0, key: int32 not null -- field metadata -- - field_id: '3' + PARQUET:field_id: '3' child 1, value: string -- field metadata -- - field_id: '4'""" + PARQUET:field_id: '4'""" ) @@ -1006,7 +1069,7 @@ def test_projection_filter(schema_int: Schema, file_int: str) -> None: repr(result_table.schema) == """id: int32 -- field metadata -- - field_id: '1'""" + PARQUET:field_id: '1'""" ) @@ -1065,7 +1128,11 @@ def test_projection_nested_struct_subset(file_struct: str) -> None: assert actual.as_py() == {"lat": expected} assert len(result_table.columns[0]) == 3 - assert repr(result_table.schema) == "location: struct not null\n child 0, lat: double not null" + assert ( + repr(result_table.schema) + == """location: struct not null + child 0, lat: double not null""" + ) def test_projection_nested_new_field(file_struct: str) -> None: @@ -1084,7 +1151,11 @@ def test_projection_nested_new_field(file_struct: str) -> None: for actual, expected in zip(result_table.columns[0], [None, None, None]): assert actual.as_py() == {"null": expected} assert len(result_table.columns[0]) == 3 - assert repr(result_table.schema) == "location: struct not null\n child 0, null: double" + assert ( + repr(result_table.schema) + == """location: struct not null + child 0, null: double""" + ) def test_projection_nested_struct(schema_struct: Schema, file_struct: str) -> None: @@ -1113,7 +1184,10 @@ def test_projection_nested_struct(schema_struct: Schema, file_struct: str) -> No assert len(result_table.columns[0]) == 3 assert ( repr(result_table.schema) - == "location: struct not 
null\n child 0, lat: double\n child 1, null: double\n child 2, long: double" + == """location: struct not null + child 0, lat: double + child 1, null: double + child 2, long: double""" ) @@ -1138,28 +1212,75 @@ def test_projection_list_of_structs(schema_list_of_structs: Schema, file_list_of result_table = project(schema, [file_list_of_structs]) assert len(result_table.columns) == 1 assert len(result_table.columns[0]) == 3 + results = [row.as_py() for row in result_table.columns[0]] + assert results == [ + [ + {'latitude': 52.371807, 'longitude': 4.896029, 'altitude': None}, + {'latitude': 52.387386, 'longitude': 4.646219, 'altitude': None}, + ], + [], + [ + {'latitude': 52.078663, 'longitude': 4.288788, 'altitude': None}, + {'latitude': 52.387386, 'longitude': 4.646219, 'altitude': None}, + ], + ] + assert ( + repr(result_table.schema) + == """locations: list> + child 0, element: struct + child 0, latitude: double not null + child 1, longitude: double not null + child 2, altitude: double""" + ) + + +def test_projection_maps_of_structs(schema_map_of_structs: Schema, file_map_of_structs: str) -> None: + schema = Schema( + NestedField( + 5, + "locations", + MapType( + key_id=51, + value_id=52, + key_type=StringType(), + value_type=StructType( + NestedField(511, "latitude", DoubleType()), + NestedField(512, "longitude", DoubleType()), + NestedField(513, "altitude", DoubleType(), required=False), + ), + element_required=False, + ), + required=False, + ), + ) + + result_table = project(schema, [file_map_of_structs]) + assert len(result_table.columns) == 1 + assert len(result_table.columns[0]) == 3 for actual, expected in zip( result_table.columns[0], [ [ - {"latitude": 52.371807, "longitude": 4.896029, "altitude": None}, - {"latitude": 52.387386, "longitude": 4.646219, "altitude": None}, + ("1", {"latitude": 52.371807, "longitude": 4.896029, "altitude": None}), + ("2", {"latitude": 52.387386, "longitude": 4.646219, "altitude": None}), ], [], [ - {"latitude": 52.078663, "longitude": 4.288788, "altitude": None}, - {"latitude": 52.387386, "longitude": 4.646219, "altitude": None}, + ("3", {"latitude": 52.078663, "longitude": 4.288788, "altitude": None}), + ("4", {"latitude": 52.387386, "longitude": 4.646219, "altitude": None}), ], ], ): assert actual.as_py() == expected assert ( repr(result_table.schema) - == """locations: list> - child 0, item: struct - child 0, latitude: double not null - child 1, longitude: double not null - child 2, altitude: double""" + == """locations: map> + child 0, entries: struct not null> not null + child 0, key: string not null + child 1, value: struct not null + child 0, latitude: double not null + child 1, longitude: double not null + child 2, altitude: double""" ) @@ -1184,10 +1305,10 @@ def test_projection_nested_struct_different_parent_id(file_struct: str) -> None: == """location: struct child 0, lat: double -- field metadata -- - field_id: '41' + PARQUET:field_id: '41' child 1, long: double -- field metadata -- - field_id: '42'""" + PARQUET:field_id: '42'""" ) @@ -1549,3 +1670,43 @@ def check_results(location: str, expected_schema: str, expected_netloc: str, exp def test_make_compatible_name() -> None: assert make_compatible_name("label/abc") == "label_x2Fabc" assert make_compatible_name("label?abc") == "label_x3Fabc" + + +@pytest.mark.parametrize( + "vals, primitive_type, expected_result", + [ + ([None, 2, 1], IntegerType(), 1), + ([1, None, 2], IntegerType(), 1), + ([None, None, None], IntegerType(), None), + ([None, date(2024, 2, 4), date(2024, 1, 2)], 
DateType(), date(2024, 1, 2)), + ([date(2024, 1, 2), None, date(2024, 2, 4)], DateType(), date(2024, 1, 2)), + ([None, None, None], DateType(), None), + ], +) +def test_stats_aggregator_update_min(vals: List[Any], primitive_type: PrimitiveType, expected_result: Any) -> None: + stats = StatsAggregator(primitive_type, _primitive_to_physical(primitive_type)) + + for val in vals: + stats.update_min(val) + + assert stats.current_min == expected_result + + +@pytest.mark.parametrize( + "vals, primitive_type, expected_result", + [ + ([None, 2, 1], IntegerType(), 2), + ([1, None, 2], IntegerType(), 2), + ([None, None, None], IntegerType(), None), + ([None, date(2024, 2, 4), date(2024, 1, 2)], DateType(), date(2024, 2, 4)), + ([date(2024, 1, 2), None, date(2024, 2, 4)], DateType(), date(2024, 2, 4)), + ([None, None, None], DateType(), None), + ], +) +def test_stats_aggregator_update_max(vals: List[Any], primitive_type: PrimitiveType, expected_result: Any) -> None: + stats = StatsAggregator(primitive_type, _primitive_to_physical(primitive_type)) + + for val in vals: + stats.update_max(val) + + assert stats.current_max == expected_result diff --git a/tests/io/test_pyarrow_stats.py b/tests/io/test_pyarrow_stats.py index 6f00061174..01b844a43e 100644 --- a/tests/io/test_pyarrow_stats.py +++ b/tests/io/test_pyarrow_stats.py @@ -81,7 +81,7 @@ class TestStruct: y: Optional[float] -def construct_test_table() -> Tuple[Any, Any, Union[TableMetadataV1, TableMetadataV2]]: +def construct_test_table() -> Tuple[pq.FileMetaData, Union[TableMetadataV1, TableMetadataV2]]: table_metadata = { "format-version": 2, "location": "s3://bucket/test/location", @@ -172,7 +172,7 @@ def construct_test_table() -> Tuple[Any, Any, Union[TableMetadataV1, TableMetada with pq.ParquetWriter(f, table.schema, metadata_collector=metadata_collector) as writer: writer.write_table(table) - return f.getvalue(), metadata_collector[0], table_metadata + return metadata_collector[0], table_metadata def get_current_schema( @@ -182,45 +182,27 @@ def get_current_schema( def test_record_count() -> None: - (file_bytes, metadata, table_metadata) = construct_test_table() + metadata, table_metadata = construct_test_table() schema = get_current_schema(table_metadata) datafile = DataFile() fill_parquet_file_metadata( datafile, metadata, - len(file_bytes), compute_statistics_plan(schema, table_metadata.properties), parquet_path_to_id_mapping(schema), ) assert datafile.record_count == 4 -def test_file_size() -> None: - (file_bytes, metadata, table_metadata) = construct_test_table() - - schema = get_current_schema(table_metadata) - datafile = DataFile() - fill_parquet_file_metadata( - datafile, - metadata, - len(file_bytes), - compute_statistics_plan(schema, table_metadata.properties), - parquet_path_to_id_mapping(schema), - ) - - assert datafile.file_size_in_bytes == len(file_bytes) - - def test_value_counts() -> None: - (file_bytes, metadata, table_metadata) = construct_test_table() + metadata, table_metadata = construct_test_table() schema = get_current_schema(table_metadata) datafile = DataFile() fill_parquet_file_metadata( datafile, metadata, - len(file_bytes), compute_statistics_plan(schema, table_metadata.properties), parquet_path_to_id_mapping(schema), ) @@ -236,14 +218,13 @@ def test_value_counts() -> None: def test_column_sizes() -> None: - (file_bytes, metadata, table_metadata) = construct_test_table() + metadata, table_metadata = construct_test_table() schema = get_current_schema(table_metadata) datafile = DataFile() fill_parquet_file_metadata( 
datafile, metadata, - len(file_bytes), compute_statistics_plan(schema, table_metadata.properties), parquet_path_to_id_mapping(schema), ) @@ -258,14 +239,13 @@ def test_column_sizes() -> None: def test_null_and_nan_counts() -> None: - (file_bytes, metadata, table_metadata) = construct_test_table() + metadata, table_metadata = construct_test_table() schema = get_current_schema(table_metadata) datafile = DataFile() fill_parquet_file_metadata( datafile, metadata, - len(file_bytes), compute_statistics_plan(schema, table_metadata.properties), parquet_path_to_id_mapping(schema), ) @@ -287,14 +267,13 @@ def test_null_and_nan_counts() -> None: def test_bounds() -> None: - (file_bytes, metadata, table_metadata) = construct_test_table() + metadata, table_metadata = construct_test_table() schema = get_current_schema(table_metadata) datafile = DataFile() fill_parquet_file_metadata( datafile, metadata, - len(file_bytes), compute_statistics_plan(schema, table_metadata.properties), parquet_path_to_id_mapping(schema), ) @@ -332,7 +311,7 @@ def test_metrics_mode_parsing() -> None: def test_metrics_mode_none() -> None: - (file_bytes, metadata, table_metadata) = construct_test_table() + metadata, table_metadata = construct_test_table() schema = get_current_schema(table_metadata) datafile = DataFile() @@ -340,7 +319,6 @@ def test_metrics_mode_none() -> None: fill_parquet_file_metadata( datafile, metadata, - len(file_bytes), compute_statistics_plan(schema, table_metadata.properties), parquet_path_to_id_mapping(schema), ) @@ -353,7 +331,7 @@ def test_metrics_mode_none() -> None: def test_metrics_mode_counts() -> None: - (file_bytes, metadata, table_metadata) = construct_test_table() + metadata, table_metadata = construct_test_table() schema = get_current_schema(table_metadata) datafile = DataFile() @@ -361,7 +339,6 @@ def test_metrics_mode_counts() -> None: fill_parquet_file_metadata( datafile, metadata, - len(file_bytes), compute_statistics_plan(schema, table_metadata.properties), parquet_path_to_id_mapping(schema), ) @@ -374,7 +351,7 @@ def test_metrics_mode_counts() -> None: def test_metrics_mode_full() -> None: - (file_bytes, metadata, table_metadata) = construct_test_table() + metadata, table_metadata = construct_test_table() schema = get_current_schema(table_metadata) datafile = DataFile() @@ -382,7 +359,6 @@ def test_metrics_mode_full() -> None: fill_parquet_file_metadata( datafile, metadata, - len(file_bytes), compute_statistics_plan(schema, table_metadata.properties), parquet_path_to_id_mapping(schema), ) @@ -401,7 +377,7 @@ def test_metrics_mode_full() -> None: def test_metrics_mode_non_default_trunc() -> None: - (file_bytes, metadata, table_metadata) = construct_test_table() + metadata, table_metadata = construct_test_table() schema = get_current_schema(table_metadata) datafile = DataFile() @@ -409,7 +385,6 @@ def test_metrics_mode_non_default_trunc() -> None: fill_parquet_file_metadata( datafile, metadata, - len(file_bytes), compute_statistics_plan(schema, table_metadata.properties), parquet_path_to_id_mapping(schema), ) @@ -428,7 +403,7 @@ def test_metrics_mode_non_default_trunc() -> None: def test_column_metrics_mode() -> None: - (file_bytes, metadata, table_metadata) = construct_test_table() + metadata, table_metadata = construct_test_table() schema = get_current_schema(table_metadata) datafile = DataFile() @@ -437,7 +412,6 @@ def test_column_metrics_mode() -> None: fill_parquet_file_metadata( datafile, metadata, - len(file_bytes), compute_statistics_plan(schema, table_metadata.properties), 
parquet_path_to_id_mapping(schema), ) @@ -455,7 +429,7 @@ def test_column_metrics_mode() -> None: assert 1 not in datafile.upper_bounds -def construct_test_table_primitive_types() -> Tuple[Any, Any, Union[TableMetadataV1, TableMetadataV2]]: +def construct_test_table_primitive_types() -> Tuple[pq.FileMetaData, Union[TableMetadataV1, TableMetadataV2]]: table_metadata = { "format-version": 2, "location": "s3://bucket/test/location", @@ -527,11 +501,11 @@ def construct_test_table_primitive_types() -> Tuple[Any, Any, Union[TableMetadat with pq.ParquetWriter(f, table.schema, metadata_collector=metadata_collector) as writer: writer.write_table(table) - return f.getvalue(), metadata_collector[0], table_metadata + return metadata_collector[0], table_metadata def test_metrics_primitive_types() -> None: - (file_bytes, metadata, table_metadata) = construct_test_table_primitive_types() + metadata, table_metadata = construct_test_table_primitive_types() schema = get_current_schema(table_metadata) datafile = DataFile() @@ -539,7 +513,6 @@ def test_metrics_primitive_types() -> None: fill_parquet_file_metadata( datafile, metadata, - len(file_bytes), compute_statistics_plan(schema, table_metadata.properties), parquet_path_to_id_mapping(schema), ) @@ -579,7 +552,7 @@ def test_metrics_primitive_types() -> None: assert datafile.upper_bounds[12] == b"wp" -def construct_test_table_invalid_upper_bound() -> Tuple[Any, Any, Union[TableMetadataV1, TableMetadataV2]]: +def construct_test_table_invalid_upper_bound() -> Tuple[pq.FileMetaData, Union[TableMetadataV1, TableMetadataV2]]: table_metadata = { "format-version": 2, "location": "s3://bucket/test/location", @@ -627,11 +600,11 @@ def construct_test_table_invalid_upper_bound() -> Tuple[Any, Any, Union[TableMet with pq.ParquetWriter(f, table.schema, metadata_collector=metadata_collector) as writer: writer.write_table(table) - return f.getvalue(), metadata_collector[0], table_metadata + return metadata_collector[0], table_metadata def test_metrics_invalid_upper_bound() -> None: - (file_bytes, metadata, table_metadata) = construct_test_table_invalid_upper_bound() + metadata, table_metadata = construct_test_table_invalid_upper_bound() schema = get_current_schema(table_metadata) datafile = DataFile() @@ -639,7 +612,6 @@ def test_metrics_invalid_upper_bound() -> None: fill_parquet_file_metadata( datafile, metadata, - len(file_bytes), compute_statistics_plan(schema, table_metadata.properties), parquet_path_to_id_mapping(schema), ) @@ -660,14 +632,13 @@ def test_metrics_invalid_upper_bound() -> None: def test_offsets() -> None: - (file_bytes, metadata, table_metadata) = construct_test_table() + metadata, table_metadata = construct_test_table() schema = get_current_schema(table_metadata) datafile = DataFile() fill_parquet_file_metadata( datafile, metadata, - len(file_bytes), compute_statistics_plan(schema, table_metadata.properties), parquet_path_to_id_mapping(schema), ) diff --git a/tests/io/test_pyarrow_visitor.py b/tests/io/test_pyarrow_visitor.py index 5194d8660e..5b55bd61b6 100644 --- a/tests/io/test_pyarrow_visitor.py +++ b/tests/io/test_pyarrow_visitor.py @@ -23,11 +23,14 @@ from pyiceberg.io.pyarrow import ( _ConvertToArrowSchema, _ConvertToIceberg, + _ConvertToIcebergWithoutIDs, + _HasIds, pyarrow_to_schema, schema_to_pyarrow, visit_pyarrow, ) from pyiceberg.schema import Schema, visit +from pyiceberg.table.name_mapping import MappedField, NameMapping from pyiceberg.types import ( BinaryType, BooleanType, @@ -81,6 +84,18 @@ def test_pyarrow_boolean_to_iceberg() -> 
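Across this file the refactor is mechanical: fill_parquet_file_metadata no longer takes a byte length, since everything it fills now comes from the Parquet FileMetaData itself (test_file_size is deleted for the same reason). The resulting call shape, exactly as the tests above exercise it:

    metadata, table_metadata = construct_test_table()
    schema = get_current_schema(table_metadata)
    datafile = DataFile()
    fill_parquet_file_metadata(
        datafile,
        metadata,  # pq.FileMetaData collected by the ParquetWriter
        compute_statistics_plan(schema, table_metadata.properties),
        parquet_path_to_id_mapping(schema),
    )
    assert datafile.record_count == 4  # as in test_record_count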
None: assert visit(converted_iceberg_type, _ConvertToArrowSchema()) == pyarrow_type +def test_pyarrow_int8_to_iceberg() -> None: + pyarrow_type = pa.int8() + converted_iceberg_type = visit_pyarrow(pyarrow_type, _ConvertToIceberg()) + assert converted_iceberg_type == IntegerType() + + +def test_pyarrow_int16_to_iceberg() -> None: + pyarrow_type = pa.int16() + converted_iceberg_type = visit_pyarrow(pyarrow_type, _ConvertToIceberg()) + assert converted_iceberg_type == IntegerType() + + def test_pyarrow_int32_to_iceberg() -> None: pyarrow_type = pa.int32() converted_iceberg_type = visit_pyarrow(pyarrow_type, _ConvertToIceberg()) @@ -200,20 +215,18 @@ def test_pyarrow_string_to_iceberg() -> None: def test_pyarrow_variable_binary_to_iceberg() -> None: - pyarrow_type = pa.binary() + pyarrow_type = pa.large_binary() converted_iceberg_type = visit_pyarrow(pyarrow_type, _ConvertToIceberg()) assert converted_iceberg_type == BinaryType() assert visit(converted_iceberg_type, _ConvertToArrowSchema()) == pyarrow_type def test_pyarrow_struct_to_iceberg() -> None: - pyarrow_struct = pa.struct( - [ - pa.field("foo", pa.string(), nullable=True, metadata={"field_id": "1", "doc": "foo doc"}), - pa.field("bar", pa.int32(), nullable=False, metadata={"field_id": "2"}), - pa.field("baz", pa.bool_(), nullable=True, metadata={"field_id": "3"}), - ] - ) + pyarrow_struct = pa.struct([ + pa.field("foo", pa.string(), nullable=True, metadata={"PARQUET:field_id": "1", "doc": "foo doc"}), + pa.field("bar", pa.int32(), nullable=False, metadata={"PARQUET:field_id": "2"}), + pa.field("baz", pa.bool_(), nullable=True, metadata={"PARQUET:field_id": "3"}), + ]) expected = StructType( NestedField(field_id=1, name="foo", field_type=StringType(), required=False, doc="foo doc"), NestedField(field_id=2, name="bar", field_type=IntegerType(), required=True), @@ -223,7 +236,27 @@ def test_pyarrow_struct_to_iceberg() -> None: def test_pyarrow_list_to_iceberg() -> None: - pyarrow_list = pa.list_(pa.field("element", pa.int32(), nullable=False, metadata={"field_id": "1"})) + pyarrow_list = pa.list_(pa.field("element", pa.int32(), nullable=False, metadata={"PARQUET:field_id": "1"})) + expected = ListType( + element_id=1, + element_type=IntegerType(), + element_required=True, + ) + assert visit_pyarrow(pyarrow_list, _ConvertToIceberg()) == expected + + +def test_pyarrow_large_list_to_iceberg() -> None: + pyarrow_list = pa.large_list(pa.field("element", pa.int32(), nullable=False, metadata={"PARQUET:field_id": "1"})) + expected = ListType( + element_id=1, + element_type=IntegerType(), + element_required=True, + ) + assert visit_pyarrow(pyarrow_list, _ConvertToIceberg()) == expected + + +def test_pyarrow_fixed_size_list_to_iceberg() -> None: + pyarrow_list = pa.list_(pa.field("element", pa.int32(), nullable=False, metadata={"PARQUET:field_id": "1"}), 1) expected = ListType( element_id=1, element_type=IntegerType(), @@ -234,8 +267,8 @@ def test_pyarrow_list_to_iceberg() -> None: def test_pyarrow_map_to_iceberg() -> None: pyarrow_map = pa.map_( - pa.field("key", pa.int32(), nullable=False, metadata={"field_id": "1"}), - pa.field("value", pa.string(), nullable=False, metadata={"field_id": "2"}), + pa.field("key", pa.int32(), nullable=False, metadata={"PARQUET:field_id": "1"}), + pa.field("value", pa.string(), nullable=False, metadata={"PARQUET:field_id": "2"}), ) expected = MapType( key_id=1, @@ -269,3 +302,230 @@ def test_round_schema_conversion_nested(table_schema_nested: Schema) -> None: 15: person: optional struct<16: name: optional string, 
17: age: required int> }""" assert actual == expected + + +def test_round_schema_large_string() -> None: + schema = pa.schema([pa.field("animals", pa.large_string())]) + actual = str(pyarrow_to_schema(schema, name_mapping=NameMapping([MappedField(field_id=1, names=["animals"])]))) + expected = """table { + 1: animals: optional string +}""" + assert actual == expected + + +def test_simple_schema_has_missing_ids() -> None: + schema = pa.schema([ + pa.field('foo', pa.string(), nullable=False), + ]) + visitor = _HasIds() + has_ids = visit_pyarrow(schema, visitor) + assert not has_ids + + +def test_simple_schema_has_missing_ids_partial() -> None: + schema = pa.schema([ + pa.field('foo', pa.string(), nullable=False, metadata={"PARQUET:field_id": "1", "doc": "foo doc"}), + pa.field('bar', pa.int32(), nullable=False), + ]) + visitor = _HasIds() + has_ids = visit_pyarrow(schema, visitor) + assert not has_ids + + +def test_nested_schema_has_missing_ids() -> None: + schema = pa.schema([ + pa.field('foo', pa.string(), nullable=False), + pa.field( + 'quux', + pa.map_( + pa.string(), + pa.map_(pa.string(), pa.int32()), + ), + nullable=False, + ), + ]) + visitor = _HasIds() + has_ids = visit_pyarrow(schema, visitor) + assert not has_ids + + +def test_nested_schema_has_ids() -> None: + schema = pa.schema([ + pa.field('foo', pa.string(), nullable=False, metadata={"PARQUET:field_id": "1", "doc": "foo doc"}), + pa.field( + 'quux', + pa.map_( + pa.field("key", pa.string(), nullable=False, metadata={"PARQUET:field_id": "7"}), + pa.field( + "value", + pa.map_( + pa.field('key', pa.string(), nullable=False, metadata={"PARQUET:field_id": "9"}), + pa.field('value', pa.int32(), metadata={"PARQUET:field_id": "10"}), + ), + nullable=False, + metadata={"PARQUET:field_id": "8"}, + ), + ), + nullable=False, + metadata={"PARQUET:field_id": "6", "doc": "quux doc"}, + ), + ]) + visitor = _HasIds() + has_ids = visit_pyarrow(schema, visitor) + assert has_ids + + +def test_nested_schema_has_partial_missing_ids() -> None: + schema = pa.schema([ + pa.field('foo', pa.string(), nullable=False, metadata={"PARQUET:field_id": "1", "doc": "foo doc"}), + pa.field( + 'quux', + pa.map_( + pa.field("key", pa.string(), nullable=False, metadata={"PARQUET:field_id": "7"}), + pa.field( + "value", + pa.map_(pa.field('key', pa.string(), nullable=False), pa.field('value', pa.int32())), + nullable=False, + ), + ), + nullable=False, + metadata={"PARQUET:field_id": "6", "doc": "quux doc"}, + ), + ]) + visitor = _HasIds() + has_ids = visit_pyarrow(schema, visitor) + assert not has_ids + + +def test_pyarrow_schema_to_schema_missing_ids_and_name_mapping(pyarrow_schema_simple_without_ids: pa.Schema) -> None: + schema = pyarrow_schema_simple_without_ids + with pytest.raises(ValueError) as exc_info: + _ = pyarrow_to_schema(schema) + assert ( + "Parquet file does not have field-ids and the Iceberg table does not have 'schema.name-mapping.default' defined" + in str(exc_info.value) + ) + + +def test_simple_pyarrow_schema_to_schema_missing_ids_using_name_mapping( + pyarrow_schema_simple_without_ids: pa.Schema, iceberg_schema_simple: Schema +) -> None: + schema = pyarrow_schema_simple_without_ids + name_mapping = NameMapping([ + MappedField(field_id=1, names=['foo']), + MappedField(field_id=2, names=['bar']), + MappedField(field_id=3, names=['baz']), + ]) + + assert pyarrow_to_schema(schema, name_mapping) == iceberg_schema_simple + + +def test_simple_pyarrow_schema_to_schema_missing_ids_using_name_mapping_partial_exception( + 
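Taken together, the _HasIds and name-mapping tests describe the fallback path for Arrow schemas without field IDs: _HasIds reports whether every field carries PARQUET:field_id metadata, and pyarrow_to_schema resolves IDs from a NameMapping when they are absent. A condensed sketch built from the cases above:

    import pyarrow as pa

    schema = pa.schema([pa.field("animals", pa.large_string())])
    assert not visit_pyarrow(schema, _HasIds())  # no PARQUET:field_id metadata anywhere

    # Without a mapping this raises ValueError; with one, ids are resolved by name
    mapping = NameMapping([MappedField(field_id=1, names=["animals"])])
    iceberg_schema = pyarrow_to_schema(schema, name_mapping=mapping)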
pyarrow_schema_simple_without_ids: pa.Schema, +) -> None: + schema = pyarrow_schema_simple_without_ids + name_mapping = NameMapping([ + MappedField(field_id=1, names=['foo']), + ]) + with pytest.raises(ValueError) as exc_info: + _ = pyarrow_to_schema(schema, name_mapping) + assert "Could not find field with name: bar" in str(exc_info.value) + + +def test_nested_pyarrow_schema_to_schema_missing_ids_using_name_mapping( + pyarrow_schema_nested_without_ids: pa.Schema, iceberg_schema_nested: Schema +) -> None: + schema = pyarrow_schema_nested_without_ids + + name_mapping = NameMapping([ + MappedField(field_id=1, names=['foo']), + MappedField(field_id=2, names=['bar']), + MappedField(field_id=3, names=['baz']), + MappedField(field_id=4, names=['qux'], fields=[MappedField(field_id=5, names=['element'])]), + MappedField( + field_id=6, + names=['quux'], + fields=[ + MappedField(field_id=7, names=['key']), + MappedField( + field_id=8, + names=['value'], + fields=[ + MappedField(field_id=9, names=['key']), + MappedField(field_id=10, names=['value']), + ], + ), + ], + ), + MappedField( + field_id=11, + names=['location'], + fields=[ + MappedField( + field_id=12, + names=['element'], + fields=[ + MappedField(field_id=13, names=['latitude']), + MappedField(field_id=14, names=['longitude']), + ], + ) + ], + ), + MappedField( + field_id=15, + names=['person'], + fields=[ + MappedField(field_id=16, names=['name']), + MappedField(field_id=17, names=['age']), + ], + ), + ]) + + assert pyarrow_to_schema(schema, name_mapping) == iceberg_schema_nested + + +def test_pyarrow_schema_to_schema_missing_ids_using_name_mapping_nested_missing_id() -> None: + schema = pa.schema([ + pa.field('foo', pa.string(), nullable=False), + pa.field( + 'quux', + pa.map_( + pa.string(), + pa.map_(pa.string(), pa.int32()), + ), + nullable=False, + ), + ]) + + name_mapping = NameMapping([ + MappedField(field_id=1, names=['foo']), + MappedField( + field_id=6, + names=['quux'], + fields=[ + MappedField(field_id=7, names=['key']), + MappedField( + field_id=8, + names=['value'], + fields=[ + MappedField(field_id=10, names=['value']), + ], + ), + ], + ), + ]) + with pytest.raises(ValueError) as exc_info: + _ = pyarrow_to_schema(schema, name_mapping) + assert "Could not find field with name: quux.value.key" in str(exc_info.value) + + +def test_pyarrow_schema_to_schema_fresh_ids_simple_schema( + pyarrow_schema_simple_without_ids: pa.Schema, iceberg_schema_simple_no_ids: Schema +) -> None: + assert visit_pyarrow(pyarrow_schema_simple_without_ids, _ConvertToIcebergWithoutIDs()) == iceberg_schema_simple_no_ids + + +def test_pyarrow_schema_to_schema_fresh_ids_nested_schema( + pyarrow_schema_nested_without_ids: pa.Schema, iceberg_schema_nested_no_ids: Schema +) -> None: + assert visit_pyarrow(pyarrow_schema_nested_without_ids, _ConvertToIcebergWithoutIDs()) == iceberg_schema_nested_no_ids diff --git a/tests/table/test_init.py b/tests/table/test_init.py index 04d467c318..f734211510 100644 --- a/tests/table/test_init.py +++ b/tests/table/test_init.py @@ -17,11 +17,14 @@ # pylint:disable=redefined-outer-name import uuid from copy import copy -from typing import Dict +from typing import Any, Dict +import pyarrow as pa import pytest +from pydantic import ValidationError from sortedcontainers import SortedList +from pyiceberg.catalog.noop import NoopCatalog from pyiceberg.exceptions import CommitFailedException from pyiceberg.expressions import ( AlwaysTrue, @@ -29,7 +32,7 @@ EqualTo, In, ) -from pyiceberg.io import PY_IO_IMPL +from pyiceberg.io import 
PY_IO_IMPL, load_file_io from pyiceberg.manifest import ( DataFile, DataFileContent, @@ -41,6 +44,7 @@ from pyiceberg.schema import Schema from pyiceberg.table import ( AddSnapshotUpdate, + AddSortOrderUpdate, AssertCreate, AssertCurrentSchemaId, AssertDefaultSortOrderId, @@ -49,19 +53,21 @@ AssertLastAssignedPartitionId, AssertRefSnapshotId, AssertTableUUID, + RemovePropertiesUpdate, + SetDefaultSortOrderUpdate, SetPropertiesUpdate, SetSnapshotRefUpdate, - SnapshotRef, StaticTable, Table, UpdateSchema, _apply_table_update, - _generate_snapshot_id, - _match_deletes_to_datafile, + _check_schema, + _match_deletes_to_data_file, _TableMetadataUpdateContext, update_table_metadata, ) -from pyiceberg.table.metadata import INITIAL_SEQUENCE_NUMBER, TableMetadataUtil, TableMetadataV2 +from pyiceberg.table.metadata import INITIAL_SEQUENCE_NUMBER, TableMetadataUtil, TableMetadataV2, _generate_snapshot_id +from pyiceberg.table.refs import SnapshotRef from pyiceberg.table.snapshots import ( Operation, Snapshot, @@ -101,26 +107,26 @@ def test_schema(table_v2: Table) -> None: NestedField(field_id=1, name="x", field_type=LongType(), required=True), NestedField(field_id=2, name="y", field_type=LongType(), required=True, doc="comment"), NestedField(field_id=3, name="z", field_type=LongType(), required=True), - schema_id=1, identifier_field_ids=[1, 2], ) + assert table_v2.schema().schema_id == 1 def test_schemas(table_v2: Table) -> None: assert table_v2.schemas() == { 0: Schema( NestedField(field_id=1, name="x", field_type=LongType(), required=True), - schema_id=0, identifier_field_ids=[], ), 1: Schema( NestedField(field_id=1, name="x", field_type=LongType(), required=True), NestedField(field_id=2, name="y", field_type=LongType(), required=True, doc="comment"), NestedField(field_id=3, name="z", field_type=LongType(), required=True), - schema_id=1, identifier_field_ids=[1, 2], ), } + assert table_v2.schemas()[0].schema_id == 0 + assert table_v2.schemas()[1].schema_id == 1 def test_spec(table_v2: Table) -> None: @@ -260,31 +266,34 @@ def test_table_scan_ref_does_not_exists(table_v2: Table) -> None: def test_table_scan_projection_full_schema(table_v2: Table) -> None: scan = table_v2.scan() - assert scan.select("x", "y", "z").projection() == Schema( + projection_schema = scan.select("x", "y", "z").projection() + assert projection_schema == Schema( NestedField(field_id=1, name="x", field_type=LongType(), required=True), NestedField(field_id=2, name="y", field_type=LongType(), required=True, doc="comment"), NestedField(field_id=3, name="z", field_type=LongType(), required=True), - schema_id=1, identifier_field_ids=[1, 2], ) + assert projection_schema.schema_id == 1 def test_table_scan_projection_single_column(table_v2: Table) -> None: scan = table_v2.scan() - assert scan.select("y").projection() == Schema( + projection_schema = scan.select("y").projection() + assert projection_schema == Schema( NestedField(field_id=2, name="y", field_type=LongType(), required=True, doc="comment"), - schema_id=1, identifier_field_ids=[2], ) + assert projection_schema.schema_id == 1 def test_table_scan_projection_single_column_case_sensitive(table_v2: Table) -> None: scan = table_v2.scan() - assert scan.with_case_sensitive(False).select("Y").projection() == Schema( + projection_schema = scan.with_case_sensitive(False).select("Y").projection() + assert projection_schema == Schema( NestedField(field_id=2, name="y", field_type=LongType(), required=True, doc="comment"), - schema_id=1, identifier_field_ids=[2], ) + assert 
projection_schema.schema_id == 1 def test_table_scan_projection_unknown_column(table_v2: Table) -> None: @@ -356,7 +365,7 @@ def test_match_deletes_to_datafile() -> None: upper_bounds={}, ), ) - assert _match_deletes_to_datafile( + assert _match_deletes_to_data_file( data_entry, SortedList(iterable=[delete_entry_1, delete_entry_2], key=lambda entry: entry.sequence_number or INITIAL_SEQUENCE_NUMBER), ) == { @@ -413,7 +422,7 @@ def test_match_deletes_to_datafile_duplicate_number() -> None: upper_bounds={}, ), ) - assert _match_deletes_to_datafile( + assert _match_deletes_to_data_file( data_entry, SortedList(iterable=[delete_entry_1, delete_entry_2], key=lambda entry: entry.sequence_number or INITIAL_SEQUENCE_NUMBER), ) == { @@ -423,11 +432,13 @@ def test_match_deletes_to_datafile_duplicate_number() -> None: def test_serialize_set_properties_updates() -> None: - assert SetPropertiesUpdate(updates={"abc": "🤪"}).model_dump_json() == """{"action":"set-properties","updates":{"abc":"🤪"}}""" + assert ( + SetPropertiesUpdate(updates={"abc": "🤪"}).model_dump_json() == """{"action":"set-properties","updates":{"abc":"🤪"}}""" + ) def test_add_column(table_v2: Table) -> None: - update = UpdateSchema(table_v2) + update = UpdateSchema(transaction=table_v2.transaction()) update.add_column(path="b", field_type=IntegerType()) apply_schema: Schema = update._apply() # pylint: disable=W0212 assert len(apply_schema.fields) == 4 @@ -461,7 +472,7 @@ def test_add_primitive_type_column(table_v2: Table) -> None: for name, type_ in primitive_type.items(): field_name = f"new_column_{name}" - update = UpdateSchema(table_v2) + update = UpdateSchema(transaction=table_v2.transaction()) update.add_column(path=field_name, field_type=type_, doc=f"new_column_{name}") new_schema = update._apply() # pylint: disable=W0212 @@ -473,7 +484,7 @@ def test_add_primitive_type_column(table_v2: Table) -> None: def test_add_nested_type_column(table_v2: Table) -> None: # add struct type column field_name = "new_column_struct" - update = UpdateSchema(table_v2) + update = UpdateSchema(transaction=table_v2.transaction()) struct_ = StructType( NestedField(1, "lat", DoubleType()), NestedField(2, "long", DoubleType()), @@ -491,7 +502,7 @@ def test_add_nested_type_column(table_v2: Table) -> None: def test_add_nested_map_type_column(table_v2: Table) -> None: # add map type column field_name = "new_column_map" - update = UpdateSchema(table_v2) + update = UpdateSchema(transaction=table_v2.transaction()) map_ = MapType(1, StringType(), 2, IntegerType(), False) update.add_column(path=field_name, field_type=map_) new_schema = update._apply() # pylint: disable=W0212 @@ -503,7 +514,7 @@ def test_add_nested_map_type_column(table_v2: Table) -> None: def test_add_nested_list_type_column(table_v2: Table) -> None: # add list type column field_name = "new_column_list" - update = UpdateSchema(table_v2) + update = UpdateSchema(transaction=table_v2.transaction()) list_ = ListType( element_id=101, element_type=StructType( @@ -526,6 +537,51 @@ def test_add_nested_list_type_column(table_v2: Table) -> None: assert new_schema.highest_field_id == 7 +def test_apply_set_properties_update(table_v2: Table) -> None: + base_metadata = table_v2.metadata + + new_metadata_no_update = update_table_metadata(base_metadata, (SetPropertiesUpdate(updates={}),)) + assert new_metadata_no_update == base_metadata + + new_metadata = update_table_metadata( + base_metadata, (SetPropertiesUpdate(updates={"read.split.target.size": "123", "test_a": "test_a", "test_b": "test_b"}),) + ) + + assert 
base_metadata.properties == {"read.split.target.size": "134217728"} + assert new_metadata.properties == {"read.split.target.size": "123", "test_a": "test_a", "test_b": "test_b"} + + new_metadata_add_only = update_table_metadata(new_metadata, (SetPropertiesUpdate(updates={"test_c": "test_c"}),)) + + assert new_metadata_add_only.properties == { + "read.split.target.size": "123", + "test_a": "test_a", + "test_b": "test_b", + "test_c": "test_c", + } + + +def test_apply_remove_properties_update(table_v2: Table) -> None: + base_metadata = update_table_metadata( + table_v2.metadata, + (SetPropertiesUpdate(updates={"test_a": "test_a", "test_b": "test_b", "test_c": "test_c", "test_d": "test_d"}),), + ) + + new_metadata_no_removal = update_table_metadata(base_metadata, (RemovePropertiesUpdate(removals=[]),)) + + assert base_metadata == new_metadata_no_removal + + new_metadata = update_table_metadata(base_metadata, (RemovePropertiesUpdate(removals=["test_a", "test_c"]),)) + + assert base_metadata.properties == { + "read.split.target.size": "134217728", + "test_a": "test_a", + "test_b": "test_b", + "test_c": "test_c", + "test_d": "test_d", + } + assert new_metadata.properties == {"read.split.target.size": "134217728", "test_b": "test_b", "test_d": "test_d"} + + def test_apply_add_schema_update(table_v2: Table) -> None: transaction = table_v2.transaction() update = transaction.update_schema() @@ -534,18 +590,14 @@ def test_apply_add_schema_update(table_v2: Table) -> None: test_context = _TableMetadataUpdateContext() - new_table_metadata = _apply_table_update( - transaction._updates[0], base_metadata=table_v2.metadata, context=test_context - ) # pylint: disable=W0212 + new_table_metadata = _apply_table_update(transaction._updates[0], base_metadata=table_v2.metadata, context=test_context) # pylint: disable=W0212 assert len(new_table_metadata.schemas) == 3 assert new_table_metadata.current_schema_id == 1 assert len(test_context._updates) == 1 assert test_context._updates[0] == transaction._updates[0] # pylint: disable=W0212 assert test_context.is_added_schema(2) - new_table_metadata = _apply_table_update( - transaction._updates[1], base_metadata=new_table_metadata, context=test_context - ) # pylint: disable=W0212 + new_table_metadata = _apply_table_update(transaction._updates[1], base_metadata=new_table_metadata, context=test_context) # pylint: disable=W0212 assert len(new_table_metadata.schemas) == 3 assert new_table_metadata.current_schema_id == 2 assert len(test_context._updates) == 2 @@ -617,6 +669,26 @@ def test_update_metadata_set_snapshot_ref(table_v2: Table) -> None: ) +def test_update_metadata_add_update_sort_order(table_v2: Table) -> None: + new_sort_order = SortOrder(order_id=table_v2.sort_order().order_id + 1) + new_metadata = update_table_metadata( + table_v2.metadata, + (AddSortOrderUpdate(sort_order=new_sort_order), SetDefaultSortOrderUpdate(sort_order_id=-1)), + ) + assert len(new_metadata.sort_orders) == 2 + assert new_metadata.sort_orders[-1] == new_sort_order + assert new_metadata.default_sort_order_id == new_sort_order.order_id + + +def test_update_metadata_update_sort_order_invalid(table_v2: Table) -> None: + with pytest.raises(ValueError, match="Cannot set current sort order to the last added one when no sort order has been added"): + update_table_metadata(table_v2.metadata, (SetDefaultSortOrderUpdate(sort_order_id=-1),)) + + invalid_order_id = 10 + with pytest.raises(ValueError, match=f"Sort order with id {invalid_order_id} does not exist"): + 
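The two property-update tests compose naturally: SetPropertiesUpdate merges keys into the existing map, RemovePropertiesUpdate deletes them, and update_table_metadata leaves the base metadata untouched. A compact round-trip using the same table_v2 fixture:

    metadata = update_table_metadata(table_v2.metadata, (SetPropertiesUpdate(updates={"owner": "test"}),))
    assert metadata.properties["owner"] == "test"

    metadata = update_table_metadata(metadata, (RemovePropertiesUpdate(removals=["owner"]),))
    assert "owner" not in metadata.properties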
update_table_metadata(table_v2.metadata, (SetDefaultSortOrderUpdate(sort_order_id=invalid_order_id),)) + + def test_update_metadata_with_multiple_updates(table_v1: Table) -> None: base_metadata = table_v1.metadata transaction = table_v1.transaction() @@ -626,6 +698,8 @@ def test_update_metadata_with_multiple_updates(table_v1: Table) -> None: schema_update_1.add_column(path="b", field_type=IntegerType()) schema_update_1.commit() + transaction.set_properties(owner="test", test_a="test_a", test_b="test_b", test_c="test_c") + test_updates = transaction._updates # pylint: disable=W0212 new_snapshot = Snapshot( @@ -640,6 +714,7 @@ def test_update_metadata_with_multiple_updates(table_v1: Table) -> None: test_updates += ( AddSnapshotUpdate(snapshot=new_snapshot), + SetPropertiesUpdate(updates={"test_a": "test_a1"}), SetSnapshotRefUpdate( ref_name="main", type="branch", @@ -648,6 +723,7 @@ def test_update_metadata_with_multiple_updates(table_v1: Table) -> None: max_snapshot_age_ms=12312312312, min_snapshots_to_keep=1, ), + RemovePropertiesUpdate(removals=["test_c", "test_b"]), ) new_metadata = update_table_metadata(base_metadata, test_updates) @@ -682,6 +758,9 @@ def test_update_metadata_with_multiple_updates(table_v1: Table) -> None: max_ref_age_ms=123123123, ) + # Set/RemovePropertiesUpdate + assert new_metadata.properties == {"owner": "test", "test_a": "test_a1"} + def test_metadata_isolation_from_illegal_updates(table_v1: Table) -> None: base_metadata = table_v1.metadata @@ -730,7 +809,7 @@ def test_metadata_isolation_from_illegal_updates(table_v1: Table) -> None: def test_generate_snapshot_id(table_v2: Table) -> None: assert isinstance(_generate_snapshot_id(), int) - assert isinstance(table_v2.new_snapshot_id(), int) + assert isinstance(table_v2.metadata.new_snapshot_id(), int) def test_assert_create(table_v2: Table) -> None: @@ -848,3 +927,189 @@ def test_assert_default_sort_order_id(table_v2: Table) -> None: match="Requirement failed: default sort order id has changed: expected 1, found 3", ): AssertDefaultSortOrderId(default_sort_order_id=1).validate(base_metadata) + + +def test_correct_schema() -> None: + table_metadata = TableMetadataV2(**{ + "format-version": 2, + "table-uuid": "9c12d441-03fe-4693-9a96-a0705ddf69c1", + "location": "s3://bucket/test/location", + "last-sequence-number": 34, + "last-updated-ms": 1602638573590, + "last-column-id": 3, + "current-schema-id": 1, + "schemas": [ + {"type": "struct", "schema-id": 0, "fields": [{"id": 1, "name": "x", "required": True, "type": "long"}]}, + { + "type": "struct", + "schema-id": 1, + "identifier-field-ids": [1, 2], + "fields": [ + {"id": 1, "name": "x", "required": True, "type": "long"}, + {"id": 2, "name": "y", "required": True, "type": "long"}, + {"id": 3, "name": "z", "required": True, "type": "long"}, + ], + }, + ], + "default-spec-id": 0, + "partition-specs": [{"spec-id": 0, "fields": [{"name": "x", "transform": "identity", "source-id": 1, "field-id": 1000}]}], + "last-partition-id": 1000, + "default-sort-order-id": 0, + "sort-orders": [], + "current-snapshot-id": 123, + "snapshots": [ + { + "snapshot-id": 234, + "timestamp-ms": 1515100955770, + "sequence-number": 0, + "summary": {"operation": "append"}, + "manifest-list": "s3://a/b/1.avro", + "schema-id": 10, + }, + { + "snapshot-id": 123, + "timestamp-ms": 1515100955770, + "sequence-number": 0, + "summary": {"operation": "append"}, + "manifest-list": "s3://a/b/1.avro", + "schema-id": 0, + }, + ], + }) + + t = Table( + identifier=("default", "t1"), + metadata=table_metadata, + 
metadata_location="s3://../..", + io=load_file_io(), + catalog=NoopCatalog("NoopCatalog"), + ) + + # Should use the current schema, instead of the one from the snapshot + projection_schema = t.scan().projection() + assert projection_schema == Schema( + NestedField(field_id=1, name='x', field_type=LongType(), required=True), + NestedField(field_id=2, name='y', field_type=LongType(), required=True), + NestedField(field_id=3, name='z', field_type=LongType(), required=True), + identifier_field_ids=[1, 2], + ) + assert projection_schema.schema_id == 1 + + # When we explicitly scan a snapshot, we want the schema that's linked to that snapshot + projection_schema = t.scan(snapshot_id=123).projection() + assert projection_schema == Schema( + NestedField(field_id=1, name='x', field_type=LongType(), required=True), + identifier_field_ids=[], + ) + assert projection_schema.schema_id == 0 + + with pytest.warns(UserWarning, match="Metadata does not contain schema with id: 10"): + t.scan(snapshot_id=234).projection() + + # Invalid snapshot + with pytest.raises(ValueError) as exc_info: + _ = t.scan(snapshot_id=-1).projection() + + assert "Snapshot not found: -1" in str(exc_info.value) + + +def test_schema_mismatch_type(table_schema_simple: Schema) -> None: + other_schema = pa.schema(( + pa.field("foo", pa.string(), nullable=True), + pa.field("bar", pa.decimal128(18, 6), nullable=False), + pa.field("baz", pa.bool_(), nullable=True), + )) + + expected = r"""Mismatch in fields: +┏━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓ +┃ ┃ Table field ┃ Dataframe field ┃ +┡━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┩ +│ ✅ │ 1: foo: optional string │ 1: foo: optional string │ +│ ❌ │ 2: bar: required int │ 2: bar: required decimal\(18, 6\) │ +│ ✅ │ 3: baz: optional boolean │ 3: baz: optional boolean │ +└────┴──────────────────────────┴─────────────────────────────────┘ +""" + + with pytest.raises(ValueError, match=expected): + _check_schema(table_schema_simple, other_schema) + + +def test_schema_mismatch_nullability(table_schema_simple: Schema) -> None: + other_schema = pa.schema(( + pa.field("foo", pa.string(), nullable=True), + pa.field("bar", pa.int32(), nullable=True), + pa.field("baz", pa.bool_(), nullable=True), + )) + + expected = """Mismatch in fields: +┏━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━┓ +┃ ┃ Table field ┃ Dataframe field ┃ +┡━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━┩ +│ ✅ │ 1: foo: optional string │ 1: foo: optional string │ +│ ❌ │ 2: bar: required int │ 2: bar: optional int │ +│ ✅ │ 3: baz: optional boolean │ 3: baz: optional boolean │ +└────┴──────────────────────────┴──────────────────────────┘ +""" + + with pytest.raises(ValueError, match=expected): + _check_schema(table_schema_simple, other_schema) + + +def test_schema_mismatch_missing_field(table_schema_simple: Schema) -> None: + other_schema = pa.schema(( + pa.field("foo", pa.string(), nullable=True), + pa.field("baz", pa.bool_(), nullable=True), + )) + + expected = """Mismatch in fields: +┏━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━┓ +┃ ┃ Table field ┃ Dataframe field ┃ +┡━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━┩ +│ ✅ │ 1: foo: optional string │ 1: foo: optional string │ +│ ❌ │ 2: bar: required int │ Missing │ +│ ✅ │ 3: baz: optional boolean │ 3: baz: optional boolean │ +└────┴──────────────────────────┴──────────────────────────┘ +""" + + with pytest.raises(ValueError, match=expected): + _check_schema(table_schema_simple,
other_schema) + + +def test_schema_mismatch_additional_field(table_schema_simple: Schema) -> None: + other_schema = pa.schema(( + pa.field("foo", pa.string(), nullable=True), + pa.field("bar", pa.int32(), nullable=True), + pa.field("baz", pa.bool_(), nullable=True), + pa.field("new_field", pa.date32(), nullable=True), + )) + + expected = r"PyArrow table contains more columns: new_field. Update the schema first \(hint, use union_by_name\)." + + with pytest.raises(ValueError, match=expected): + _check_schema(table_schema_simple, other_schema) + + +def test_table_properties(example_table_metadata_v2: Dict[str, Any]) -> None: + # metadata properties are all strings + for k, v in example_table_metadata_v2["properties"].items(): + assert isinstance(k, str) + assert isinstance(v, str) + metadata = TableMetadataV2(**example_table_metadata_v2) + for k, v in metadata.properties.items(): + assert isinstance(k, str) + assert isinstance(v, str) + + # property can be set to int, but still serialized as string + property_with_int = {"property_name": 42} + new_example_table_metadata_v2 = {**example_table_metadata_v2, "properties": property_with_int} + assert isinstance(new_example_table_metadata_v2["properties"]["property_name"], int) + new_metadata = TableMetadataV2(**new_example_table_metadata_v2) + assert isinstance(new_metadata.properties["property_name"], str) + + +def test_table_properties_raise_for_none_value(example_table_metadata_v2: Dict[str, Any]) -> None: + property_with_none = {"property_name": None} + example_table_metadata_v2 = {**example_table_metadata_v2, "properties": property_with_none} + with pytest.raises(ValidationError) as exc_info: + TableMetadataV2(**example_table_metadata_v2) + assert "None type is not a supported value in properties: property_name" in str(exc_info.value) diff --git a/tests/table/test_metadata.py b/tests/table/test_metadata.py index 2c453b6e03..0cf17b11a2 100644 --- a/tests/table/test_metadata.py +++ b/tests/table/test_metadata.py @@ -29,7 +29,6 @@ from pyiceberg.partitioning import PartitionField, PartitionSpec from pyiceberg.schema import Schema from pyiceberg.serializers import FromByteStream -from pyiceberg.table import SortOrder from pyiceberg.table.metadata import ( TableMetadataUtil, TableMetadataV1, @@ -37,7 +36,7 @@ new_table_metadata, ) from pyiceberg.table.refs import SnapshotRef, SnapshotRefType -from pyiceberg.table.sorting import NullOrder, SortDirection, SortField +from pyiceberg.table.sorting import NullOrder, SortDirection, SortField, SortOrder from pyiceberg.transforms import IdentityTransform from pyiceberg.typedef import UTF8 from pyiceberg.types import ( @@ -120,7 +119,6 @@ def test_v1_metadata_parsing_directly(example_table_metadata_v1: Dict[str, Any]) NestedField(field_id=1, name="x", field_type=LongType(), required=True), NestedField(field_id=2, name="y", field_type=LongType(), required=True, doc="comment"), NestedField(field_id=3, name="z", field_type=LongType(), required=True), - schema_id=0, identifier_field_ids=[], ) ] @@ -199,6 +197,75 @@ def test_migrate_v1_partition_specs(example_table_metadata_v1: Dict[str, Any]) - ] +def test_new_table_metadata_with_explicit_v1_format() -> None: + schema = Schema( + NestedField(field_id=10, name="foo", field_type=StringType(), required=False), + NestedField(field_id=22, name="bar", field_type=IntegerType(), required=True), + NestedField(field_id=33, name="baz", field_type=BooleanType(), required=False), + schema_id=10, + identifier_field_ids=[22], + ) + + partition_spec = PartitionSpec( + 
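For contrast with the mismatch cases above, the happy path of _check_schema is a silent return; a minimal sketch, assuming the table_schema_simple fixture (foo: optional string, bar: required int, baz: optional boolean) and that a clean match raises nothing:

    import pyarrow as pa

    matching = pa.schema((
        pa.field("foo", pa.string(), nullable=True),
        pa.field("bar", pa.int32(), nullable=False),
        pa.field("baz", pa.bool_(), nullable=True),
    ))
    _check_schema(table_schema_simple, matching)  # names, types and nullability all line up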
PartitionField(source_id=22, field_id=1022, transform=IdentityTransform(), name="bar"), spec_id=10 + ) + + sort_order = SortOrder( + SortField(source_id=10, transform=IdentityTransform(), direction=SortDirection.ASC, null_order=NullOrder.NULLS_LAST), + order_id=10, + ) + + actual = new_table_metadata( + schema=schema, + partition_spec=partition_spec, + sort_order=sort_order, + location="s3://some_v1_location/", + properties={'format-version': "1"}, + ) + + expected_schema = Schema( + NestedField(field_id=1, name="foo", field_type=StringType(), required=False), + NestedField(field_id=2, name="bar", field_type=IntegerType(), required=True), + NestedField(field_id=3, name="baz", field_type=BooleanType(), required=False), + schema_id=0, + identifier_field_ids=[2], + ) + + expected_spec = PartitionSpec(PartitionField(source_id=2, field_id=1000, transform=IdentityTransform(), name="bar")) + + expected = TableMetadataV1( + location="s3://some_v1_location/", + table_uuid=actual.table_uuid, + last_updated_ms=actual.last_updated_ms, + last_column_id=3, + schemas=[expected_schema], + schema_=expected_schema, + current_schema_id=0, + partition_spec=[field.model_dump() for field in expected_spec.fields], + partition_specs=[expected_spec], + default_spec_id=0, + last_partition_id=1000, + properties={}, + current_snapshot_id=None, + snapshots=[], + snapshot_log=[], + metadata_log=[], + sort_orders=[ + SortOrder( + SortField( + source_id=1, transform=IdentityTransform(), direction=SortDirection.ASC, null_order=NullOrder.NULLS_LAST + ), + order_id=1, + ) + ], + default_sort_order_id=1, + refs={}, + format_version=1, + ) + + assert actual.model_dump() == expected.model_dump() + + def test_invalid_format_version(example_table_metadata_v1: Dict[str, Any]) -> None: """Test the exception when trying to load an unknown version""" diff --git a/tests/table/test_name_mapping.py b/tests/table/test_name_mapping.py index 37111a5e3e..e039415ce3 100644 --- a/tests/table/test_name_mapping.py +++ b/tests/table/test_name_mapping.py @@ -17,56 +17,61 @@ import pytest from pyiceberg.schema import Schema -from pyiceberg.table.name_mapping import MappedField, NameMapping, create_mapping_from_schema, parse_mapping_from_json +from pyiceberg.table.name_mapping import ( + MappedField, + NameMapping, + create_mapping_from_schema, + parse_mapping_from_json, + update_mapping, +) +from pyiceberg.types import NestedField, StringType @pytest.fixture(scope="session") def table_name_mapping_nested() -> NameMapping: - return NameMapping( - [ - MappedField(field_id=1, names=['foo']), - MappedField(field_id=2, names=['bar']), - MappedField(field_id=3, names=['baz']), - MappedField(field_id=4, names=['qux'], fields=[MappedField(field_id=5, names=['element'])]), - MappedField( - field_id=6, - names=['quux'], - fields=[ - MappedField(field_id=7, names=['key']), - MappedField( - field_id=8, - names=['value'], - fields=[ - MappedField(field_id=9, names=['key']), - MappedField(field_id=10, names=['value']), - ], - ), - ], - ), - MappedField( - field_id=11, - names=['location'], - fields=[ - MappedField( - field_id=12, - names=['element'], - fields=[ - MappedField(field_id=13, names=['latitude']), - MappedField(field_id=14, names=['longitude']), - ], - ) - ], - ), - MappedField( - field_id=15, - names=['person'], - fields=[ - MappedField(field_id=16, names=['name']), - MappedField(field_id=17, names=['age']), - ], - ), - ] - ) + return NameMapping([ + MappedField(field_id=1, names=['foo']), + MappedField(field_id=2, names=['bar']), + 
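test_new_table_metadata_with_explicit_v1_format pins down two behaviours: the 'format-version' property is consumed rather than stored (it selects TableMetadataV1, and the resulting properties are empty), and all field, partition-field and sort-order IDs are reassigned fresh. In sketch form, reusing the schema/spec/sort_order built in the test:

    actual = new_table_metadata(
        schema=schema,  # input field ids 10/22/33
        partition_spec=partition_spec,
        sort_order=sort_order,
        location="s3://some_v1_location/",
        properties={"format-version": "1"},
    )
    assert actual.format_version == 1
    assert actual.properties == {}  # 'format-version' does not survive as a table property
    assert [f.field_id for f in actual.schemas[0].fields] == [1, 2, 3]  # reassigned from 1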
MappedField(field_id=3, names=['baz']), + MappedField(field_id=4, names=['qux'], fields=[MappedField(field_id=5, names=['element'])]), + MappedField( + field_id=6, + names=['quux'], + fields=[ + MappedField(field_id=7, names=['key']), + MappedField( + field_id=8, + names=['value'], + fields=[ + MappedField(field_id=9, names=['key']), + MappedField(field_id=10, names=['value']), + ], + ), + ], + ), + MappedField( + field_id=11, + names=['location'], + fields=[ + MappedField( + field_id=12, + names=['element'], + fields=[ + MappedField(field_id=13, names=['latitude']), + MappedField(field_id=14, names=['longitude']), + ], + ) + ], + ), + MappedField( + field_id=15, + names=['person'], + fields=[ + MappedField(field_id=16, names=['name']), + MappedField(field_id=17, names=['age']), + ], + ), + ]) def test_json_mapped_field_deserialization() -> None: @@ -127,20 +132,18 @@ def test_json_name_mapping_deserialization() -> None: ] """ - assert parse_mapping_from_json(name_mapping) == NameMapping( - [ - MappedField(field_id=1, names=['id', 'record_id']), - MappedField(field_id=2, names=['data']), - MappedField( - names=['location'], - field_id=3, - fields=[ - MappedField(field_id=4, names=['latitude', 'lat']), - MappedField(field_id=5, names=['longitude', 'long']), - ], - ), - ] - ) + assert parse_mapping_from_json(name_mapping) == NameMapping([ + MappedField(field_id=1, names=['id', 'record_id']), + MappedField(field_id=2, names=['data']), + MappedField( + names=['location'], + field_id=3, + fields=[ + MappedField(field_id=4, names=['latitude', 'lat']), + MappedField(field_id=5, names=['longitude', 'long']), + ], + ), + ]) def test_json_serialization(table_name_mapping_nested: NameMapping) -> None: @@ -151,20 +154,18 @@ def test_json_serialization(table_name_mapping_nested: NameMapping) -> None: def test_name_mapping_to_string() -> None: - nm = NameMapping( - [ - MappedField(field_id=1, names=['id', 'record_id']), - MappedField(field_id=2, names=['data']), - MappedField( - names=['location'], - field_id=3, - fields=[ - MappedField(field_id=4, names=['lat', 'latitude']), - MappedField(field_id=5, names=['long', 'longitude']), - ], - ), - ] - ) + nm = NameMapping([ + MappedField(field_id=1, names=['id', 'record_id']), + MappedField(field_id=2, names=['data']), + MappedField( + names=['location'], + field_id=3, + fields=[ + MappedField(field_id=4, names=['lat', 'latitude']), + MappedField(field_id=5, names=['long', 'longitude']), + ], + ), + ]) assert ( str(nm) @@ -244,3 +245,67 @@ def test_mapping_lookup_by_name(table_name_mapping_nested: NameMapping) -> None: with pytest.raises(ValueError, match="Could not find field with name: boom"): table_name_mapping_nested.find("boom") + + +def test_invalid_mapped_field() -> None: + with pytest.raises(ValueError): + MappedField(field_id=1, names=[]) + + +def test_update_mapping_no_updates_or_adds(table_name_mapping_nested: NameMapping) -> None: + assert update_mapping(table_name_mapping_nested, {}, {}) == table_name_mapping_nested + + +def test_update_mapping(table_name_mapping_nested: NameMapping) -> None: + updates = {1: NestedField(1, "foo_update", StringType(), True)} + adds = { + -1: [NestedField(18, "add_18", StringType(), True)], + 15: [NestedField(19, "name", StringType(), True), NestedField(20, "add_20", StringType(), True)], + } + + expected = NameMapping([ + MappedField(field_id=1, names=['foo', 'foo_update']), + MappedField(field_id=2, names=['bar']), + MappedField(field_id=3, names=['baz']), + MappedField(field_id=4, names=['qux'], 
fields=[MappedField(field_id=5, names=['element'])]), + MappedField( + field_id=6, + names=['quux'], + fields=[ + MappedField(field_id=7, names=['key']), + MappedField( + field_id=8, + names=['value'], + fields=[ + MappedField(field_id=9, names=['key']), + MappedField(field_id=10, names=['value']), + ], + ), + ], + ), + MappedField( + field_id=11, + names=['location'], + fields=[ + MappedField( + field_id=12, + names=['element'], + fields=[ + MappedField(field_id=13, names=['latitude']), + MappedField(field_id=14, names=['longitude']), + ], + ) + ], + ), + MappedField( + field_id=15, + names=['person'], + fields=[ + MappedField(field_id=17, names=['age']), + MappedField(field_id=19, names=['name']), + MappedField(field_id=20, names=['add_20']), + ], + ), + MappedField(field_id=18, names=['add_18']), + ]) + assert update_mapping(table_name_mapping_nested, updates, adds) == expected diff --git a/tests/table/test_partitioning.py b/tests/table/test_partitioning.py index cb60c9a8e5..d7425bc351 100644 --- a/tests/table/test_partitioning.py +++ b/tests/table/test_partitioning.py @@ -127,5 +127,5 @@ def test_partition_type(table_schema_simple: Schema) -> None: assert spec.partition_type(table_schema_simple) == StructType( NestedField(field_id=1000, name="str_truncate", field_type=StringType(), required=False), - NestedField(field_id=1001, name="int_bucket", field_type=IntegerType(), required=False), + NestedField(field_id=1001, name="int_bucket", field_type=IntegerType(), required=True), ) diff --git a/tests/table/test_snapshots.py b/tests/table/test_snapshots.py index 124c513022..3591847ad6 100644 --- a/tests/table/test_snapshots.py +++ b/tests/table/test_snapshots.py @@ -18,7 +18,7 @@ import pytest from pyiceberg.manifest import DataFile, DataFileContent, ManifestContent, ManifestFile -from pyiceberg.table.snapshots import Operation, Snapshot, SnapshotSummaryCollector, Summary, _update_snapshot_summaries +from pyiceberg.table.snapshots import Operation, Snapshot, SnapshotSummaryCollector, Summary, update_snapshot_summaries @pytest.fixture @@ -161,7 +161,7 @@ def test_snapshot_summary_collector(data_file: DataFile) -> None: def test_merge_snapshot_summaries_empty() -> None: - assert _update_snapshot_summaries(Summary(Operation.APPEND)) == Summary( + assert update_snapshot_summaries(Summary(Operation.APPEND)) == Summary( operation=Operation.APPEND, **{ 'total-data-files': '0', @@ -175,7 +175,7 @@ def test_merge_snapshot_summaries_empty() -> None: def test_merge_snapshot_summaries_new_summary() -> None: - actual = _update_snapshot_summaries( + actual = update_snapshot_summaries( summary=Summary( operation=Operation.APPEND, **{ @@ -211,7 +211,7 @@ def test_merge_snapshot_summaries_new_summary() -> None: def test_merge_snapshot_summaries_overwrite_summary() -> None: - actual = _update_snapshot_summaries( + actual = update_snapshot_summaries( summary=Summary( operation=Operation.OVERWRITE, **{ @@ -260,17 +260,17 @@ def test_merge_snapshot_summaries_overwrite_summary() -> None: def test_invalid_operation() -> None: with pytest.raises(ValueError) as e: - _update_snapshot_summaries(summary=Summary(Operation.REPLACE)) + update_snapshot_summaries(summary=Summary(Operation.REPLACE)) assert "Operation not implemented: Operation.REPLACE" in str(e.value) with pytest.raises(ValueError) as e: - _update_snapshot_summaries(summary=Summary(Operation.DELETE)) + update_snapshot_summaries(summary=Summary(Operation.DELETE)) assert "Operation not implemented: Operation.DELETE" in str(e.value) def test_invalid_type() -> 
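update_mapping, exercised above, folds schema evolution back into a NameMapping: updates append the new column name as an extra alias on the mapped field, while adds (keyed by parent field ID, -1 for top level) introduce new entries. A minimal call against the table_name_mapping_nested fixture:

    updates = {1: NestedField(1, "foo_update", StringType(), True)}
    adds = {-1: [NestedField(18, "add_18", StringType(), True)]}

    new_mapping = update_mapping(table_name_mapping_nested, updates, adds)
    assert new_mapping.find("foo_update").field_id == 1  # 'foo_update' added as an alias of field 1
    assert new_mapping.find("add_18").field_id == 18  # brand-new top-level entry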
None: with pytest.raises(ValueError) as e: - _update_snapshot_summaries( + update_snapshot_summaries( summary=Summary( operation=Operation.OVERWRITE, **{ diff --git a/tests/test_conversions.py b/tests/test_conversions.py index 3b3e519579..f57998aa4e 100644 --- a/tests/test_conversions.py +++ b/tests/test_conversions.py @@ -71,6 +71,7 @@ - -4.5F is -1 * 2ˆ2 * 1.125 and encoded as 11000000|10010000|0...0 in binary - 00000000 -> 0, 00000000 -> 0, 10010000 -> 144 (-112), 11000000 -> 192 (-64), """ + import struct import uuid from datetime import ( diff --git a/tests/test_schema.py b/tests/test_schema.py index 8e34423cf9..7e10dd5b0d 100644 --- a/tests/test_schema.py +++ b/tests/test_schema.py @@ -16,20 +16,22 @@ # under the License. from textwrap import dedent -from typing import Any, Dict +from typing import Any, Dict, List +import pyarrow as pa import pytest from pyiceberg import schema -from pyiceberg.exceptions import ResolveError -from pyiceberg.expressions import Accessor +from pyiceberg.exceptions import ResolveError, ValidationError from pyiceberg.schema import ( + Accessor, Schema, build_position_accessors, promote, prune_columns, sanitize_column_names, ) +from pyiceberg.table import UpdateSchema from pyiceberg.typedef import EMPTY_DICT, StructProtocol from pyiceberg.types import ( BinaryType, @@ -45,6 +47,7 @@ LongType, MapType, NestedField, + PrimitiveType, StringType, StructType, TimestampType, @@ -912,3 +915,688 @@ def test_promotion(file_type: IcebergType, read_type: IcebergType) -> None: else: with pytest.raises(ResolveError): promote(file_type, read_type) + + +@pytest.fixture() +def primitive_fields() -> List[NestedField]: + return [ + NestedField(field_id=1, name=str(primitive_type), field_type=primitive_type, required=False) + for primitive_type in TEST_PRIMITIVE_TYPES + ] + + +def test_add_top_level_primitives(primitive_fields: NestedField) -> None: + for primitive_field in primitive_fields: + new_schema = Schema(primitive_field) + applied = UpdateSchema(transaction=None, schema=Schema()).union_by_name(new_schema)._apply() # type: ignore + assert applied == new_schema + + +def test_add_top_level_list_of_primitives(primitive_fields: NestedField) -> None: + for primitive_type in TEST_PRIMITIVE_TYPES: + new_schema = Schema( + NestedField( + field_id=1, + name="aList", + field_type=ListType(element_id=2, element_type=primitive_type, element_required=False), + required=False, + ) + ) + applied = UpdateSchema(transaction=None, schema=Schema()).union_by_name(new_schema)._apply() # type: ignore + assert applied.as_struct() == new_schema.as_struct() + + +def test_add_top_level_map_of_primitives(primitive_fields: NestedField) -> None: + for primitive_type in TEST_PRIMITIVE_TYPES: + new_schema = Schema( + NestedField( + field_id=1, + name="aMap", + field_type=MapType( + key_id=2, key_type=primitive_type, value_id=3, value_type=primitive_type, value_required=False + ), + required=False, + ) + ) + applied = UpdateSchema(transaction=None, schema=Schema()).union_by_name(new_schema)._apply() # type: ignore + assert applied.as_struct() == new_schema.as_struct() + + +def test_add_top_struct_of_primitives(primitive_fields: NestedField) -> None: + for primitive_type in TEST_PRIMITIVE_TYPES: + new_schema = Schema( + NestedField( + field_id=1, + name="aStruct", + field_type=StructType(NestedField(field_id=2, name="primitive", field_type=primitive_type, required=False)), + required=False, + ) + ) + applied = UpdateSchema(transaction=None, schema=Schema()).union_by_name(new_schema)._apply() # type: 
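The snapshots change is a rename only: _update_snapshot_summaries becomes the public update_snapshot_summaries, with behaviour unchanged, including the guard that only APPEND and OVERWRITE are implemented. A minimal sketch of how the tests drive it:

    merged = update_snapshot_summaries(Summary(Operation.APPEND))
    assert merged.operation == Operation.APPEND  # totals default to '0' for an empty summary

    with pytest.raises(ValueError, match="Operation not implemented"):
        update_snapshot_summaries(summary=Summary(Operation.REPLACE))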
ignore + assert applied.as_struct() == new_schema.as_struct() + + +def test_add_nested_primitive(primitive_fields: NestedField) -> None: + for primitive_type in TEST_PRIMITIVE_TYPES: + current_schema = Schema(NestedField(field_id=1, name="aStruct", field_type=StructType(), required=False)) + new_schema = Schema( + NestedField( + field_id=1, + name="aStruct", + field_type=StructType(NestedField(field_id=2, name="primitive", field_type=primitive_type, required=False)), + required=False, + ) + ) + applied = UpdateSchema(None, None, schema=current_schema).union_by_name(new_schema)._apply() # type: ignore + assert applied.as_struct() == new_schema.as_struct() + + +def _primitive_fields(types: List[PrimitiveType], start_id: int = 0) -> List[NestedField]: + fields = [] + for iceberg_type in types: + fields.append(NestedField(field_id=start_id, name=str(iceberg_type), field_type=iceberg_type, required=False)) + start_id = start_id + 1 + + return fields + + +def test_add_nested_primitives(primitive_fields: NestedField) -> None: + current_schema = Schema(NestedField(field_id=1, name="aStruct", field_type=StructType(), required=False)) + new_schema = Schema( + NestedField( + field_id=1, name="aStruct", field_type=StructType(*_primitive_fields(TEST_PRIMITIVE_TYPES, 2)), required=False + ) + ) + applied = UpdateSchema(transaction=None, schema=current_schema).union_by_name(new_schema)._apply() # type: ignore + assert applied.as_struct() == new_schema.as_struct() + + +def test_add_nested_lists(primitive_fields: NestedField) -> None: + new_schema = Schema( + NestedField( + field_id=1, + name="aList", + type=ListType( + element_id=2, + element_type=ListType( + element_id=3, + element_type=ListType( + element_id=4, + element_type=ListType( + element_id=5, + element_type=ListType( + element_id=6, + element_type=ListType( + element_id=7, + element_type=ListType( + element_id=8, + element_type=ListType(element_id=9, element_type=DecimalType(precision=11, scale=20)), + element_required=False, + ), + element_required=False, + ), + element_required=False, + ), + element_required=False, + ), + element_required=False, + ), + element_required=False, + ), + element_required=False, + ), + required=False, + ) + ) + applied = UpdateSchema(transaction=None, schema=Schema()).union_by_name(new_schema)._apply() # type: ignore + assert applied.as_struct() == new_schema.as_struct() + + +def test_add_nested_struct(primitive_fields: NestedField) -> None: + new_schema = Schema( + NestedField( + field_id=1, + name="struct1", + type=StructType( + NestedField( + field_id=2, + name="struct2", + type=StructType( + NestedField( + field_id=3, + name="struct3", + type=StructType( + NestedField( + field_id=4, + name="struct4", + type=StructType( + NestedField( + field_id=5, + name="struct5", + type=StructType( + NestedField( + field_id=6, + name="struct6", + type=StructType( + NestedField(field_id=7, name="aString", field_type=StringType()) + ), + required=False, + ) + ), + required=False, + ) + ), + required=False, + ) + ), + required=False, + ) + ), + required=False, + ) + ), + required=False, + ) + ) + applied = UpdateSchema(transaction=None, schema=Schema()).union_by_name(new_schema)._apply() # type: ignore + assert applied.as_struct() == new_schema.as_struct() + + +def test_add_nested_maps(primitive_fields: NestedField) -> None: + new_schema = Schema( + NestedField( + field_id=1, + name="struct", + field_type=MapType( + key_id=2, + value_id=3, + key_type=StringType(), + value_type=MapType( + key_id=4, + value_id=5, + 
key_type=StringType(), + value_type=MapType( + key_id=6, + value_id=7, + key_type=StringType(), + value_type=MapType( + key_id=8, + value_id=9, + key_type=StringType(), + value_type=MapType( + key_id=10, + value_id=11, + key_type=StringType(), + value_type=MapType(key_id=12, value_id=13, key_type=StringType(), value_type=StringType()), + value_required=False, + ), + value_required=False, + ), + value_required=False, + ), + value_required=False, + ), + value_required=False, + ), + required=False, + ) + ) + applied = UpdateSchema(transaction=None, schema=Schema()).union_by_name(new_schema)._apply() # type: ignore + assert applied.as_struct() == new_schema.as_struct() + + +def test_detect_invalid_top_level_list() -> None: + current_schema = Schema( + NestedField( + field_id=1, + name="aList", + field_type=ListType(element_id=2, element_type=StringType(), element_required=False), + required=False, + ) + ) + new_schema = Schema( + NestedField( + field_id=1, + name="aList", + field_type=ListType(element_id=2, element_type=DoubleType(), element_required=False), + required=False, + ) + ) + + with pytest.raises(ValidationError, match="Cannot change column type: aList.element: string -> double"): + _ = UpdateSchema(transaction=None, schema=current_schema).union_by_name(new_schema)._apply() # type: ignore + + +def test_detect_invalid_top_level_maps() -> None: + current_schema = Schema( + NestedField( + field_id=1, + name="aMap", + field_type=MapType(key_id=2, value_id=3, key_type=StringType(), value_type=StringType(), value_required=False), + required=False, + ) + ) + new_schema = Schema( + NestedField( + field_id=1, + name="aMap", + field_type=MapType(key_id=2, value_id=3, key_type=UUIDType(), value_type=StringType(), value_required=False), + required=False, + ) + ) + + with pytest.raises(ValidationError, match="Cannot change column type: aMap.key: string -> uuid"): + _ = UpdateSchema(transaction=None, schema=current_schema).union_by_name(new_schema)._apply() # type: ignore + + +def test_promote_float_to_double() -> None: + current_schema = Schema(NestedField(field_id=1, name="aCol", field_type=FloatType(), required=False)) + new_schema = Schema(NestedField(field_id=1, name="aCol", field_type=DoubleType(), required=False)) + + applied = UpdateSchema(transaction=None, schema=current_schema).union_by_name(new_schema)._apply() # type: ignore + + assert applied.as_struct() == new_schema.as_struct() + assert len(applied.fields) == 1 + assert isinstance(applied.fields[0].field_type, DoubleType) + + +def test_detect_invalid_promotion_double_to_float() -> None: + current_schema = Schema(NestedField(field_id=1, name="aCol", field_type=DoubleType(), required=False)) + new_schema = Schema(NestedField(field_id=1, name="aCol", field_type=FloatType(), required=False)) + + with pytest.raises(ValidationError, match="Cannot change column type: aCol: double -> float"): + _ = UpdateSchema(transaction=None, schema=current_schema).union_by_name(new_schema)._apply() # type: ignore + + +# decimal(P,S) Fixed-point decimal; precision P, scale S -> Scale is fixed [1], +# precision must be 38 or less +def test_type_promote_decimal_to_fixed_scale_with_wider_precision() -> None: + current_schema = Schema(NestedField(field_id=1, name="aCol", field_type=DecimalType(precision=20, scale=1), required=False)) + new_schema = Schema(NestedField(field_id=1, name="aCol", field_type=DecimalType(precision=22, scale=1), required=False)) + + applied = UpdateSchema(transaction=None, schema=current_schema).union_by_name(new_schema)._apply() # 
type: ignore + + assert applied.as_struct() == new_schema.as_struct() + assert len(applied.fields) == 1 + field = applied.fields[0] + decimal_type = field.field_type + assert isinstance(decimal_type, DecimalType) + assert decimal_type.precision == 22 + assert decimal_type.scale == 1 + + +def test_add_nested_structs(primitive_fields: NestedField) -> None: + schema = Schema( + NestedField( + field_id=1, + name="struct1", + field_type=StructType( + NestedField( + field_id=2, + name="struct2", + field_type=StructType( + NestedField( + field_id=3, + name="list", + field_type=ListType( + element_id=4, + element_type=StructType( + NestedField(field_id=5, name="value", field_type=StringType(), required=False) + ), + element_required=False, + ), + required=False, + ) + ), + required=False, + ) + ), + required=False, + ) + ) + new_schema = Schema( + NestedField( + field_id=1, + name="struct1", + field_type=StructType( + NestedField( + field_id=2, + name="struct2", + field_type=StructType( + NestedField( + field_id=3, + name="list", + field_type=ListType( + element_id=4, + element_type=StructType( + NestedField(field_id=5, name="time", field_type=TimeType(), required=False) + ), + element_required=False, + ), + required=False, + ) + ), + required=False, + ) + ), + required=False, + ) + ) + applied = UpdateSchema(transaction=None, schema=schema).union_by_name(new_schema)._apply() # type: ignore + + expected = Schema( + NestedField( + field_id=1, + name="struct1", + field_type=StructType( + NestedField( + field_id=2, + name="struct2", + field_type=StructType( + NestedField( + field_id=3, + name="list", + field_type=ListType( + element_id=4, + element_type=StructType( + NestedField(field_id=5, name="value", field_type=StringType(), required=False), + NestedField(field_id=6, name="time", field_type=TimeType(), required=False), + ), + element_required=False, + ), + required=False, + ) + ), + required=False, + ) + ), + required=False, + ) + ) + + assert applied.as_struct() == expected.as_struct() + + +def test_replace_list_with_primitive() -> None: + current_schema = Schema(NestedField(field_id=1, name="aCol", field_type=ListType(element_id=2, element_type=StringType()))) + new_schema = Schema(NestedField(field_id=1, name="aCol", field_type=StringType())) + + with pytest.raises(ValidationError, match="Cannot change column type: list is not a primitive"): + _ = UpdateSchema(transaction=None, schema=current_schema).union_by_name(new_schema)._apply() # type: ignore + + +def test_mirrored_schemas() -> None: + current_schema = Schema( + NestedField(9, "struct1", StructType(NestedField(8, "string1", StringType(), required=False)), required=False), + NestedField(6, "list1", ListType(element_id=7, element_type=StringType(), element_required=False), required=False), + NestedField(5, "string2", StringType(), required=False), + NestedField(4, "string3", StringType(), required=False), + NestedField(3, "string4", StringType(), required=False), + NestedField(2, "string5", StringType(), required=False), + NestedField(1, "string6", StringType(), required=False), + ) + mirrored_schema = Schema( + NestedField(1, "struct1", StructType(NestedField(2, "string1", StringType(), required=False))), + NestedField(3, "list1", ListType(element_id=4, element_type=StringType(), element_required=False), required=False), + NestedField(5, "string2", StringType(), required=False), + NestedField(6, "string3", StringType(), required=False), + NestedField(7, "string4", StringType(), required=False), + NestedField(8, "string5", StringType(), 
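Every union_by_name case in this file follows one shape: wrap the current schema in an UpdateSchema, union with the observed schema, and compare the applied structs (transaction=None plus # type: ignore is the tests' shortcut for driving UpdateSchema without a live table). Distilled:

    current = Schema(NestedField(field_id=1, name="name", field_type=StringType(), required=False))
    observed = Schema(
        NestedField(field_id=1, name="name", field_type=StringType(), required=False),
        NestedField(field_id=2, name="age", field_type=IntegerType(), required=False),
    )

    applied = UpdateSchema(transaction=None, schema=current).union_by_name(observed)._apply()  # type: ignore
    assert applied.as_struct() == observed.as_struct()  # new column merged; ids assigned past the current max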
+
+
+def test_add_new_top_level_struct() -> None:
+    current_schema = Schema(
+        NestedField(
+            1,
+            "map1",
+            MapType(
+                key_id=2,
+                value_id=3,
+                key_type=StringType(),
+                value_type=ListType(
+                    element_id=4,
+                    element_type=StructType(NestedField(field_id=5, name="string", field_type=StringType(), required=False)),
+                ),
+                value_required=False,
+            ),
+        )
+    )
+    observed_schema = Schema(
+        NestedField(
+            1,
+            "map1",
+            MapType(
+                key_id=2,
+                value_id=3,
+                key_type=StringType(),
+                value_type=ListType(
+                    element_id=4,
+                    element_type=StructType(NestedField(field_id=5, name="string", field_type=StringType(), required=False)),
+                ),
+                value_required=False,
+            ),
+        ),
+        NestedField(
+            field_id=6,
+            name="struct1",
+            field_type=StructType(
+                NestedField(
+                    field_id=7,
+                    name="d1",
+                    field_type=StructType(NestedField(field_id=8, name="d2", field_type=StringType(), required=False)),
+                    required=False,
+                )
+            ),
+            required=False,
+        ),
+    )
+
+    applied = UpdateSchema(transaction=None, schema=current_schema).union_by_name(observed_schema)._apply()  # type: ignore
+
+    assert applied.as_struct() == observed_schema.as_struct()
+
+
+def test_append_nested_struct() -> None:
+    current_schema = Schema(
+        NestedField(
+            field_id=1,
+            name="s1",
+            field_type=StructType(
+                NestedField(
+                    field_id=2,
+                    name="s2",
+                    field_type=StructType(
+                        NestedField(
+                            field_id=3,
+                            name="s3",
+                            field_type=StructType(NestedField(field_id=4, name="s4", field_type=StringType(), required=False)),
+                        )
+                    ),
+                    required=False,
+                )
+            ),
+        )
+    )
+    observed_schema = Schema(
+        NestedField(
+            field_id=1,
+            name="s1",
+            field_type=StructType(
+                NestedField(
+                    field_id=2,
+                    name="s2",
+                    field_type=StructType(
+                        NestedField(
+                            field_id=3,
+                            name="s3",
+                            field_type=StructType(NestedField(field_id=4, name="s4", field_type=StringType(), required=False)),
+                            required=False,
+                        ),
+                        NestedField(
+                            field_id=5,
+                            name="repeat",
+                            field_type=StructType(
+                                NestedField(
+                                    field_id=6,
+                                    name="s1",
+                                    field_type=StructType(
+                                        NestedField(
+                                            field_id=7,
+                                            name="s2",
+                                            field_type=StructType(
+                                                NestedField(
+                                                    field_id=8,
+                                                    name="s3",
+                                                    field_type=StructType(
+                                                        NestedField(field_id=9, name="s4", field_type=StringType())
+                                                    ),
+                                                    required=False,
+                                                )
+                                            ),
+                                            required=False,
+                                        )
+                                    ),
+                                    required=False,
+                                )
+                            ),
+                            required=False,
+                        ),
+                        required=False,
+                    ),
+                    required=False,
+                )
+            ),
+            required=False,
+        )
+    )
+
+    applied = UpdateSchema(transaction=None, schema=current_schema).union_by_name(observed_schema)._apply()  # type: ignore
+
+    assert applied.as_struct() == observed_schema.as_struct()
+
+
+def test_append_nested_lists() -> None:
+    current_schema = Schema(
+        NestedField(
+            field_id=1,
+            name="s1",
+            field_type=StructType(
+                NestedField(
+                    field_id=2,
+                    name="s2",
+                    field_type=StructType(
+                        NestedField(
+                            field_id=3,
+                            name="s3",
+                            field_type=StructType(
+                                NestedField(
+                                    field_id=4,
+                                    name="list1",
+                                    field_type=ListType(element_id=5, element_type=StringType(), element_required=False),
+                                    required=False,
+                                )
+                            ),
+                            required=False,
+                        )
+                    ),
+                    required=False,
+                )
+            ),
+            required=False,
+        )
+    )
+
+    observed_schema = Schema(
+        NestedField(
+            field_id=1,
+            name="s1",
+            field_type=StructType(
+                NestedField(
+                    field_id=2,
+                    name="s2",
+                    field_type=StructType(
+                        NestedField(
+                            field_id=3,
+                            name="s3",
+                            field_type=StructType(
+                                NestedField(
+                                    field_id=4,
+                                    name="list2",
+                                    field_type=ListType(element_id=5, element_type=StringType(), element_required=False),
+                                    required=False,
+                                )
+                            ),
+                            required=False,
+                        )
+                    ),
+                    required=False,
+                )
+            ),
+            required=False,
+        )
+    )
+    union = UpdateSchema(transaction=None, schema=current_schema).union_by_name(observed_schema)._apply()  # type: ignore
+
+    expected = Schema(
+        NestedField(
+            field_id=1,
+            name="s1",
+            field_type=StructType(
+                NestedField(
+                    field_id=2,
+                    name="s2",
+                    field_type=StructType(
+                        NestedField(
+                            field_id=3,
+                            name="s3",
+                            field_type=StructType(
+                                NestedField(
+                                    field_id=4,
+                                    name="list1",
+                                    field_type=ListType(element_id=5, element_type=StringType(), element_required=False),
+                                    required=False,
+                                ),
+                                NestedField(
+                                    field_id=6,
+                                    name="list2",
+                                    field_type=ListType(element_id=7, element_type=StringType(), element_required=False),
+                                    required=False,
+                                ),
+                            ),
+                            required=False,
+                        )
+                    ),
+                    required=False,
+                )
+            ),
+            required=False,
+        )
+    )
+
+    assert union.as_struct() == expected.as_struct()
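The expected schema in test_append_nested_lists is the interesting part: the observed schema reuses element_id=5 for list2, yet the union assigns 6 and 7. Incoming ids are never trusted; genuinely new columns are renumbered past the highest id already in the table. A self-contained sketch (same assumed imports as the earlier examples):

from pyiceberg.schema import Schema
from pyiceberg.table import UpdateSchema  # assumed import path, as above
from pyiceberg.types import NestedField, StringType

current = Schema(NestedField(field_id=1, name="a", field_type=StringType(), required=False))
# The new column arrives claiming field_id=1, which "a" already owns.
incoming = Schema(NestedField(field_id=1, name="b", field_type=StringType(), required=False))

applied = UpdateSchema(transaction=None, schema=current).union_by_name(incoming)._apply()  # type: ignore
assert applied.find_field("a").field_id == 1
assert applied.find_field("b").field_id == 2  # renumbered, not taken from the incoming schema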
+
+
+def test_union_with_pa_schema(primitive_fields: NestedField) -> None:
+    base_schema = Schema(NestedField(field_id=1, name="foo", field_type=StringType(), required=True))
+
+    pa_schema = pa.schema([
+        pa.field("foo", pa.string(), nullable=False),
+        pa.field("bar", pa.int32(), nullable=True),
+        pa.field("baz", pa.bool_(), nullable=True),
+    ])
+
+    new_schema = UpdateSchema(transaction=None, schema=base_schema).union_by_name(pa_schema)._apply()  # type: ignore
+
+    expected_schema = Schema(
+        NestedField(field_id=1, name="foo", field_type=StringType(), required=True),
+        NestedField(field_id=2, name="bar", field_type=IntegerType(), required=False),
+        NestedField(field_id=3, name="baz", field_type=BooleanType(), required=False),
+    )
+
+    assert new_schema == expected_schema
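test_union_with_pa_schema shows that union_by_name also accepts a pyarrow.Schema directly, converting it field by field (pa.int32() becomes IntegerType, nullable fields become optional). In practice that enables one-line schema evolution from an Arrow table; a sketch with hypothetical catalog and table names:

import pyarrow as pa
from pyiceberg.catalog import load_catalog

catalog = load_catalog("default")        # hypothetical catalog name
table = catalog.load_table("db.events")  # hypothetical table name

arrow_table = pa.table({"foo": ["a"], "bar": pa.array([1], type=pa.int32())})

# Any Arrow fields missing from the Iceberg schema are appended as optional columns.
with table.update_schema() as update:
    update.union_by_name(arrow_table.schema)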
diff --git a/tests/test_serializers.py b/tests/test_serializers.py
new file mode 100644
index 0000000000..140db02700
--- /dev/null
+++ b/tests/test_serializers.py
@@ -0,0 +1,50 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+import json
+import os
+import uuid
+from typing import Any, Dict
+
+import pytest
+from pytest_mock import MockFixture
+
+from pyiceberg.serializers import ToOutputFile
+from pyiceberg.table import StaticTable
+from pyiceberg.table.metadata import TableMetadataV1
+
+
+def test_legacy_current_snapshot_id(
+    mocker: MockFixture, tmp_path_factory: pytest.TempPathFactory, example_table_metadata_no_snapshot_v1: Dict[str, Any]
+) -> None:
+    from pyiceberg.io.pyarrow import PyArrowFileIO
+
+    metadata_location = str(tmp_path_factory.mktemp("metadata") / f"{uuid.uuid4()}.metadata.json")
+    metadata = TableMetadataV1(**example_table_metadata_no_snapshot_v1)
+    ToOutputFile.table_metadata(metadata, PyArrowFileIO().new_output(location=metadata_location), overwrite=True)
+    static_table = StaticTable.from_metadata(metadata_location)
+    assert static_table.metadata.current_snapshot_id is None
+
+    mocker.patch.dict(os.environ, values={"PYICEBERG_LEGACY_CURRENT_SNAPSHOT_ID": "True"})
+
+    ToOutputFile.table_metadata(metadata, PyArrowFileIO().new_output(location=metadata_location), overwrite=True)
+    with PyArrowFileIO().new_input(location=metadata_location).open() as input_stream:
+        metadata_json_bytes = input_stream.read()
+        assert json.loads(metadata_json_bytes)['current-snapshot-id'] == -1
+    backwards_compatible_static_table = StaticTable.from_metadata(metadata_location)
+    assert backwards_compatible_static_table.metadata.current_snapshot_id is None
+    assert backwards_compatible_static_table.metadata == static_table.metadata
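The new test_serializers.py pins down a compatibility escape hatch: by default a table with no snapshots serializes current-snapshot-id as null, but with the legacy flag set the writer emits the sentinel -1 that some older readers expect, while PyIceberg itself still normalizes -1 back to None on read. A sketch of flipping the flag, using the environment variable and config key exercised by these tests:

import os

# Either via the environment...
os.environ["PYICEBERG_LEGACY_CURRENT_SNAPSHOT_ID"] = "True"

# ...or equivalently in ~/.pyiceberg.yaml:
#   legacy-current-snapshot-id: True
#
# From this point on, ToOutputFile.table_metadata(...) writes
# "current-snapshot-id": -1 for snapshot-less tables instead of null.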
diff --git a/tests/test_transforms.py b/tests/test_transforms.py
index ed0c495a14..4fea7739d1 100644
--- a/tests/test_transforms.py
+++ b/tests/test_transforms.py
@@ -236,6 +236,19 @@ def test_negative_value_to_human_string(negative_value: int, time_transform: Tim
     assert time_transform.to_human_string(TimestampType(), negative_value) == expected
 
 
+@pytest.mark.parametrize(
+    "zero_value,time_transform,expected",
+    [
+        (0, YearTransform(), "1970"),
+        (0, MonthTransform(), "1970-01"),
+        (0, DayTransform(), "1970-01-01"),
+        (0, HourTransform(), "1970-01-01-00"),
+    ],
+)
+def test_zero_value_to_human_string(zero_value: int, time_transform: TimeTransform[Any], expected: str) -> None:
+    assert time_transform.to_human_string(TimestampType(), zero_value) == expected
+
+
 @pytest.mark.parametrize(
     "type_var",
     [
@@ -274,6 +287,12 @@ def test_time_methods(type_var: PrimitiveType) -> None:
         (MonthTransform(), TimestamptzType(), -1, -1),
         (DayTransform(), TimestampType(), 1512151975038194, 17501),
         (DayTransform(), TimestampType(), -1, -1),
+        (YearTransform(), DateType(), 0, 0),
+        (MonthTransform(), DateType(), 0, 0),
+        (DayTransform(), DateType(), 0, 0),
+        (YearTransform(), TimestampType(), 0, 0),
+        (MonthTransform(), TimestampType(), 0, 0),
+        (DayTransform(), TimestampType(), 0, 0),
     ],
 )
 def test_time_apply_method(transform: TimeTransform[Any], type_var: PrimitiveType, value: int, expected: int) -> None:
@@ -291,6 +310,7 @@ def test_hour_method(type_var: PrimitiveType) -> None:
     assert HourTransform().can_transform(type_var)
     assert HourTransform().result_type(type_var) == IntegerType()
     assert HourTransform().transform(type_var)(1512151975038194) == 420042  # type: ignore
+    assert HourTransform().transform(type_var)(0) == 0  # type: ignore
     assert HourTransform().dedup_name == "time"
 
 
@@ -362,7 +382,7 @@ def test_identity_method(type_var: PrimitiveType) -> None:
 @pytest.mark.parametrize("type_var", [IntegerType(), LongType()])
 @pytest.mark.parametrize(
     "input_var,expected",
-    [(1, 0), (5, 0), (9, 0), (10, 10), (11, 10), (-1, -10), (-10, -10), (-12, -20)],
+    [(1, 0), (5, 0), (9, 0), (10, 10), (11, 10), (-1, -10), (-10, -10), (-12, -20), (0, 0)],
 )
 def test_truncate_integer(type_var: PrimitiveType, input_var: int, expected: int) -> None:
     trunc = TruncateTransform(10)  # type: ignore
@@ -377,6 +397,7 @@ def test_truncate_integer(type_var: PrimitiveType, input_var: int, expected: int
         (Decimal("12.29"), Decimal("12.20")),
         (Decimal("0.05"), Decimal("0.00")),
         (Decimal("-0.05"), Decimal("-0.10")),
+        (Decimal("0.0"), Decimal("0.0")),
     ],
 )
 def test_truncate_decimal(input_var: Decimal, expected: Decimal) -> None:
@@ -384,7 +405,7 @@ def test_truncate_decimal(input_var: Decimal, expected: Decimal) -> None:
     assert trunc.transform(DecimalType(9, 2))(input_var) == expected
 
 
-@pytest.mark.parametrize("input_var,expected", [("abcdefg", "abcde"), ("abc", "abc")])
+@pytest.mark.parametrize("input_var,expected", [("abcdefg", "abcde"), ("abc", "abc"), ("", "")])
 def test_truncate_string(input_var: str, expected: str) -> None:
     trunc = TruncateTransform(5)  # type: ignore
     assert trunc.transform(StringType())(input_var) == expected
@@ -724,9 +745,7 @@ def test_projection_day_human(bound_reference_date: BoundReference[int]) -> None
     assert DayTransform().project(
         "dt", BoundGreaterThanOrEqual(term=bound_reference_date, literal=date_literal)
-    ) == GreaterThanOrEqual(
-        term="dt", literal=17532
-    )  # >= 2018, 1, 1
+    ) == GreaterThanOrEqual(term="dt", literal=17532)  # >= 2018, 1, 1
 
     assert DayTransform().project("dt", BoundGreaterThan(term=bound_reference_date, literal=date_literal)) == GreaterThanOrEqual(
         term="dt", literal=17533
diff --git a/tests/utils/test_config.py b/tests/utils/test_config.py
index d694e15562..2f15bb56d8 100644
--- a/tests/utils/test_config.py
+++ b/tests/utils/test_config.py
@@ -41,6 +41,20 @@ def test_from_environment_variables_uppercase() -> None:
     assert Config().get_catalog_config("PRODUCTION") == {"uri": "https://service.io/api"}
 
 
+@mock.patch.dict(
+    os.environ,
+    {
+        "PYICEBERG_CATALOG__PRODUCTION__S3__REGION": "eu-north-1",
+        "PYICEBERG_CATALOG__PRODUCTION__S3__ACCESS_KEY_ID": "username",
+    },
+)
+def test_fix_nested_objects_from_environment_variables() -> None:
+    assert Config().get_catalog_config("PRODUCTION") == {
+        's3.region': 'eu-north-1',
+        's3.access-key-id': 'username',
+    }
+
+
 def test_from_configuration_files(tmp_path_factory: pytest.TempPathFactory) -> None:
     config_path = str(tmp_path_factory.mktemp("config"))
     with open(f"{config_path}/.pyiceberg.yaml", "w", encoding=UTF8) as file:
@@ -62,3 +76,20 @@ def test_merge_config() -> None:
     rhs: RecursiveDict = {"common_key": "xyz789"}
     result = merge_config(lhs, rhs)
     assert result["common_key"] == rhs["common_key"]
+
+
+def test_from_configuration_files_get_typed_value(tmp_path_factory: pytest.TempPathFactory) -> None:
+    config_path = str(tmp_path_factory.mktemp("config"))
+    with open(f"{config_path}/.pyiceberg.yaml", "w", encoding=UTF8) as file:
+        yaml_str = as_document({"max-workers": "4", "legacy-current-snapshot-id": "True"}).as_yaml()
+        file.write(yaml_str)
+
+    os.environ["PYICEBERG_HOME"] = config_path
+    with pytest.raises(ValueError):
+        Config().get_bool("max-workers")
+
+    with pytest.raises(ValueError):
+        Config().get_int("legacy-current-snapshot-id")
+
+    assert Config().get_bool("legacy-current-snapshot-id")
+    assert Config().get_int("max-workers") == 4
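The typed getters asserted above are deliberately strict: Config.get_int refuses a boolean-looking string and Config.get_bool refuses a number, raising ValueError instead of silently coercing, so typos in .pyiceberg.yaml fail loudly. A short usage sketch, assuming a config containing the same two keys as the test:

from pyiceberg.utils.config import Config

config = Config()

max_workers = config.get_int("max-workers")             # "4"    -> 4
legacy = config.get_bool("legacy-current-snapshot-id")  # "True" -> True

try:
    config.get_bool("max-workers")  # "4" is not a boolean
except ValueError as err:
    print(err)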
diff --git a/tests/utils/test_manifest.py b/tests/utils/test_manifest.py
index 08906b68ad..3e789cb854 100644
--- a/tests/utils/test_manifest.py
+++ b/tests/utils/test_manifest.py
@@ -37,8 +37,7 @@
 )
 from pyiceberg.partitioning import PartitionField, PartitionSpec
 from pyiceberg.schema import Schema
-from pyiceberg.table import Snapshot
-from pyiceberg.table.snapshots import Operation, Summary
+from pyiceberg.table.snapshots import Operation, Snapshot, Summary
 from pyiceberg.transforms import IdentityTransform
 from pyiceberg.typedef import Record
 from pyiceberg.types import IntegerType, NestedField
@@ -55,14 +54,19 @@ def _verify_metadata_with_fastavro(avro_file: str, expected_metadata: Dict[str,
 
 def test_read_manifest_entry(generated_manifest_entry_file: str) -> None:
     manifest = ManifestFile(
-        manifest_path=generated_manifest_entry_file, manifest_length=0, partition_spec_id=0, sequence_number=None, partitions=[]
+        manifest_path=generated_manifest_entry_file,
+        manifest_length=0,
+        partition_spec_id=0,
+        added_snapshot_id=0,
+        sequence_number=0,
+        partitions=[],
     )
     manifest_entries = manifest.fetch_manifest_entry(PyArrowFileIO())
     manifest_entry = manifest_entries[0]
 
     assert manifest_entry.status == ManifestEntryStatus.ADDED
     assert manifest_entry.snapshot_id == 8744736658442914487
-    assert manifest_entry.data_sequence_number is None
+    assert manifest_entry.data_sequence_number == 0
     assert isinstance(manifest_entry.data_file, DataFile)
 
     data_file = manifest_entry.data_file
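The test_manifest.py change tracks an API tightening: ManifestFile now takes added_snapshot_id and a concrete sequence_number up front, and entries read back report data_sequence_number == 0 rather than None. Reading entries therefore looks like this sketch (the manifest path is a placeholder; the import paths match those used by the test):

from pyiceberg.io.pyarrow import PyArrowFileIO
from pyiceberg.manifest import ManifestFile

manifest = ManifestFile(
    manifest_path="s3://warehouse/metadata/manifest.avro",  # placeholder path
    manifest_length=0,
    partition_spec_id=0,
    added_snapshot_id=0,  # now required by the constructor
    sequence_number=0,    # now an int, not None
    partitions=[],
)

# Each entry carries its status, owning snapshot, and the described data file.
for entry in manifest.fetch_manifest_entry(PyArrowFileIO()):
    print(entry.status, entry.snapshot_id, entry.data_file.file_path)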