diff --git a/.asf.yaml b/.asf.yaml
index a97dcd3856..15e9564420 100644
--- a/.asf.yaml
+++ b/.asf.yaml
@@ -28,6 +28,16 @@ github:
     - apache
     - hacktoberfest
     - pyiceberg
+  enabled_merge_buttons:
+    merge: false
+    squash: true
+    rebase: true
+  protected_branches:
+    main:
+      required_pull_request_reviews:
+        required_approving_review_count: 1
+
+      required_linear_history: true
   features:
     wiki: true
     issues: true
diff --git a/.github/workflows/python-release.yml b/.github/workflows/python-release.yml
index 6902775b13..910b7ee003 100644
--- a/.github/workflows/python-release.yml
+++ b/.github/workflows/python-release.yml
@@ -25,7 +25,7 @@ on:
       version:
         description: 'Version'
         type: string
-        default: 'master'
+        default: 'main'


 jobs:
@@ -50,18 +50,16 @@ jobs:
       - name: Set version
         run: python -m poetry version "${{ inputs.version }}"
-        working-directory: ./python
-        if: "${{ github.event.inputs.version != 'master' }}"
+        if: "${{ github.event.inputs.version != 'main' }}"

       # Publish the source distribution with the version that's in
       # the repository, otherwise the tests will fail
       - name: Compile source distribution
         run: python3 -m poetry build --format=sdist
         if: startsWith(matrix.os, 'ubuntu')
-        working-directory: ./python

       - name: Build wheels
-        uses: pypa/cibuildwheel@v2.16.0
+        uses: pypa/cibuildwheel@v2.16.2
         with:
           output-dir: wheelhouse
           config-file: "pyproject.toml"
diff --git a/dev/Dockerfile b/dev/Dockerfile
index a4099d3494..77ed84ed4f 100644
--- a/dev/Dockerfile
+++ b/dev/Dockerfile
@@ -38,7 +38,7 @@ WORKDIR ${SPARK_HOME}

 ENV SPARK_VERSION=3.4.1
 ENV ICEBERG_SPARK_RUNTIME_VERSION=3.4_2.12
-ENV ICEBERG_VERSION=1.3.1
+ENV ICEBERG_VERSION=1.4.0
 ENV AWS_SDK_VERSION=2.20.18
 ENV PYICEBERG_VERSION=0.4.0

@@ -50,13 +50,9 @@ RUN curl --retry 3 -s -C - https://dlcdn.apache.org/spark/spark-${SPARK_VERSION}
 RUN curl -s https://repo1.maven.org/maven2/org/apache/iceberg/iceberg-spark-runtime-${ICEBERG_SPARK_RUNTIME_VERSION}/${ICEBERG_VERSION}/iceberg-spark-runtime-${ICEBERG_SPARK_RUNTIME_VERSION}-${ICEBERG_VERSION}.jar -Lo iceberg-spark-runtime-${ICEBERG_SPARK_RUNTIME_VERSION}-${ICEBERG_VERSION}.jar \
     && mv iceberg-spark-runtime-${ICEBERG_SPARK_RUNTIME_VERSION}-${ICEBERG_VERSION}.jar /opt/spark/jars

-# Download Java AWS SDK
-RUN curl -s https://repo1.maven.org/maven2/software/amazon/awssdk/bundle/${AWS_SDK_VERSION}/bundle-${AWS_SDK_VERSION}.jar -Lo bundle-${AWS_SDK_VERSION}.jar \
-    && mv bundle-${AWS_SDK_VERSION}.jar /opt/spark/jars
-
-# Download URL connection client required for S3FileIO
-RUN curl -s https://repo1.maven.org/maven2/software/amazon/awssdk/url-connection-client/${AWS_SDK_VERSION}/url-connection-client-${AWS_SDK_VERSION}.jar -Lo url-connection-client-${AWS_SDK_VERSION}.jar \
-    && mv url-connection-client-${AWS_SDK_VERSION}.jar /opt/spark/jars
+# Download AWS bundle
+RUN curl -s https://repo1.maven.org/maven2/org/apache/iceberg/iceberg-aws-bundle/${ICEBERG_VERSION}/iceberg-aws-bundle-${ICEBERG_VERSION}.jar -Lo iceberg-aws-bundle-${ICEBERG_VERSION}.jar \
+    && mv iceberg-aws-bundle-${ICEBERG_VERSION}.jar /opt/spark/jars

 COPY spark-defaults.conf /opt/spark/conf
 ENV PATH="/opt/spark/sbin:/opt/spark/bin:${PATH}"
diff --git a/dev/provision.py b/dev/provision.py
index 56e3459edd..b75030f8a3 100644
--- a/dev/provision.py
+++ b/dev/provision.py
@@ -279,3 +279,25 @@
     (CAST('2023-03-12' AS date), CAST('2023-03-12 12:22:00' AS timestamp), 12, 'l');
 """
 )
+
+# There is an issue with CREATE OR REPLACE
+# https://github.com/apache/iceberg/issues/8756
+spark.sql(
+    """
+DROP TABLE IF EXISTS default.test_table_version
+"""
+)
+
+spark.sql(
+    """
+CREATE TABLE default.test_table_version (
+    dt date,
+    number integer,
+    letter string
+)
+USING iceberg
+TBLPROPERTIES (
+    'format-version'='1'
+);
+"""
+)
diff --git a/dev/spark-defaults.conf b/dev/spark-defaults.conf
index 28f93b15a6..56c345432a 100644
--- a/dev/spark-defaults.conf
+++ b/dev/spark-defaults.conf
@@ -20,7 +20,7 @@ spark.sql.catalog.demo           org.apache.iceberg.spark.SparkCatalog
 spark.sql.catalog.demo.type      rest
 spark.sql.catalog.demo.uri       http://rest:8181
 spark.sql.catalog.demo.io-impl   org.apache.iceberg.aws.s3.S3FileIO
-spark.sql.catalog.demo.warehouse s3a://warehouse/wh/
+spark.sql.catalog.demo.warehouse s3://warehouse/wh/
 spark.sql.catalog.demo.s3.endpoint http://minio:9000
 spark.sql.defaultCatalog         demo
 spark.eventLog.enabled           true
diff --git a/mkdocs/docs/api.md b/mkdocs/docs/api.md
index 55eadc5f5b..c19efd659d 100644
--- a/mkdocs/docs/api.md
+++ b/mkdocs/docs/api.md
@@ -195,9 +195,9 @@ Renaming a field in an Iceberg table is simple:

 ```python
 with table.update_schema() as update:
-    update.rename("retries", "num_retries")
+    update.rename_column("retries", "num_retries")
     # This will rename `confirmed_by` to `exchange`
-    update.rename("properties.confirmed_by", "exchange")
+    update.rename_column("properties.confirmed_by", "exchange")
 ```

 ### Move column
diff --git a/mkdocs/docs/contributing.md b/mkdocs/docs/contributing.md
index 87a8cc701b..3973b763a0 100644
--- a/mkdocs/docs/contributing.md
+++ b/mkdocs/docs/contributing.md
@@ -43,7 +43,7 @@ If you want to install the library on the host, you can simply run `pip3 install
 To set up IDEA with Poetry ([also on Loom](https://www.loom.com/share/6d36464d45f244729d91003e7f671fd2)):

 - Open up the Python project in IntelliJ
-- Make sure that you're on latest master (that includes Poetry)
+- Make sure that you're on latest main (that includes Poetry)
 - Go to File -> Project Structure (⌘;)
 - Go to Platform Settings -> SDKs
 - Click the + sign -> Add Python SDK
diff --git a/mkdocs/docs/how-to-release.md b/mkdocs/docs/how-to-release.md
index 32e8744ac7..db8f80665e 100644
--- a/mkdocs/docs/how-to-release.md
+++ b/mkdocs/docs/how-to-release.md
@@ -54,10 +54,10 @@ Both the source distribution (`sdist`) and the binary distributions (`wheels`) n

 Before committing the files to the Apache SVN artifact distribution SVN hashes need to be generated, and those need to be signed with gpg to make sure that they are authentic.

-Go to [Github Actions and run the `Python release` action](https://github.com/apache/iceberg/actions/workflows/python-release.yml). **Set the version to master, since we cannot modify the source**. Download the zip, and sign the files:
+Go to [Github Actions and run the `Python release` action](https://github.com/apache/iceberg/actions/workflows/python-release.yml). **Set the version to main, since we cannot modify the source**. Download the zip, and sign the files:

 ```bash
-cd release-master/
+cd release-main/

 for name in $(ls pyiceberg-*.whl pyiceberg-*.tar.gz)
 do
diff --git a/mkdocs/mkdocs.yml b/mkdocs/mkdocs.yml
index 5f35129c6b..90892ac73b 100644
--- a/mkdocs/mkdocs.yml
+++ b/mkdocs/mkdocs.yml
@@ -17,8 +17,8 @@
 ---
 site_name: PyIceberg
 site_url: https://py.iceberg.apache.org/
-repo_url: "https://github.com/apache/iceberg/tree/master/python"
-repo_name: "apache/iceberg/python"
+repo_url: "https://github.com/apache/iceberg-python"
+repo_name: "apache/iceberg-python"

 plugins:
   - gen-files:
diff --git a/mkdocs/requirements.txt b/mkdocs/requirements.txt
index e810136a3a..80c414c7c4 100644
--- a/mkdocs/requirements.txt
+++ b/mkdocs/requirements.txt
@@ -16,10 +16,10 @@
 # under the License.

 mkdocs==1.5.3
-griffe==0.36.4
+griffe==0.36.5
 jinja2==3.1.2
 mkdocstrings==0.23.0
-mkdocstrings-python==1.7.1
+mkdocstrings-python==1.7.3
 mkdocs-literate-nav==0.6.1
 mkdocs-autorefs==0.5.0
 mkdocs-gen-files==0.5.0
diff --git a/poetry.lock b/poetry.lock
index 597fecd0bd..ae8b2dbf0a 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -576,63 +576,63 @@ files = [

 [[package]]
 name = "coverage"
-version = "7.3.1"
+version = "7.3.2"
 description = "Code coverage measurement for Python"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "coverage-7.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cd0f7429ecfd1ff597389907045ff209c8fdb5b013d38cfa7c60728cb484b6e3"},
-    {file = "coverage-7.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:966f10df9b2b2115da87f50f6a248e313c72a668248be1b9060ce935c871f276"},
-    {file = "coverage-7.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0575c37e207bb9b98b6cf72fdaaa18ac909fb3d153083400c2d48e2e6d28bd8e"},
-    {file = "coverage-7.3.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:245c5a99254e83875c7fed8b8b2536f040997a9b76ac4c1da5bff398c06e860f"},
-    {file = "coverage-7.3.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c96dd7798d83b960afc6c1feb9e5af537fc4908852ef025600374ff1a017392"},
-    {file = "coverage-7.3.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:de30c1aa80f30af0f6b2058a91505ea6e36d6535d437520067f525f7df123887"},
-    {file = "coverage-7.3.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:50dd1e2dd13dbbd856ffef69196781edff26c800a74f070d3b3e3389cab2600d"},
-    {file = "coverage-7.3.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9c0c19f70d30219113b18fe07e372b244fb2a773d4afde29d5a2f7930765136"},
-    {file = "coverage-7.3.1-cp310-cp310-win32.whl", hash = "sha256:770f143980cc16eb601ccfd571846e89a5fe4c03b4193f2e485268f224ab602f"},
-    {file = "coverage-7.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:cdd088c00c39a27cfa5329349cc763a48761fdc785879220d54eb785c8a38520"},
-    {file = "coverage-7.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:74bb470399dc1989b535cb41f5ca7ab2af561e40def22d7e188e0a445e7639e3"},
-    {file = "coverage-7.3.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:025ded371f1ca280c035d91b43252adbb04d2aea4c7105252d3cbc227f03b375"},
-    {file = "coverage-7.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6191b3a6ad3e09b6cfd75b45c6aeeffe7e3b0ad46b268345d159b8df8d835f9"},
-    {file = "coverage-7.3.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7eb0b188f30e41ddd659a529e385470aa6782f3b412f860ce22b2491c89b8593"},
-
{file = "coverage-7.3.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75c8f0df9dfd8ff745bccff75867d63ef336e57cc22b2908ee725cc552689ec8"}, - {file = "coverage-7.3.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7eb3cd48d54b9bd0e73026dedce44773214064be93611deab0b6a43158c3d5a0"}, - {file = "coverage-7.3.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ac3c5b7e75acac31e490b7851595212ed951889918d398b7afa12736c85e13ce"}, - {file = "coverage-7.3.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5b4ee7080878077af0afa7238df1b967f00dc10763f6e1b66f5cced4abebb0a3"}, - {file = "coverage-7.3.1-cp311-cp311-win32.whl", hash = "sha256:229c0dd2ccf956bf5aeede7e3131ca48b65beacde2029f0361b54bf93d36f45a"}, - {file = "coverage-7.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:c6f55d38818ca9596dc9019eae19a47410d5322408140d9a0076001a3dcb938c"}, - {file = "coverage-7.3.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5289490dd1c3bb86de4730a92261ae66ea8d44b79ed3cc26464f4c2cde581fbc"}, - {file = "coverage-7.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ca833941ec701fda15414be400c3259479bfde7ae6d806b69e63b3dc423b1832"}, - {file = "coverage-7.3.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd694e19c031733e446c8024dedd12a00cda87e1c10bd7b8539a87963685e969"}, - {file = "coverage-7.3.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aab8e9464c00da5cb9c536150b7fbcd8850d376d1151741dd0d16dfe1ba4fd26"}, - {file = "coverage-7.3.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87d38444efffd5b056fcc026c1e8d862191881143c3aa80bb11fcf9dca9ae204"}, - {file = "coverage-7.3.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:8a07b692129b8a14ad7a37941a3029c291254feb7a4237f245cfae2de78de037"}, - {file = "coverage-7.3.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:2829c65c8faaf55b868ed7af3c7477b76b1c6ebeee99a28f59a2cb5907a45760"}, - {file = "coverage-7.3.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1f111a7d85658ea52ffad7084088277135ec5f368457275fc57f11cebb15607f"}, - {file = "coverage-7.3.1-cp312-cp312-win32.whl", hash = "sha256:c397c70cd20f6df7d2a52283857af622d5f23300c4ca8e5bd8c7a543825baa5a"}, - {file = "coverage-7.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:5ae4c6da8b3d123500f9525b50bf0168023313963e0e2e814badf9000dd6ef92"}, - {file = "coverage-7.3.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ca70466ca3a17460e8fc9cea7123c8cbef5ada4be3140a1ef8f7b63f2f37108f"}, - {file = "coverage-7.3.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f2781fd3cabc28278dc982a352f50c81c09a1a500cc2086dc4249853ea96b981"}, - {file = "coverage-7.3.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6407424621f40205bbe6325686417e5e552f6b2dba3535dd1f90afc88a61d465"}, - {file = "coverage-7.3.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:04312b036580ec505f2b77cbbdfb15137d5efdfade09156961f5277149f5e344"}, - {file = "coverage-7.3.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac9ad38204887349853d7c313f53a7b1c210ce138c73859e925bc4e5d8fc18e7"}, - {file = "coverage-7.3.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:53669b79f3d599da95a0afbef039ac0fadbb236532feb042c534fbb81b1a4e40"}, - {file = 
"coverage-7.3.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:614f1f98b84eb256e4f35e726bfe5ca82349f8dfa576faabf8a49ca09e630086"}, - {file = "coverage-7.3.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f1a317fdf5c122ad642db8a97964733ab7c3cf6009e1a8ae8821089993f175ff"}, - {file = "coverage-7.3.1-cp38-cp38-win32.whl", hash = "sha256:defbbb51121189722420a208957e26e49809feafca6afeef325df66c39c4fdb3"}, - {file = "coverage-7.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:f4f456590eefb6e1b3c9ea6328c1e9fa0f1006e7481179d749b3376fc793478e"}, - {file = "coverage-7.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f12d8b11a54f32688b165fd1a788c408f927b0960984b899be7e4c190ae758f1"}, - {file = "coverage-7.3.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f09195dda68d94a53123883de75bb97b0e35f5f6f9f3aa5bf6e496da718f0cb6"}, - {file = "coverage-7.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6601a60318f9c3945be6ea0f2a80571f4299b6801716f8a6e4846892737ebe4"}, - {file = "coverage-7.3.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07d156269718670d00a3b06db2288b48527fc5f36859425ff7cec07c6b367745"}, - {file = "coverage-7.3.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:636a8ac0b044cfeccae76a36f3b18264edcc810a76a49884b96dd744613ec0b7"}, - {file = "coverage-7.3.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5d991e13ad2ed3aced177f524e4d670f304c8233edad3210e02c465351f785a0"}, - {file = "coverage-7.3.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:586649ada7cf139445da386ab6f8ef00e6172f11a939fc3b2b7e7c9082052fa0"}, - {file = "coverage-7.3.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4aba512a15a3e1e4fdbfed2f5392ec221434a614cc68100ca99dcad7af29f3f8"}, - {file = "coverage-7.3.1-cp39-cp39-win32.whl", hash = "sha256:6bc6f3f4692d806831c136c5acad5ccedd0262aa44c087c46b7101c77e139140"}, - {file = "coverage-7.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:553d7094cb27db58ea91332e8b5681bac107e7242c23f7629ab1316ee73c4981"}, - {file = "coverage-7.3.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:220eb51f5fb38dfdb7e5d54284ca4d0cd70ddac047d750111a68ab1798945194"}, - {file = "coverage-7.3.1.tar.gz", hash = "sha256:6cb7fe1581deb67b782c153136541e20901aa312ceedaf1467dcb35255787952"}, + {file = "coverage-7.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d872145f3a3231a5f20fd48500274d7df222e291d90baa2026cc5152b7ce86bf"}, + {file = "coverage-7.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:310b3bb9c91ea66d59c53fa4989f57d2436e08f18fb2f421a1b0b6b8cc7fffda"}, + {file = "coverage-7.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f47d39359e2c3779c5331fc740cf4bce6d9d680a7b4b4ead97056a0ae07cb49a"}, + {file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa72dbaf2c2068404b9870d93436e6d23addd8bbe9295f49cbca83f6e278179c"}, + {file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:beaa5c1b4777f03fc63dfd2a6bd820f73f036bfb10e925fce067b00a340d0f3f"}, + {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:dbc1b46b92186cc8074fee9d9fbb97a9dd06c6cbbef391c2f59d80eabdf0faa6"}, + {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:315a989e861031334d7bee1f9113c8770472db2ac484e5b8c3173428360a9148"}, + {file = 
"coverage-7.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d1bc430677773397f64a5c88cb522ea43175ff16f8bfcc89d467d974cb2274f9"}, + {file = "coverage-7.3.2-cp310-cp310-win32.whl", hash = "sha256:a889ae02f43aa45032afe364c8ae84ad3c54828c2faa44f3bfcafecb5c96b02f"}, + {file = "coverage-7.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:c0ba320de3fb8c6ec16e0be17ee1d3d69adcda99406c43c0409cb5c41788a611"}, + {file = "coverage-7.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ac8c802fa29843a72d32ec56d0ca792ad15a302b28ca6203389afe21f8fa062c"}, + {file = "coverage-7.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:89a937174104339e3a3ffcf9f446c00e3a806c28b1841c63edb2b369310fd074"}, + {file = "coverage-7.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e267e9e2b574a176ddb983399dec325a80dbe161f1a32715c780b5d14b5f583a"}, + {file = "coverage-7.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2443cbda35df0d35dcfb9bf8f3c02c57c1d6111169e3c85fc1fcc05e0c9f39a3"}, + {file = "coverage-7.3.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4175e10cc8dda0265653e8714b3174430b07c1dca8957f4966cbd6c2b1b8065a"}, + {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0cbf38419fb1a347aaf63481c00f0bdc86889d9fbf3f25109cf96c26b403fda1"}, + {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5c913b556a116b8d5f6ef834038ba983834d887d82187c8f73dec21049abd65c"}, + {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1981f785239e4e39e6444c63a98da3a1db8e971cb9ceb50a945ba6296b43f312"}, + {file = "coverage-7.3.2-cp311-cp311-win32.whl", hash = "sha256:43668cabd5ca8258f5954f27a3aaf78757e6acf13c17604d89648ecc0cc66640"}, + {file = "coverage-7.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10c39c0452bf6e694511c901426d6b5ac005acc0f78ff265dbe36bf81f808a2"}, + {file = "coverage-7.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4cbae1051ab791debecc4a5dcc4a1ff45fc27b91b9aee165c8a27514dd160836"}, + {file = "coverage-7.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12d15ab5833a997716d76f2ac1e4b4d536814fc213c85ca72756c19e5a6b3d63"}, + {file = "coverage-7.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c7bba973ebee5e56fe9251300c00f1579652587a9f4a5ed8404b15a0471f216"}, + {file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fe494faa90ce6381770746077243231e0b83ff3f17069d748f645617cefe19d4"}, + {file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6e9589bd04d0461a417562649522575d8752904d35c12907d8c9dfeba588faf"}, + {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d51ac2a26f71da1b57f2dc81d0e108b6ab177e7d30e774db90675467c847bbdf"}, + {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:99b89d9f76070237975b315b3d5f4d6956ae354a4c92ac2388a5695516e47c84"}, + {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fa28e909776dc69efb6ed975a63691bc8172b64ff357e663a1bb06ff3c9b589a"}, + {file = "coverage-7.3.2-cp312-cp312-win32.whl", hash = "sha256:289fe43bf45a575e3ab10b26d7b6f2ddb9ee2dba447499f5401cfb5ecb8196bb"}, + {file = "coverage-7.3.2-cp312-cp312-win_amd64.whl", hash = 
"sha256:7dbc3ed60e8659bc59b6b304b43ff9c3ed858da2839c78b804973f613d3e92ed"}, + {file = "coverage-7.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f94b734214ea6a36fe16e96a70d941af80ff3bfd716c141300d95ebc85339738"}, + {file = "coverage-7.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:af3d828d2c1cbae52d34bdbb22fcd94d1ce715d95f1a012354a75e5913f1bda2"}, + {file = "coverage-7.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:630b13e3036e13c7adc480ca42fa7afc2a5d938081d28e20903cf7fd687872e2"}, + {file = "coverage-7.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9eacf273e885b02a0273bb3a2170f30e2d53a6d53b72dbe02d6701b5296101c"}, + {file = "coverage-7.3.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8f17966e861ff97305e0801134e69db33b143bbfb36436efb9cfff6ec7b2fd9"}, + {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b4275802d16882cf9c8b3d057a0839acb07ee9379fa2749eca54efbce1535b82"}, + {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:72c0cfa5250f483181e677ebc97133ea1ab3eb68645e494775deb6a7f6f83901"}, + {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cb536f0dcd14149425996821a168f6e269d7dcd2c273a8bff8201e79f5104e76"}, + {file = "coverage-7.3.2-cp38-cp38-win32.whl", hash = "sha256:307adb8bd3abe389a471e649038a71b4eb13bfd6b7dd9a129fa856f5c695cf92"}, + {file = "coverage-7.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:88ed2c30a49ea81ea3b7f172e0269c182a44c236eb394718f976239892c0a27a"}, + {file = "coverage-7.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b631c92dfe601adf8f5ebc7fc13ced6bb6e9609b19d9a8cd59fa47c4186ad1ce"}, + {file = "coverage-7.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d3d9df4051c4a7d13036524b66ecf7a7537d14c18a384043f30a303b146164e9"}, + {file = "coverage-7.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f7363d3b6a1119ef05015959ca24a9afc0ea8a02c687fe7e2d557705375c01f"}, + {file = "coverage-7.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f11cc3c967a09d3695d2a6f03fb3e6236622b93be7a4b5dc09166a861be6d25"}, + {file = "coverage-7.3.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:149de1d2401ae4655c436a3dced6dd153f4c3309f599c3d4bd97ab172eaf02d9"}, + {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3a4006916aa6fee7cd38db3bfc95aa9c54ebb4ffbfc47c677c8bba949ceba0a6"}, + {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9028a3871280110d6e1aa2df1afd5ef003bab5fb1ef421d6dc748ae1c8ef2ebc"}, + {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9f805d62aec8eb92bab5b61c0f07329275b6f41c97d80e847b03eb894f38d083"}, + {file = "coverage-7.3.2-cp39-cp39-win32.whl", hash = "sha256:d1c88ec1a7ff4ebca0219f5b1ef863451d828cccf889c173e1253aa84b1e07ce"}, + {file = "coverage-7.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b4767da59464bb593c07afceaddea61b154136300881844768037fd5e859353f"}, + {file = "coverage-7.3.2-pp38.pp39.pp310-none-any.whl", hash = "sha256:ae97af89f0fbf373400970c0a21eef5aa941ffeed90aee43650b81f7d7f47637"}, + {file = "coverage-7.3.2.tar.gz", hash = "sha256:be32ad29341b0170e795ca590e1c07e81fc061cb5b10c74ce7203491484404ef"}, ] [package.dependencies] @@ -688,69 +688,69 @@ test-randomorder = ["pytest-randomly"] 
[[package]] name = "cython" -version = "3.0.2" +version = "3.0.3" description = "The Cython compiler for writing C extensions in the Python language." optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ - {file = "Cython-3.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8ccb91d2254e34724f1541b2a6fcdfacdb88284185b0097ae84e0ddf476c7a38"}, - {file = "Cython-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c298b1589205ecaaed0457ad05e0c8a43e7db2053607f48ed4a899cb6aa114df"}, - {file = "Cython-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e825e682cef76d0c33384f38b56b7e87c76152482a914dfc78faed6ff66ce05a"}, - {file = "Cython-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:77ec0134fc1b10aebef2013936a91c07bff2498ec283bc2eca099ee0cb94d12e"}, - {file = "Cython-3.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c90eeb94395315e65fd758a2f86b92904fce7b50060b4d45a878ef6767f9276e"}, - {file = "Cython-3.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:38085523fa7a299638d051ae08144222785639882f6291bd275c0b12db1034ff"}, - {file = "Cython-3.0.2-cp310-cp310-win32.whl", hash = "sha256:b032cb0c69082f0665b2c5fb416d041157062f1538336d0edf823b9ee500e39c"}, - {file = "Cython-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:067b2b9eb487bd61367b296f11b7c1c70a084b3eb7d5a572f607cd1fc5ca5586"}, - {file = "Cython-3.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:213ff9f95de319e54b520bf31edd6aa7a1fa4fbf617c2beb0f92362595e6476a"}, - {file = "Cython-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4bebbca13078125a35937966137af4bd0300a0c66fd7ae4ce36adc049b13bdf3"}, - {file = "Cython-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e5587128e8c2423aefcffa4ded4ddf60d44898938fbb7c0f236636a750a94f"}, - {file = "Cython-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:78e2853d484643c6b7ac3bdb48392753442da1c71b689468fa3176b619bebe54"}, - {file = "Cython-3.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e722732e9aa9bde667ed6d87525234823eb7766ca234cfb19d7e0c095a2ef4"}, - {file = "Cython-3.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:989787fc24a95100a26918b6577d06e15a8868a3ed267009c5cfcf1a906179ac"}, - {file = "Cython-3.0.2-cp311-cp311-win32.whl", hash = "sha256:d21801981db44b7e9f9768f121317946461d56b51de1e6eff3c42e8914048696"}, - {file = "Cython-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:809617cf4825b2138ce0ec827e1f28e39668743c81ac8286373f8d148c05f088"}, - {file = "Cython-3.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5682293d344b7dbad97ce6eceb9e887aca6e53499709db9da726ca3424e5559d"}, - {file = "Cython-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7e08ff5da5f5b969639784b1bffcd880a0c0f048d182aed7cba9945ee8b367c2"}, - {file = "Cython-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8850269ff59f77a1629e26d0576701925360d732011d6d3516ccdc5b2c2bc310"}, - {file = "Cython-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:550b3fbe9b3c555b44ded934f4822f9fcc04dfcee512167ebcbbd370ccede20e"}, - {file = "Cython-3.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4db017b104f47b1185237702f6ed2651839c8124614683efa7c489f3fa4e19d9"}, - 
{file = "Cython-3.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:75a2395cc7b78cff59be6e9b7f92bbb5d7b8d25203f6d3fb6f72bdb7d3f49777"}, - {file = "Cython-3.0.2-cp312-cp312-win32.whl", hash = "sha256:786b6034a91e886116bb562fe42f8bf0f97c3e00c02e56791d02675959ed65b1"}, - {file = "Cython-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:cc9d173ab8b167cae674f6deed8c65ba816574797a2bd6d8aa623277d1fa81ca"}, - {file = "Cython-3.0.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:8948504338d7a140ce588333177dcabf0743a68dbc83b0174f214f5b959634d5"}, - {file = "Cython-3.0.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a51efba0e136b2af358e5a347bae09678b17460c35cf1eab24f0476820348991"}, - {file = "Cython-3.0.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05cb2a73810f045d328b7579cf98f550a9e601df5e282d1fea0512d8ad589011"}, - {file = "Cython-3.0.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22ba78e48bdb65977928ecb275ac8c82df7b0eefa075078a1363a5af4606b42e"}, - {file = "Cython-3.0.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:302281b927409b3e0ef8cd9251eab782cf1acd2578eab305519fbae5d184b7e9"}, - {file = "Cython-3.0.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:a1c3675394b81024aaf56e4f53c2b4f81d9a116c7049e9d4706f810899c9134e"}, - {file = "Cython-3.0.2-cp36-cp36m-win32.whl", hash = "sha256:34f7b014ebce5d325c8084e396c81cdafbd8d82be56780dffe6b67b28c891f1b"}, - {file = "Cython-3.0.2-cp36-cp36m-win_amd64.whl", hash = "sha256:477cd3549597f09a1608da7b05e16ba641e9aedd171b868533a5a07790ed886f"}, - {file = "Cython-3.0.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a49dde9f9e29ea82f29aaf3bb1a270b6eb90b75d627c7ff2f5dd3764540ae646"}, - {file = "Cython-3.0.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc1c8013fad0933f5201186eccc5f2be223cafd6a8dcd586d3f7bb6ba84dc845"}, - {file = "Cython-3.0.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b75e9c9d7ad7c9dd85d45241d1d4e3c5f66079c1f84eec91689c26d98bc3349"}, - {file = "Cython-3.0.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f43c4d3ecd9e3b8b7afe834e519f55cf4249b1088f96d11b96f02c55cbaeff7"}, - {file = "Cython-3.0.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:dab6a923e21e212aa3dc6dde9b22a190f5d7c449315a94e57ddc019ea74a979b"}, - {file = "Cython-3.0.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ae453cfa933b919c0a19d2cc5dc9fb28486268e95dc2ab7a11ab7f99cf8c3883"}, - {file = "Cython-3.0.2-cp37-cp37m-win32.whl", hash = "sha256:b1f023d36a3829069ed11017c670128be3f135a9c17bd64c35d3b3442243b05c"}, - {file = "Cython-3.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:011c4e0b75baee1843334562487eb4fbc0c59ddb2cc32a978b972a81eedcbdcc"}, - {file = "Cython-3.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:832bbee87bca760efeae248ddf19ccd77f9a2355cb6f8a64f20cc377e56957b3"}, - {file = "Cython-3.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4fe806d154b6b7f0ab746dac36c022889e2e7cf47546ff9afdc29a62cfa692d0"}, - {file = "Cython-3.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e486331a29e7700b1ad5f4f753bef483c81412a5e64a873df46d6cb66f9a65de"}, - {file = "Cython-3.0.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54d41a1dfbaab74449873e7f8e6cd4239850fe7a50f7f784dd99a560927f3bac"}, - {file 
= "Cython-3.0.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4dca13c86d6cd523c7d8bbf8db1b2bbf8faedd0addedb229158d8015ad1819e1"}, - {file = "Cython-3.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:10cbfb37f31938371a6213cc8b5459c639954aed053efeded3c012d4c5915db9"}, - {file = "Cython-3.0.2-cp38-cp38-win32.whl", hash = "sha256:e663c237579c033deaa2cb362b74651da7712f56e441c11382510a8c4c4f2dd7"}, - {file = "Cython-3.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:2f84bd6cefa5130750c492038170c44f1cbd6f42e9ed85e168fd9cb453f85160"}, - {file = "Cython-3.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f37e4287f520f3748a06ad5eaae09ba4ac68f52e155d70de5f75780d83575c43"}, - {file = "Cython-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd30826ca8b27b2955a63c8ffe8aacc9f0779582b4bd154cf7b441ac10dae2cb"}, - {file = "Cython-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:08d67c7225a09eeb77e090c8d4f60677165b052ccf76e3a57d8237064e5c2de2"}, - {file = "Cython-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e625eec8c5c9a8cb062a318b257cc469d301bed952c7daf86e38bbd3afe7c91"}, - {file = "Cython-3.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1b12a8f23270675b537d1c3b988f845bea4bbcc66ae0468857f5ede0526d4522"}, - {file = "Cython-3.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:62dd78afdf748a58dae9c9b9c42a1519ae30787b28ce5f84a0e1bb54144142ca"}, - {file = "Cython-3.0.2-cp39-cp39-win32.whl", hash = "sha256:d0d0cc4ecc05f41c5e02af14ac0083552d22efed976f79eb7bade55fed63b25d"}, - {file = "Cython-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:147cc1d3dda8b06de9d86df5e59cdf15f0a522620168b7349a5ec88b48104d7d"}, - {file = "Cython-3.0.2-py2.py3-none-any.whl", hash = "sha256:8f1c9e4b8e413da211dd7942440cf410ff0eafb081309e04e81f4fafbb146bf2"}, - {file = "Cython-3.0.2.tar.gz", hash = "sha256:9594818dca8bb22ae6580c5222da2bc5cc32334350bd2d294a00d8669bcc61b5"}, + {file = "Cython-3.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:85073ab414ff432d2a39d36cb49c39ce69f30b53daccc7699bfad0ce3d1b539a"}, + {file = "Cython-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:30c1d9bd2bcb9b1a195dd23b359771857df8ebd4a1038fb37dd155d3ea38c09c"}, + {file = "Cython-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9296f332523d5c550ebae694483874d255264cff3281372f25ea5f2739b96651"}, + {file = "Cython-3.0.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d52ed47edbf48392dd0f419135e7ff59673f6b32d27d3ffc9e61a515571c050d"}, + {file = "Cython-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6f63e959d13775472d37e731b2450d120e8db87e956e2de74475e8f17a89b1fb"}, + {file = "Cython-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:22d268c3023f405e13aa0c1600389794694ab3671614f8e782d89a1055da0858"}, + {file = "Cython-3.0.3-cp310-cp310-win32.whl", hash = "sha256:51850f277660f67171135515e45edfc8815f723ff20768e39cb9785b2671062f"}, + {file = "Cython-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:bff1fec968a6b2ca452ae9bff6d6d0bf8486427d4d791e85543240266b6915e0"}, + {file = "Cython-3.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:587d664ff6bd5b03611ddc6ef320b7f8677d824c45d15553f16a69191a643843"}, + {file = "Cython-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:3192cd780435fca5ae5d79006b48cbf0ea674853b5a7b0055a122045bff9d84e"}, + {file = "Cython-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f7578b59ffd0d9c95ae6f7ae852309918915998b7fe0ed2f8725a683de8da276"}, + {file = "Cython-3.0.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f05889eb1b5a95a7adf97303279c2d13819ff62292e10337e6c940dbf570b5d"}, + {file = "Cython-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1d3416c24a1b7bf3a2d9615a7f9f12b00fac0b94fb2e61449e0c1ecf20d6ed52"}, + {file = "Cython-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4cc0f7244da06fdc6a4a7240df788805436b6fb7f20edee777eb77777d9d2eb1"}, + {file = "Cython-3.0.3-cp311-cp311-win32.whl", hash = "sha256:845e24ee70c204062e03f813114751387abf454b29410336797582e04abbc07b"}, + {file = "Cython-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:e3ad109bdf40f55318e001cad12bcc00e8119569b49f72e442c082355617b036"}, + {file = "Cython-3.0.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:14b898ec2fdeea68f81bd3838b035800b173b59ed532674f65a82724bab35d3b"}, + {file = "Cython-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:188705eeae094bb716bc3e3d0da4e13469f0a0de803b65dfd63fe7eb78ec6173"}, + {file = "Cython-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eb128fa40305f18eaa4d8dd0980033b92db86aada927181d3c3d561aa0634db"}, + {file = "Cython-3.0.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80bd3167e689419cdaf7ede0d20a9f126b9698a43b1f8d3e8f54b970c7a6cd07"}, + {file = "Cython-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d0c7b315f6feb75e2c949dc7816da5626cdca097fea1c0d9f4fdb20d2f4ffc2a"}, + {file = "Cython-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:db9d4de4cd6cd3ad1c3f455aae877ad81a92b92b7cbb01dfb32b6306b873932b"}, + {file = "Cython-3.0.3-cp312-cp312-win32.whl", hash = "sha256:be1a679c7ad90813f9206c9d62993f3bd0cba9330668e97bb3f70c87ae94d5f5"}, + {file = "Cython-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:fa08259f4d176b86561eeff6954f9924099c0b0c128fc2cbfc18343c068ad8ca"}, + {file = "Cython-3.0.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:056340c49bf7861eb1eba941423e67620b7c85e264e9a5594163f1d1e8b95acc"}, + {file = "Cython-3.0.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cfbd60137f6fca9c29101d7517d4e341e0fd279ffc2489634e5e2dd592457c2"}, + {file = "Cython-3.0.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66b7e71c16cab0814945014ffb101ead2b173259098bbb1b8138e7a547da3709"}, + {file = "Cython-3.0.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:42b1ff0e19fb4d1fe68b60f55d46942ed246a323f6bbeec302924b78b4c3b637"}, + {file = "Cython-3.0.3-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:5d6af87a787d5ce063e28e508fee34755a945e438c68ecda50eb4ea34c30e13f"}, + {file = "Cython-3.0.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:0147a31fb73a063bb7b6c69fd843c1a2bad18f326f58048d4ee5bdaef87c9fbf"}, + {file = "Cython-3.0.3-cp36-cp36m-win32.whl", hash = "sha256:84084fa05cf9a67a85818fa72a741d1cae2e3096551158730730a3bafc3b2f52"}, + {file = "Cython-3.0.3-cp36-cp36m-win_amd64.whl", hash = "sha256:8a6a9a2d98758768052e4ac1bea4ebc20fae69b4c19cb2bc5457c9174532d302"}, + {file = "Cython-3.0.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:94fa403de3a413cd41b8eb4ddb4adcbd66aa0a64f9a84d1c5f696c93572c83aa"}, + {file = "Cython-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e729fd633a5225570c5480b36e7c530c8a82e2ab6d2944ddbe1ddfff5bf181b1"}, + {file = "Cython-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59bf689409b0e51ef673e3dd0348727aef5b67e40f23f806be64c49cee321de0"}, + {file = "Cython-3.0.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f0ac9ec822fad010248b4a59ac197975de38c95378d0f13201c181dd9b0a2624"}, + {file = "Cython-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:8e78fc42a6e846941d23aba1aca587520ad38c8970255242f08f9288b0eeba85"}, + {file = "Cython-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e40ac8bd6d11355d354bb4975bb88f6e923ba30f85e38f1f1234b642634e4fc4"}, + {file = "Cython-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:77a920ae19fa1db5adb8a618cebb095ca4f56adfbf9fc32cb7008a590607b62b"}, + {file = "Cython-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:0630527a8c9e8fed815c38524e418dab713f5d66f6ac9dc2151b41f3a7727304"}, + {file = "Cython-3.0.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4e956383e57d00b1fa6449b5ec03b9fa5fce2afd41ef3e518bee8e7c89f1616c"}, + {file = "Cython-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ec9e15b821ef7e3c38abe9e4df4e6dda7af159325bc358afd5a3c2d5027ccfe"}, + {file = "Cython-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18f4fb7cc6ad8e99e8f387ebbcded171a701bfbfd8cd3fd46156bf44bb4fd968"}, + {file = "Cython-3.0.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b50f4f75f89e7eef2ed9c9b60746bc4ab1ba2bc0dff64587133db2b63e068f09"}, + {file = "Cython-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5545d20d7a1c0cf17559152f7f4a465c3d5caace82dd051f82e2d753ae9fd956"}, + {file = "Cython-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:1571b045ec1cb15c152c3949f3bd53ee0fa66d434271ea3d225658d99b7e721a"}, + {file = "Cython-3.0.3-cp38-cp38-win32.whl", hash = "sha256:3db04801fd15d826174f63ff45878d4b1e62aff27cf1ea96b186581052d24446"}, + {file = "Cython-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:75d42c8423ab299396f3c938445730600e32e4a2f0298f6f9df4d4a698fe8e16"}, + {file = "Cython-3.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:48bae87b657009e5648c21d4a92de9f3dc6fed3e35e92957fa8a07a18cea2313"}, + {file = "Cython-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ccde14ddc4b424435cb5722aa1529c254bbf3611e1ad9baea12d25e9c049361"}, + {file = "Cython-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c8e5afcc19861c3b22faafbe906c7e1b23f0595073ac10e21a80dec9e60e7dd"}, + {file = "Cython-3.0.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e1c9385e99eef299396b9a1e39790e81819446c6a83e249f6f0fc71a64f57a0"}, + {file = "Cython-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d49d20db27c9cfcf45bb1fbf68f777bd1e04e4b949e4e5172d9ee8c9419bc792"}, + {file = "Cython-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d12591939af93c59defea6fc5320ca099eb44e4694e3b2cbe72fb24406079b97"}, + {file = "Cython-3.0.3-cp39-cp39-win32.whl", hash = "sha256:9f40b27545d583fd7df0d3c1b76b3bcaf8a72dbd8d83d5486af2384015660de8"}, + {file = "Cython-3.0.3-cp39-cp39-win_amd64.whl", hash = 
"sha256:74ba0f11b384246b7965169f08bf67d426e4957fee5c165571340217a9b43cfc"}, + {file = "Cython-3.0.3-py2.py3-none-any.whl", hash = "sha256:176953a8a2532e34a589625a40c934ff339088f2bf4ddaa2e5cb77b05ca0c25c"}, + {file = "Cython-3.0.3.tar.gz", hash = "sha256:327309301b01f729f173a94511cb2280c87ba03c89ed428e88f913f778245030"}, ] [[package]] @@ -850,36 +850,37 @@ test = ["pytest (>=6)"] [[package]] name = "fastavro" -version = "1.8.3" +version = "1.8.4" description = "Fast read/write of AVRO files" optional = false python-versions = ">=3.8" files = [ - {file = "fastavro-1.8.3-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:0e7d98e27cfff61befa23b11422e72a9516fe571d87fd41a656074a958d1f5df"}, - {file = "fastavro-1.8.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60bfc8b6d3a3e27ae68ce952f9c7a63001dd82f96a519ff25d105a2b61b4bae9"}, - {file = "fastavro-1.8.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d107cde443d92be8b65007ef304a602702853925c4b9ce63b66b8cdf04938af0"}, - {file = "fastavro-1.8.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4bf02deeea731910d55e24f3b44a848007b600ddd0b8861dab9075aa116b0da1"}, - {file = "fastavro-1.8.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b30bbe06289310ff60c32c0ab01c394354f5bcae421842f06915ee7e401232ee"}, - {file = "fastavro-1.8.3-cp310-cp310-win_amd64.whl", hash = "sha256:c38ba23be298b1df63eaadf0663e8c1dc3fe355608ba3ce769554f61cc20f2d8"}, - {file = "fastavro-1.8.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4e5d8ad6dcf7cc4e15fc5f30344e4fcb1bac5c0c1b48ae88a46ceef470c04b0c"}, - {file = "fastavro-1.8.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2bf6f1debda2bf84f57bdeee289e38e1ea8b23722792b7bdec8be6b3bf4dac67"}, - {file = "fastavro-1.8.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb2702976cf9bf4e1c66bae3534f498a93272eaa4cf2ba24fe18aa29c5fab647"}, - {file = "fastavro-1.8.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:dfc73765b294ef56f71c1bb064ee81efa1da13bb0b1134dd53674bbb89477c78"}, - {file = "fastavro-1.8.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fa282c78485be06df1e25f0c9b6837de520a22838e7c9af95b58fc68c6c9ce34"}, - {file = "fastavro-1.8.3-cp311-cp311-win_amd64.whl", hash = "sha256:65e59420ce7a8cbb256363b9bc2b98fcd0c220723ec50541aa0aaf137dfa21fb"}, - {file = "fastavro-1.8.3-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:4416fdf69c82364d737e77c2a6ab06eeb20375d84813c061789e20bc047132a5"}, - {file = "fastavro-1.8.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3f00ead48738e882832cc7ad87060365eb3eeace196ff9a5905a4caf0bab351"}, - {file = "fastavro-1.8.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ce57e57ec56a235ab012fde3ce7eaa3846980a9026448fcb32cb065f2460514"}, - {file = "fastavro-1.8.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b74fbcf860084576bd773169a18ddd140d06f9e85bb622756f557023947f179f"}, - {file = "fastavro-1.8.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b9164d7cb1541d15587c3a446f17719bc1f20008a1df1583e55d8b5a323266b5"}, - {file = "fastavro-1.8.3-cp38-cp38-win_amd64.whl", hash = "sha256:7ca4e19db2ded435dd393f58f65297102e7329ca8ba31d03be9c480b34be9123"}, - {file = "fastavro-1.8.3-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:821b1de9785e266142a8658e25df52bceaa40936c087925688a4fad4dee0beb0"}, - {file = "fastavro-1.8.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:0116bf82a10eb3553d61d6d884f18c8b21049463fdefaaea9275d8bad64a0f5b"}, - {file = "fastavro-1.8.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6035368cbcbbb1063c2d1ce763ed5a602f4b6af13b325e77a6b61e45f8172067"}, - {file = "fastavro-1.8.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c0c194340ad5c6a5a5347ba0170d1413c149cd87faddcc519d9fcdedadaa1619"}, - {file = "fastavro-1.8.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7d2c093be35bcf77547d5cdeeefae2c18b88ae529fa3866da81f5c7c342fceb3"}, - {file = "fastavro-1.8.3-cp39-cp39-win_amd64.whl", hash = "sha256:861efe9ad25fc26c3d360761d48930e6aad0cabe5ae888f92a721699bfe612ed"}, - {file = "fastavro-1.8.3.tar.gz", hash = "sha256:a6c2ec69516e908fce64d93a13e6e83afb880f2edb5ad3adaa1eb04c918de6d8"}, + {file = "fastavro-1.8.4-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:7afe1475e8a967c04e2b0ef4d33bc10bffa66b4fa6e08bd2ee9d91b6768cba2a"}, + {file = "fastavro-1.8.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f5fd73609f3c1ac0d90ae3179d2fb9d788f842245db2656ff9225fce871fc5b7"}, + {file = "fastavro-1.8.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78fdf1ba47e43146af72ac48d7b2247a06c4f2d95dfdaad6129c481014b07a6b"}, + {file = "fastavro-1.8.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d950542b3263653f00b695cbc728b5c60ab9ea6df32a7017ad9a6a67235386e7"}, + {file = "fastavro-1.8.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ce2ccfa9aff8df6da683c48542b7b2a216dde6d3a4d1c505c5e1b8ca2ec0abbb"}, + {file = "fastavro-1.8.4-cp310-cp310-win_amd64.whl", hash = "sha256:f12f9914d6196695d3208ea348145a80d0defefe16b8a226373fe8ce68f66139"}, + {file = "fastavro-1.8.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d353aec9c000b96c33ad285651a2cba0f87fe50fcdecc6120689996af427194d"}, + {file = "fastavro-1.8.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1eaed91d6e1fb06c172e0aaf4b1ca1fd019c3f4a481e314bf783a4c74f6b7015"}, + {file = "fastavro-1.8.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9293b303955acd34a6f69dd4ef3465bd575dbde0cd3e3f00227a0ba5012430b4"}, + {file = "fastavro-1.8.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b79baefd61554d9f03c4beaebbe638ef175d0efc1fb01f25e88ee6ae97985ab3"}, + {file = "fastavro-1.8.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:14d7cd3df019d41c66733b8bf5d983af9e1f601d4cb162853a49238a4087d6b0"}, + {file = "fastavro-1.8.4-cp311-cp311-win_amd64.whl", hash = "sha256:c8fb27001b7023910969f15bee2c9205c4e9f40713929d6c1dca8f470fc8fc80"}, + {file = "fastavro-1.8.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:e331229acef15f858d9863ced7b629ebef4bd5f80766d367255e51cbf44f8dab"}, + {file = "fastavro-1.8.4-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:eb76f5bfcde91cde240c93594dae47670cdf1a95d7e5d0dc3ccdef57c6c1c183"}, + {file = "fastavro-1.8.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71ebe1cf090f800ca7d4c64d50c81c2a88c56e6ef6aa5eb61ec425e7ae723617"}, + {file = "fastavro-1.8.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9f0ef601943ea11cd02a59c57f5588cea3e300ac67608f53c904ec7aeddd232"}, + {file = "fastavro-1.8.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1060318f3ab31bcc4b2184cea3763305b773163381601e304000da81a2f7e11f"}, + {file = "fastavro-1.8.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:01c8c7f22172174f2c2c0922801b552fbca75758f84b0ad3cd6f3e505a76ed05"}, + {file = "fastavro-1.8.4-cp38-cp38-win_amd64.whl", hash = "sha256:bc8a1af80b8face4a41d8526a34b6474a874f7367a900d0b14752eacebb7a2b8"}, + {file = "fastavro-1.8.4-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:687a2f8fa83a76906c4ec35c9d0500e13a567fc631845f0e47646c48233c7725"}, + {file = "fastavro-1.8.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b921c63fcfb9149a32c40a9cd27b0e900fcda602455cbce4d773300019b9ce2"}, + {file = "fastavro-1.8.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2610a8683b10be7aaa532ddddbcb719883ee2d6f09dafd4a4a7b46d5d719fc07"}, + {file = "fastavro-1.8.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:94448dc749d098f846f6a6d82d59f85483bd6fcdecfb6234daac5f4494ae4156"}, + {file = "fastavro-1.8.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2d39c6b5db7014a3722a7d206310874430486f4895161911b6b6574cb1a6c48f"}, + {file = "fastavro-1.8.4-cp39-cp39-win_amd64.whl", hash = "sha256:3b73472e8da33bcbf08ec989996637aea04eaca71058bb6d45def6fa4168f541"}, + {file = "fastavro-1.8.4.tar.gz", hash = "sha256:dae6118da27e81abf5957dc79a6d778888fc1bbf67645f52959cb2faba95beff"}, ] [package.extras] @@ -1630,13 +1631,13 @@ files = [ [[package]] name = "moto" -version = "4.2.5" +version = "4.2.6" description = "" optional = false python-versions = ">=3.7" files = [ - {file = "moto-4.2.5-py2.py3-none-any.whl", hash = "sha256:4440bc86d5a7bb77043d8031022f549e0c1e2869f1218b0939dbbbdc10776932"}, - {file = "moto-4.2.5.tar.gz", hash = "sha256:cc9650747088a72db810ed0fd96f852b6fa296ca4b5e7818d17d9e7fdaf5dde6"}, + {file = "moto-4.2.6-py2.py3-none-any.whl", hash = "sha256:20cd41f89b7fe363ef49b9ead787c9a1f3d560f4d0711b3767e7416694de1127"}, + {file = "moto-4.2.6.tar.gz", hash = "sha256:ce0a55d7e756c59a5a4392c7097aa5ca53e00aa2dd3f7000093356be15e7aef9"}, ] [package.dependencies] @@ -1651,29 +1652,29 @@ werkzeug = ">=0.5,<2.2.0 || >2.2.0,<2.2.1 || >2.2.1" xmltodict = "*" [package.extras] -all = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "multipart", "openapi-spec-validator (>=0.2.8)", "py-partiql-parser (==0.3.7)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] +all = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "multipart", "openapi-spec-validator (>=0.2.8)", "py-partiql-parser (==0.4.0)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] apigateway = ["PyYAML (>=5.1)", "ecdsa (!=0.15)", "openapi-spec-validator (>=0.2.8)", "python-jose[cryptography] (>=3.1.0,<4.0.0)"] apigatewayv2 = ["PyYAML (>=5.1)"] appsync = ["graphql-core"] awslambda = ["docker (>=3.0.0)"] batch = ["docker (>=3.0.0)"] -cloudformation = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.2.8)", "py-partiql-parser (==0.3.7)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] +cloudformation = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.2.8)", "py-partiql-parser 
(==0.4.0)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] cognitoidp = ["ecdsa (!=0.15)", "python-jose[cryptography] (>=3.1.0,<4.0.0)"] ds = ["sshpubkeys (>=3.1.0)"] -dynamodb = ["docker (>=3.0.0)", "py-partiql-parser (==0.3.7)"] -dynamodbstreams = ["docker (>=3.0.0)", "py-partiql-parser (==0.3.7)"] +dynamodb = ["docker (>=3.0.0)", "py-partiql-parser (==0.4.0)"] +dynamodbstreams = ["docker (>=3.0.0)", "py-partiql-parser (==0.4.0)"] ebs = ["sshpubkeys (>=3.1.0)"] ec2 = ["sshpubkeys (>=3.1.0)"] efs = ["sshpubkeys (>=3.1.0)"] eks = ["sshpubkeys (>=3.1.0)"] glue = ["pyparsing (>=3.0.7)"] iotdata = ["jsondiff (>=1.1.2)"] -proxy = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=2.5.1)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "multipart", "openapi-spec-validator (>=0.2.8)", "py-partiql-parser (==0.3.7)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] -resourcegroupstaggingapi = ["PyYAML (>=5.1)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.2.8)", "py-partiql-parser (==0.3.7)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "sshpubkeys (>=3.1.0)"] +proxy = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=2.5.1)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "multipart", "openapi-spec-validator (>=0.2.8)", "py-partiql-parser (==0.4.0)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] +resourcegroupstaggingapi = ["PyYAML (>=5.1)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.2.8)", "py-partiql-parser (==0.4.0)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "sshpubkeys (>=3.1.0)"] route53resolver = ["sshpubkeys (>=3.1.0)"] -s3 = ["PyYAML (>=5.1)", "py-partiql-parser (==0.3.7)"] -s3crc32c = ["PyYAML (>=5.1)", "crc32c", "py-partiql-parser (==0.3.7)"] -server = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "flask (!=2.2.0,!=2.2.1)", "flask-cors", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.2.8)", "py-partiql-parser (==0.3.7)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] +s3 = ["PyYAML (>=5.1)", "py-partiql-parser (==0.4.0)"] +s3crc32c = ["PyYAML (>=5.1)", "crc32c", "py-partiql-parser (==0.4.0)"] +server = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "flask (!=2.2.0,!=2.2.1)", "flask-cors", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.2.8)", "py-partiql-parser (==0.4.0)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] ssm = ["PyYAML (>=5.1)"] xray = ["aws-xray-sdk (>=0.93,!=0.96)", "setuptools"] @@ -1864,13 +1865,13 @@ files = [ [[package]] name = "mypy-boto3-glue" -version = "1.28.36" -description = "Type annotations for boto3.Glue 1.28.36 service generated with mypy-boto3-builder 7.18.0" +version = "1.28.63" +description = "Type annotations for boto3.Glue 1.28.63 service generated with mypy-boto3-builder 7.19.0" optional = true python-versions = ">=3.7" files = [ - {file = "mypy-boto3-glue-1.28.36.tar.gz", hash = 
"sha256:161771252bb6a220a0bfd8e6ad71da8548599c611f95fe8a94846f4a3386d2ae"}, - {file = "mypy_boto3_glue-1.28.36-py3-none-any.whl", hash = "sha256:73bc14616ac65a5c02adea5efba7bbbcf8207cd0c0e3237c13d351ebc916338d"}, + {file = "mypy-boto3-glue-1.28.63.tar.gz", hash = "sha256:57c849767b7345a51ccb0ceac3ae2cce425489d409658de1843d77011b50a94e"}, + {file = "mypy_boto3_glue-1.28.63-py3-none-any.whl", hash = "sha256:c9e14271d18b23003844f752fa487b97ef69658280b2b3a804d6b78d52b05013"}, ] [package.dependencies] @@ -2157,71 +2158,80 @@ files = [ [[package]] name = "psycopg2-binary" -version = "2.9.8" +version = "2.9.9" description = "psycopg2 - Python-PostgreSQL Database Adapter" optional = true -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "psycopg2-binary-2.9.8.tar.gz", hash = "sha256:80451e6b6b7c486828d5c7ed50769532bbb04ec3a411f1e833539d5c10eb691c"}, - {file = "psycopg2_binary-2.9.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e271ad6692d50d70ca75db3bd461bfc26316de78de8fe1f504ef16dcea8f2312"}, - {file = "psycopg2_binary-2.9.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3ae22a0fa5c516b84ddb189157fabfa3f12eded5d630e1ce260a18e1771f8707"}, - {file = "psycopg2_binary-2.9.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9a971086db0069aef2fd22ccffb670baac427f4ee2174c4f5c7206254f1e6794"}, - {file = "psycopg2_binary-2.9.8-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b6928a502af71ca2ac9aad535e78c8309892ed3bfa7933182d4c760580c8af4"}, - {file = "psycopg2_binary-2.9.8-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5f955fe6301b84b6fd13970a05f3640fbb62ca3a0d19342356585006c830e038"}, - {file = "psycopg2_binary-2.9.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3723c3f009e2b2771f2491b330edb7091846f1aad0c08fbbd9a1383d6a0c0841"}, - {file = "psycopg2_binary-2.9.8-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e3142c7e51b92855cff300580de949e36a94ab3bfa8f353b27fe26535e9b3542"}, - {file = "psycopg2_binary-2.9.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:de85105c568dc5f0f0efe793209ba83e4675d53d00faffc7a7c7a8bea9e0e19a"}, - {file = "psycopg2_binary-2.9.8-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c7ff2b6a79a92b1b169b03bb91b41806843f0cdf6055256554495bffed1d496d"}, - {file = "psycopg2_binary-2.9.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59f45cca0765aabb52a5822c72d5ff2ec46a28b1c1702de90dc0d306ec5c2001"}, - {file = "psycopg2_binary-2.9.8-cp310-cp310-win32.whl", hash = "sha256:1dbad789ebd1e61201256a19dc2e90fed4706bc966ccad4f374648e5336b1ab4"}, - {file = "psycopg2_binary-2.9.8-cp310-cp310-win_amd64.whl", hash = "sha256:15458c81b0d199ab55825007115f697722831656e6477a427783fe75c201c82b"}, - {file = "psycopg2_binary-2.9.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:395c217156723fe21809dfe8f7a433c5bf8e9bce229944668e4ec709c37c5442"}, - {file = "psycopg2_binary-2.9.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:14f85ff2d5d826a7ce9e6c31e803281ed5a096789f47f52cb728c88f488de01b"}, - {file = "psycopg2_binary-2.9.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e11373d8e4f1f46cf3065bf613f0df9854803dc95aa4a35354ffac19f8c52127"}, - {file = "psycopg2_binary-2.9.8-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01f9731761f711e42459f87bd2ad5d744b9773b5dd05446f3b579a0f077e78e3"}, - {file = "psycopg2_binary-2.9.8-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:54bf5c27bd5867a5fa5341fad29f0d5838e2fed617ef5346884baf8b8b16dd82"}, - {file = "psycopg2_binary-2.9.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4bfabbd7e70785af726cc0209e8e64b926abf91741eca80678b221aad9e72135"}, - {file = "psycopg2_binary-2.9.8-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6369f4bd4d27944498094dccced1ae7ca43376a59dbfe4c8b6a16e9e3dc3ccce"}, - {file = "psycopg2_binary-2.9.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4879ee1d07a6b2c232ae6a74570f4788cd7a29b3cd38bc39bf60225b1d075c78"}, - {file = "psycopg2_binary-2.9.8-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:4336afc0e81726350bd5863e3c3116d8c12aa7f457d3d0b3b3dc36137fec6feb"}, - {file = "psycopg2_binary-2.9.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:63ce1dccfd08d9c5341ac82d62aa04345bc4bf41b5e5b7b2c6c172a28e0eda27"}, - {file = "psycopg2_binary-2.9.8-cp311-cp311-win32.whl", hash = "sha256:59421806c1a0803ea7de9ed061d656c041a84db0da7e73266b98db4c7ba263da"}, - {file = "psycopg2_binary-2.9.8-cp311-cp311-win_amd64.whl", hash = "sha256:ccaa2ae03990cedde1f618ff11ec89fefa84622da73091a67b44553ca8be6711"}, - {file = "psycopg2_binary-2.9.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5aa0c99c12075c593dcdccbb8a7aaa714b716560cc99ef9206f9e75b77520801"}, - {file = "psycopg2_binary-2.9.8-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91719f53ed2a95ebecefac48d855d811cba9d9fe300acc162993bdfde9bc1c3b"}, - {file = "psycopg2_binary-2.9.8-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c68a2e1afb4f2a5bb4b7bb8f90298d21196ac1c66418523e549430b8c4b7cb1e"}, - {file = "psycopg2_binary-2.9.8-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:278ebd63ced5a5f3af5394cb75a9a067243eee21f42f0126c6f1cf85eaeb90f9"}, - {file = "psycopg2_binary-2.9.8-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c84ff9682bc4520504c474e189b3de7c4a4029e529c8b775e39c95c33073767"}, - {file = "psycopg2_binary-2.9.8-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6f5e70e40dae47a4dc7f8eb390753bb599b0f4ede314580e6faa3b7383695d19"}, - {file = "psycopg2_binary-2.9.8-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:673eafbdaa4ed9f5164c90e191c3895cc5f866b9b379fdb59f3a2294e914d9bd"}, - {file = "psycopg2_binary-2.9.8-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:5a0a6e4004697ec98035ff3b8dfc4dba8daa477b23ee891d831cd3cd65ace6be"}, - {file = "psycopg2_binary-2.9.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d29efab3c5d6d978115855a0f2643e0ee8c6450dc536d5b4afec6f52ab99e99e"}, - {file = "psycopg2_binary-2.9.8-cp37-cp37m-win32.whl", hash = "sha256:d4a19a3332f2ac6d093e60a6f1c589f97eb9f9de7e27ea80d67f188384e31572"}, - {file = "psycopg2_binary-2.9.8-cp37-cp37m-win_amd64.whl", hash = "sha256:5262713988d97a9d4cd54b682dec4a413b87b76790e5b16f480450550d11a8f7"}, - {file = "psycopg2_binary-2.9.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e46b0f4683539965ce849f2c13fc53e323bb08d84d4ba2e4b3d976f364c84210"}, - {file = "psycopg2_binary-2.9.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3fd44b52bc9c74c1512662e8da113a1c55127adeeacebaf460babe766517b049"}, - {file = "psycopg2_binary-2.9.8-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b6c607ecb6a9c245ebe162d63ccd9222d38efa3c858bbe38d32810b08b8f87e"}, - {file = "psycopg2_binary-2.9.8-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:e6ef615d48fa60361e57f998327046bd89679c25d06eee9e78156be5a7a76e03"}, - {file = "psycopg2_binary-2.9.8-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65403113ac3a4813a1409fb6a1e43c658b459cc8ed8afcc5f4baf02ec8be4334"}, - {file = "psycopg2_binary-2.9.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5debcb23a052f3fb4c165789ea513b562b2fac0f0f4f53eaf3cf4dc648907ff8"}, - {file = "psycopg2_binary-2.9.8-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dc145a241e1f6381efb924bcf3e3462d6020b8a147363f9111eb0a9c89331ad7"}, - {file = "psycopg2_binary-2.9.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1d669887df169a9b0c09e0f5b46891511850a9ddfcde3593408af9d9774c5c3a"}, - {file = "psycopg2_binary-2.9.8-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:19d40993701e39c49b50e75cd690a6af796d7e7210941ee0fe49cf12b25840e5"}, - {file = "psycopg2_binary-2.9.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b8b2cdf3bce4dd91dc035fbff4eb812f5607dda91364dc216b0920b97b521c7"}, - {file = "psycopg2_binary-2.9.8-cp38-cp38-win32.whl", hash = "sha256:4960c881471ca710b81a67ef148c33ee121c1f8e47a639cf7e06537fe9fee337"}, - {file = "psycopg2_binary-2.9.8-cp38-cp38-win_amd64.whl", hash = "sha256:aeb09db95f38e75ae04e947d283e07be34d03c4c2ace4f0b73dbb9143d506e67"}, - {file = "psycopg2_binary-2.9.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5aef3296d44d05805e634dbbd2972aa8eb7497926dd86047f5e39a79c3ecc086"}, - {file = "psycopg2_binary-2.9.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4d6b592ecc8667e608b9e7344259fbfb428cc053df0062ec3ac75d8270cd5a9f"}, - {file = "psycopg2_binary-2.9.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:693a4e7641556f0b421a7d6c6a74058aead407d860ac1cb9d0bf25be0ca73de8"}, - {file = "psycopg2_binary-2.9.8-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf60c599c40c266a01c458e9c71db7132b11760f98f08233f19b3e0a2153cbf1"}, - {file = "psycopg2_binary-2.9.8-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cbe1e19f59950afd66764e3c905ecee9f2aee9f8df2ef35af6f7948ad93f620"}, - {file = "psycopg2_binary-2.9.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc37de7e3a87f5966965fc874d33c9b68d638e6c3718fdf32a5083de563428b0"}, - {file = "psycopg2_binary-2.9.8-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6e1bb4eb0d9925d65dabaaabcbb279fab444ba66d73f86d4c07dfd11f0139c06"}, - {file = "psycopg2_binary-2.9.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e7bdc94217ae20ad03b375a991e107a31814053bee900ad8c967bf82ef3ff02e"}, - {file = "psycopg2_binary-2.9.8-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:565edaf9f691b17a7fdbabd368b5b3e67d0fdc8f7f6b52177c1d3289f4e763fd"}, - {file = "psycopg2_binary-2.9.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0e3071c947bda6afc6fe2e7b64ebd64fb2cad1bc0e705a3594cb499291f2dfec"}, - {file = "psycopg2_binary-2.9.8-cp39-cp39-win32.whl", hash = "sha256:205cecdd81ff4f1ddd687ce7d06879b9b80cccc428d8d6ebf36fcba08bb6d361"}, - {file = "psycopg2_binary-2.9.8-cp39-cp39-win_amd64.whl", hash = "sha256:1f279ba74f0d6b374526e5976c626d2ac3b8333b6a7b08755c513f4d380d3add"}, + {file = "psycopg2-binary-2.9.9.tar.gz", hash = "sha256:7f01846810177d829c7692f1f5ada8096762d9172af1b1a28d4ab5b77c923c1c"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c2470da5418b76232f02a2fcd2229537bb2d5a7096674ce61859c3229f2eb202"}, + {file = 
"psycopg2_binary-2.9.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c6af2a6d4b7ee9615cbb162b0738f6e1fd1f5c3eda7e5da17861eacf4c717ea7"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:75723c3c0fbbf34350b46a3199eb50638ab22a0228f93fb472ef4d9becc2382b"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83791a65b51ad6ee6cf0845634859d69a038ea9b03d7b26e703f94c7e93dbcf9"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0ef4854e82c09e84cc63084a9e4ccd6d9b154f1dbdd283efb92ecd0b5e2b8c84"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed1184ab8f113e8d660ce49a56390ca181f2981066acc27cf637d5c1e10ce46e"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d2997c458c690ec2bc6b0b7ecbafd02b029b7b4283078d3b32a852a7ce3ddd98"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b58b4710c7f4161b5e9dcbe73bb7c62d65670a87df7bcce9e1faaad43e715245"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0c009475ee389757e6e34611d75f6e4f05f0cf5ebb76c6037508318e1a1e0d7e"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8dbf6d1bc73f1d04ec1734bae3b4fb0ee3cb2a493d35ede9badbeb901fb40f6f"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-win32.whl", hash = "sha256:3f78fd71c4f43a13d342be74ebbc0666fe1f555b8837eb113cb7416856c79682"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-win_amd64.whl", hash = "sha256:876801744b0dee379e4e3c38b76fc89f88834bb15bf92ee07d94acd06ec890a0"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ee825e70b1a209475622f7f7b776785bd68f34af6e7a46e2e42f27b659b5bc26"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1ea665f8ce695bcc37a90ee52de7a7980be5161375d42a0b6c6abedbf0d81f0f"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:143072318f793f53819048fdfe30c321890af0c3ec7cb1dfc9cc87aa88241de2"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c332c8d69fb64979ebf76613c66b985414927a40f8defa16cf1bc028b7b0a7b0"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7fc5a5acafb7d6ccca13bfa8c90f8c51f13d8fb87d95656d3950f0158d3ce53"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:977646e05232579d2e7b9c59e21dbe5261f403a88417f6a6512e70d3f8a046be"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b6356793b84728d9d50ead16ab43c187673831e9d4019013f1402c41b1db9b27"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bc7bb56d04601d443f24094e9e31ae6deec9ccb23581f75343feebaf30423359"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:77853062a2c45be16fd6b8d6de2a99278ee1d985a7bd8b103e97e41c034006d2"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:78151aa3ec21dccd5cdef6c74c3e73386dcdfaf19bced944169697d7ac7482fc"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-win32.whl", hash = "sha256:dc4926288b2a3e9fd7b50dc6a1909a13bbdadfc67d93f3374d984e56f885579d"}, + {file = 
"psycopg2_binary-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:b76bedd166805480ab069612119ea636f5ab8f8771e640ae103e05a4aae3e417"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:8532fd6e6e2dc57bcb3bc90b079c60de896d2128c5d9d6f24a63875a95a088cf"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f8544b092a29a6ddd72f3556a9fcf249ec412e10ad28be6a0c0d948924f2212"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2d423c8d8a3c82d08fe8af900ad5b613ce3632a1249fd6a223941d0735fce493"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e5afae772c00980525f6d6ecf7cbca55676296b580c0e6abb407f15f3706996"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e6f98446430fdf41bd36d4faa6cb409f5140c1c2cf58ce0bbdaf16af7d3f119"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c77e3d1862452565875eb31bdb45ac62502feabbd53429fdc39a1cc341d681ba"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:cb16c65dcb648d0a43a2521f2f0a2300f40639f6f8c1ecbc662141e4e3e1ee07"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:911dda9c487075abd54e644ccdf5e5c16773470a6a5d3826fda76699410066fb"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:57fede879f08d23c85140a360c6a77709113efd1c993923c59fde17aa27599fe"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2293b001e319ab0d869d660a704942c9e2cce19745262a8aba2115ef41a0a42a"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03ef7df18daf2c4c07e2695e8cfd5ee7f748a1d54d802330985a78d2a5a6dca9"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a602ea5aff39bb9fac6308e9c9d82b9a35c2bf288e184a816002c9fae930b77"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8359bf4791968c5a78c56103702000105501adb557f3cf772b2c207284273984"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:275ff571376626195ab95a746e6a04c7df8ea34638b99fc11160de91f2fef503"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f9b5571d33660d5009a8b3c25dc1db560206e2d2f89d3df1cb32d72c0d117d52"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:420f9bbf47a02616e8554e825208cb947969451978dceb77f95ad09c37791dae"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:4154ad09dac630a0f13f37b583eae260c6aa885d67dfbccb5b02c33f31a6d420"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a148c5d507bb9b4f2030a2025c545fccb0e1ef317393eaba42e7eabd28eb6041"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-win32.whl", hash = "sha256:68fc1f1ba168724771e38bee37d940d2865cb0f562380a1fb1ffb428b75cb692"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-win_amd64.whl", hash = "sha256:281309265596e388ef483250db3640e5f414168c5a67e9c665cafce9492eda2f"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:60989127da422b74a04345096c10d416c2b41bd7bf2a380eb541059e4e999980"}, + {file = 
"psycopg2_binary-2.9.9-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:246b123cc54bb5361588acc54218c8c9fb73068bf227a4a531d8ed56fa3ca7d6"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34eccd14566f8fe14b2b95bb13b11572f7c7d5c36da61caf414d23b91fcc5d94"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18d0ef97766055fec15b5de2c06dd8e7654705ce3e5e5eed3b6651a1d2a9a152"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d3f82c171b4ccd83bbaf35aa05e44e690113bd4f3b7b6cc54d2219b132f3ae55"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ead20f7913a9c1e894aebe47cccf9dc834e1618b7aa96155d2091a626e59c972"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ca49a8119c6cbd77375ae303b0cfd8c11f011abbbd64601167ecca18a87e7cdd"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:323ba25b92454adb36fa425dc5cf6f8f19f78948cbad2e7bc6cdf7b0d7982e59"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:1236ed0952fbd919c100bc839eaa4a39ebc397ed1c08a97fc45fee2a595aa1b3"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:729177eaf0aefca0994ce4cffe96ad3c75e377c7b6f4efa59ebf003b6d398716"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-win32.whl", hash = "sha256:804d99b24ad523a1fe18cc707bf741670332f7c7412e9d49cb5eab67e886b9b5"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-win_amd64.whl", hash = "sha256:a6cdcc3ede532f4a4b96000b6362099591ab4a3e913d70bcbac2b56c872446f7"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:72dffbd8b4194858d0941062a9766f8297e8868e1dd07a7b36212aaa90f49472"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:30dcc86377618a4c8f3b72418df92e77be4254d8f89f14b8e8f57d6d43603c0f"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:31a34c508c003a4347d389a9e6fcc2307cc2150eb516462a7a17512130de109e"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:15208be1c50b99203fe88d15695f22a5bed95ab3f84354c494bcb1d08557df67"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1873aade94b74715be2246321c8650cabf5a0d098a95bab81145ffffa4c13876"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a58c98a7e9c021f357348867f537017057c2ed7f77337fd914d0bedb35dace7"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4686818798f9194d03c9129a4d9a702d9e113a89cb03bffe08c6cf799e053291"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ebdc36bea43063116f0486869652cb2ed7032dbc59fbcb4445c4862b5c1ecf7f"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:ca08decd2697fdea0aea364b370b1249d47336aec935f87b8bbfd7da5b2ee9c1"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ac05fb791acf5e1a3e39402641827780fe44d27e72567a000412c648a85ba860"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-win32.whl", hash = "sha256:9dba73be7305b399924709b91682299794887cbbd88e38226ed9f6712eabee90"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-win_amd64.whl", hash = 
"sha256:f7ae5d65ccfbebdfa761585228eb4d0df3a8b15cfb53bd953e713e09fbb12957"}, ] [[package]] @@ -2725,35 +2735,35 @@ files = [ [[package]] name = "ray" -version = "2.7.0" +version = "2.7.1" description = "Ray provides a simple, universal API for building distributed applications." optional = true python-versions = "*" files = [ - {file = "ray-2.7.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:bc911655908b61b2e9f59b8df158fcc62cd32080c468b484b539ebf0a4111d04"}, - {file = "ray-2.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0ee8c14e1521559cd5802bfad3f0aba4a77afdfba57dd446162a7449c6e8ff68"}, - {file = "ray-2.7.0-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:ebde44af7d479ede21d1c2e68b5ccd8264e18df6e4f3c216d9e99c31e819bde6"}, - {file = "ray-2.7.0-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:b83621f5d2d4079e6ae624c3bf30046a4fefa0ea7ea5e4a4dfe4b50c580b3768"}, - {file = "ray-2.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:8e1b06abba6e227b8dde1ad861c587fb2608a6970d270e4755cd24a6f37ed565"}, - {file = "ray-2.7.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:5442d48719f033831a324f05b332d6e7181970d721e9504be2091cc9d9735394"}, - {file = "ray-2.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ca8225878cce7b9e2d0ca9668d9370893a7cee35629d11a3889a1b66a0007218"}, - {file = "ray-2.7.0-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:a3f59dbb0780f9fa11f5bf96bef853b4cb95245456d4400e1c7bf2e514d12ab2"}, - {file = "ray-2.7.0-cp311-cp311-manylinux2014_x86_64.whl", hash = "sha256:8384b3f30bc1446ef810e9e894afa03238c5ac40d3c40c0740d82f347112015d"}, - {file = "ray-2.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:8d4530e7024375505552dabd3f4441fc9ac7a5562365a81ba9afa14185433879"}, - {file = "ray-2.7.0-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:c491b8051eef82b77d136c48a23d16485c0e54233303ccf68e9fe69a06c517e6"}, - {file = "ray-2.7.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:1684c434886cb7b263cdf98ed39d75dec343e949f7b14f3385d83bfe70ee8c80"}, - {file = "ray-2.7.0-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:856a9ae164b9b0aeaad54f3e78986eb19900ed3c74e26f51b02a7d8826c97e59"}, - {file = "ray-2.7.0-cp37-cp37m-win_amd64.whl", hash = "sha256:34925a90b6239de42592bb4524dcbdc59a9c65f1f74ad4d9f97f636bd59c73d7"}, - {file = "ray-2.7.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:60db240f37d80a80492e09a8d1e29b79d034431c6fcb651401e9e2d24d850793"}, - {file = "ray-2.7.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:835155fdeb8698eae426f3d9416e6b8165197fe5c1c74e1b02a429fc7f4ddcd2"}, - {file = "ray-2.7.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:442b7568946081d38c8addbc528e7b09fc1ee25453b4800c86b7e5ba4bce9dd3"}, - {file = "ray-2.7.0-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:3825292b777b423e2cd34bf66e8e1e7701b04c6a5308f9f291ad5929b289dc47"}, - {file = "ray-2.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:ce700322662946ad5c62a39b78e81feebcb855d378c49f5df6477c22f0ac1e5a"}, - {file = "ray-2.7.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:327c23aac5dd26ee4abe6cee70320322d63fdf97c6028fbb9555724b46a8f3e3"}, - {file = "ray-2.7.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a09021d45312ab7a44109b251984718b65fbff77df0b55e30e651193cdf42bff"}, - {file = "ray-2.7.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f05fcb609962d14f4d23cc88a9d07cafa7077ce3c5d5ee99cd08a19067b7eecf"}, - {file = "ray-2.7.0-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:0e0f7dbeb4444940c72b64fdecd6f331593466914b2dffeed03ce97225acec14"}, - {file = 
"ray-2.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:29a0866316756ae18e232dd074adbf408dcdabe95d135a9a96b9a8c24393c983"}, + {file = "ray-2.7.1-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:4a2c98ab42881836894f20408ce40c0fd7fe5da7f0bc69cf22c951ccceda55ed"}, + {file = "ray-2.7.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:53800aadfc07152bc8672d5fa91bb4dc17d96b572a9bd436dd00fd2e0d07ef6a"}, + {file = "ray-2.7.1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:17a425b4a2c2098f78fd0ab3831a35a53608d36466453e90c30a6495e9dce354"}, + {file = "ray-2.7.1-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:9681a8a7bf081e2244360206f3cd80d1a6adb4dc6330a507fd8c78ebe6e57365"}, + {file = "ray-2.7.1-cp310-cp310-win_amd64.whl", hash = "sha256:148c77050ceab3c90739147bb86ac535e9590046cc36364ae9eb15469ea16fbc"}, + {file = "ray-2.7.1-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:0b0e80e26d6899820c12301626a74a209ab29373f46caf5b48c3ae3f99ec1bc7"}, + {file = "ray-2.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b5d13e910bb3449ef7b25084dcc4f0b9a763d3aa7b2fdd39e3b4d93d8c266951"}, + {file = "ray-2.7.1-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:0a6e8a736fe5294a0b0064679e59e393c66942db81fdf95804bdc1495d1f1651"}, + {file = "ray-2.7.1-cp311-cp311-manylinux2014_x86_64.whl", hash = "sha256:f4c9f8a813444bd5346756db1a6d6e09a805b28b5fb6831e91b8d1324c12a888"}, + {file = "ray-2.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:85a8b0f122e4c14d2ee354fce9651834f7ffc9b60ebdce023a5ba8ca5841a6ee"}, + {file = "ray-2.7.1-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:bfa924bbc4042e83a0f31f058f08818418307252fceeee27c4c02bc0d3c02f3f"}, + {file = "ray-2.7.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:0f5657abb376eddf6b56489082d2f94ab36597a2f25da2849e2f66476b90dcc0"}, + {file = "ray-2.7.1-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:d548e1c67a512975c4241be64a8df2153ae6c29ee2f5b08834fadcad7dfc94a4"}, + {file = "ray-2.7.1-cp37-cp37m-win_amd64.whl", hash = "sha256:1f4c09a81971cc54d95be55b9b413fd12121a37528b402d1861a8fa0b4e85509"}, + {file = "ray-2.7.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:1f6d2508d117aac0b880d26a4db65a9f90def2d688709b62e0d039879c3afc7a"}, + {file = "ray-2.7.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:32a6c0866d559d4e6c623ff220cd0790d2da1f3785073a5d0444b8f0486ff541"}, + {file = "ray-2.7.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:d035642e6033f43551a0c17e2363a392739f01df6b4072c5ed71cf3096936d33"}, + {file = "ray-2.7.1-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:a366569d1bd220a92af0dbe092821a11d1ff8ad7b00ed4f74b8a5f380e34ccc7"}, + {file = "ray-2.7.1-cp38-cp38-win_amd64.whl", hash = "sha256:6fe65dc7f83f1c617af3068d84f8c67f3371b1a48776e44ab6af54998891364c"}, + {file = "ray-2.7.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:3c1501ca56da394e07213efd5be42c2cf0a2eae68d76949d26a3133154d6d9ff"}, + {file = "ray-2.7.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:57f7e05ad275317158c447680705e046410f68d2a5992e16d07bbc2cc79da2b3"}, + {file = "ray-2.7.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b5410ae53c765108c65821fc5e5968509579f98a64d275e103408e1b068e8ca8"}, + {file = "ray-2.7.1-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:1b096abab78b63db6c1a2633f242dd8b3c51e395b574215f3cb8e47f5d7364b9"}, + {file = "ray-2.7.1-cp39-cp39-win_amd64.whl", hash = "sha256:c03fe26443598bd7ad1c22de4585daec324bc03eabc04d3c2f805d9697a554d6"}, ] [package.dependencies] @@ -2774,9 +2784,9 @@ requests = "*" [package.extras] air = 
["aiohttp (>=3.7)", "aiohttp-cors", "aiorwlock", "colorful", "fastapi", "fsspec", "gpustat (>=1.0.0)", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "numpy (>=1.20)", "opencensus", "pandas", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2)", "requests", "smart-open", "starlette", "tensorboardX (>=1.9)", "uvicorn", "virtualenv (>=20.0.24,<20.21.1)", "watchfiles"] -all = ["aiohttp (>=3.7)", "aiohttp-cors", "aiorwlock", "colorful", "dm-tree", "fastapi", "fsspec", "gpustat (>=1.0.0)", "grpcio (!=1.56.0)", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "gymnasium (==0.28.1)", "lz4", "numpy (>=1.20)", "opencensus", "opentelemetry-api", "opentelemetry-exporter-otlp", "opentelemetry-sdk", "pandas", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pyyaml", "ray-cpp (==2.7.0)", "requests", "rich", "scikit-image", "scipy", "smart-open", "starlette", "tensorboardX (>=1.9)", "typer", "uvicorn", "virtualenv (>=20.0.24,<20.21.1)", "watchfiles"] +all = ["aiohttp (>=3.7)", "aiohttp-cors", "aiorwlock", "colorful", "dm-tree", "fastapi", "fsspec", "gpustat (>=1.0.0)", "grpcio (!=1.56.0)", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "gymnasium (==0.28.1)", "lz4", "numpy (>=1.20)", "opencensus", "opentelemetry-api", "opentelemetry-exporter-otlp", "opentelemetry-sdk", "pandas", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pyyaml", "ray-cpp (==2.7.1)", "requests", "rich", "scikit-image", "scipy", "smart-open", "starlette", "tensorboardX (>=1.9)", "typer", "uvicorn", "virtualenv (>=20.0.24,<20.21.1)", "watchfiles"] client = ["grpcio (!=1.56.0)"] -cpp = ["ray-cpp (==2.7.0)"] +cpp = ["ray-cpp (==2.7.1)"] data = ["fsspec", "numpy (>=1.20)", "pandas (>=1.3)", "pyarrow (>=6.0.1)"] default = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "gpustat (>=1.0.0)", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "opencensus", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pydantic (<2)", "requests", "smart-open", "virtualenv (>=20.0.24,<20.21.1)"] observability = ["opentelemetry-api", "opentelemetry-exporter-otlp", "opentelemetry-sdk"] @@ -3116,6 +3126,14 @@ files = [ {file = "SQLAlchemy-2.0.21-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b69f1f754d92eb1cc6b50938359dead36b96a1dcf11a8670bff65fd9b21a4b09"}, {file = "SQLAlchemy-2.0.21-cp311-cp311-win32.whl", hash = "sha256:af520a730d523eab77d754f5cf44cc7dd7ad2d54907adeb3233177eeb22f271b"}, {file = "SQLAlchemy-2.0.21-cp311-cp311-win_amd64.whl", hash = "sha256:141675dae56522126986fa4ca713739d00ed3a6f08f3c2eb92c39c6dfec463ce"}, + {file = "SQLAlchemy-2.0.21-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:56628ca27aa17b5890391ded4e385bf0480209726f198799b7e980c6bd473bd7"}, + {file = "SQLAlchemy-2.0.21-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:db726be58837fe5ac39859e0fa40baafe54c6d54c02aba1d47d25536170b690f"}, + {file = "SQLAlchemy-2.0.21-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7421c1bfdbb7214313919472307be650bd45c4dc2fcb317d64d078993de045b"}, + {file = "SQLAlchemy-2.0.21-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:632784f7a6f12cfa0e84bf2a5003b07660addccf5563c132cd23b7cc1d7371a9"}, + {file = "SQLAlchemy-2.0.21-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:f6f7276cf26145a888f2182a98f204541b519d9ea358a65d82095d9c9e22f917"}, + {file = "SQLAlchemy-2.0.21-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:2a1f7ffac934bc0ea717fa1596f938483fb8c402233f9b26679b4f7b38d6ab6e"}, + {file = "SQLAlchemy-2.0.21-cp312-cp312-win32.whl", hash = "sha256:bfece2f7cec502ec5f759bbc09ce711445372deeac3628f6fa1c16b7fb45b682"}, + {file = "SQLAlchemy-2.0.21-cp312-cp312-win_amd64.whl", hash = "sha256:526b869a0f4f000d8d8ee3409d0becca30ae73f494cbb48801da0129601f72c6"}, {file = "SQLAlchemy-2.0.21-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7614f1eab4336df7dd6bee05bc974f2b02c38d3d0c78060c5faa4cd1ca2af3b8"}, {file = "SQLAlchemy-2.0.21-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d59cb9e20d79686aa473e0302e4a82882d7118744d30bb1dfb62d3c47141b3ec"}, {file = "SQLAlchemy-2.0.21-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a95aa0672e3065d43c8aa80080cdd5cc40fe92dc873749e6c1cf23914c4b83af"}, @@ -3573,4 +3591,4 @@ zstandard = ["zstandard"] [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "dde78de46b3fd5f6f251dc5a4b89afee2ed47a0c0fa2e6249a2c3f1772ec7f12" +content-hash = "906e13ee9f41661de9fcf3ccd32afa16c9d604e176c52ec627acd6a797b1db49" diff --git a/pyiceberg/__init__.py b/pyiceberg/__init__.py index c95a617a26..8a72b22c56 100644 --- a/pyiceberg/__init__.py +++ b/pyiceberg/__init__.py @@ -15,4 +15,4 @@ # specific language governing permissions and limitations # under the License. -__version__ = "0.5.0" +__version__ = "0.5.1" diff --git a/pyiceberg/avro/file.py b/pyiceberg/avro/file.py index dc843f6dc0..4985c6fb60 100644 --- a/pyiceberg/avro/file.py +++ b/pyiceberg/avro/file.py @@ -38,7 +38,7 @@ from pyiceberg.avro.decoder import BinaryDecoder, new_decoder from pyiceberg.avro.encoder import BinaryEncoder from pyiceberg.avro.reader import Reader -from pyiceberg.avro.resolver import construct_reader, construct_writer, resolve +from pyiceberg.avro.resolver import construct_reader, construct_writer, resolve_reader, resolve_writer from pyiceberg.avro.writer import Writer from pyiceberg.io import InputFile, OutputFile, OutputStream from pyiceberg.schema import Schema @@ -172,7 +172,7 @@ def __enter__(self) -> AvroFile[D]: if not self.read_schema: self.read_schema = self.schema - self.reader = resolve(self.schema, self.read_schema, self.read_types, self.read_enums) + self.reader = resolve_reader(self.schema, self.read_schema, self.read_types, self.read_enums) return self @@ -222,18 +222,29 @@ def _read_header(self) -> AvroFileHeader: class AvroOutputFile(Generic[D]): output_file: OutputFile output_stream: OutputStream - schema: Schema + file_schema: Schema schema_name: str encoder: BinaryEncoder sync_bytes: bytes writer: Writer - def __init__(self, output_file: OutputFile, schema: Schema, schema_name: str, metadata: Dict[str, str] = EMPTY_DICT) -> None: + def __init__( + self, + output_file: OutputFile, + file_schema: Schema, + schema_name: str, + record_schema: Optional[Schema] = None, + metadata: Dict[str, str] = EMPTY_DICT, + ) -> None: self.output_file = output_file - self.schema = schema + self.file_schema = file_schema self.schema_name = schema_name self.sync_bytes = os.urandom(SYNC_SIZE) - self.writer = construct_writer(self.schema) + self.writer = ( + construct_writer(file_schema=self.file_schema) + if record_schema is None + else resolve_writer(record_schema=record_schema, file_schema=self.file_schema) + ) self.metadata = metadata def __enter__(self) -> AvroOutputFile[D]: @@ -247,7 +258,6 @@ def __enter__(self) -> AvroOutputFile[D]: self.encoder = BinaryEncoder(self.output_stream) self._write_header() - self.writer = 
construct_writer(self.schema) return self @@ -258,7 +268,7 @@ def __exit__( self.output_stream.close() def _write_header(self) -> None: - json_schema = json.dumps(AvroSchemaConversion().iceberg_to_avro(self.schema, schema_name=self.schema_name)) + json_schema = json.dumps(AvroSchemaConversion().iceberg_to_avro(self.file_schema, schema_name=self.schema_name)) meta = {**self.metadata, _SCHEMA_KEY: json_schema, _CODEC_KEY: "null"} header = AvroFileHeader(magic=MAGIC, meta=meta, sync=self.sync_bytes) construct_writer(META_SCHEMA).write(self.encoder, header) diff --git a/pyiceberg/avro/resolver.py b/pyiceberg/avro/resolver.py index 8b2daeb7c7..faf4dd0501 100644 --- a/pyiceberg/avro/resolver.py +++ b/pyiceberg/avro/resolver.py @@ -53,6 +53,7 @@ BooleanWriter, DateWriter, DecimalWriter, + DefaultWriter, DoubleWriter, FixedWriter, FloatWriter, @@ -112,11 +113,12 @@ def construct_reader( Args: file_schema (Schema | IcebergType): The schema of the Avro file. + read_types (Dict[int, Callable[..., StructProtocol]]): Constructors for structs for certain field IDs. Raises: NotImplementedError: If attempting to resolve an unrecognized object type. """ - return resolve(file_schema, file_schema, read_types) + return resolve_reader(file_schema, file_schema, read_types) def construct_writer(file_schema: Union[Schema, IcebergType]) -> Writer: @@ -128,7 +130,7 @@ def construct_writer(file_schema: Union[Schema, IcebergType]) -> Writer: Raises: NotImplementedError: If attempting to resolve an unrecognized object type. """ - return visit(file_schema, ConstructWriter()) + return visit(file_schema, CONSTRUCT_WRITER_VISITOR) class ConstructWriter(SchemaVisitorPerPrimitiveType[Writer]): @@ -138,7 +140,7 @@ def schema(self, schema: Schema, struct_result: Writer) -> Writer: return struct_result def struct(self, struct: StructType, field_results: List[Writer]) -> Writer: - return StructWriter(tuple(field_results)) + return StructWriter(tuple((pos, result) for pos, result in enumerate(field_results))) def field(self, field: NestedField, field_result: Writer) -> Writer: return field_result if field.required else OptionWriter(field_result) @@ -192,7 +194,28 @@ def visit_binary(self, binary_type: BinaryType) -> Writer: return BinaryWriter() -def resolve( +CONSTRUCT_WRITER_VISITOR = ConstructWriter() + + +def resolve_writer( + record_schema: Union[Schema, IcebergType], + file_schema: Union[Schema, IcebergType], +) -> Writer: + """Resolve the record schema against the file schema to produce a writer. + + Args: + record_schema (Schema | IcebergType): The schema of the record in memory. + file_schema (Schema | IcebergType): The schema of the file that will be written. + + Raises: + NotImplementedError: If attempting to resolve an unrecognized object type. + """ + if record_schema == file_schema: + return construct_writer(file_schema) + return visit_with_partner(file_schema, record_schema, WriteSchemaResolver(), SchemaPartnerAccessor()) # type: ignore + + +def resolve_reader( file_schema: Union[Schema, IcebergType], read_schema: Union[Schema, IcebergType], read_types: Dict[int, Callable[..., StructProtocol]] = EMPTY_DICT, @@ -210,7 +233,7 @@ def resolve( NotImplementedError: If attempting to resolve an unrecognized object type. 
""" return visit_with_partner( - file_schema, read_schema, SchemaResolver(read_types, read_enums), SchemaPartnerAccessor() + file_schema, read_schema, ReadSchemaResolver(read_types, read_enums), SchemaPartnerAccessor() ) # type: ignore @@ -233,7 +256,95 @@ def skip(self, decoder: BinaryDecoder) -> None: pass -class SchemaResolver(PrimitiveWithPartnerVisitor[IcebergType, Reader]): +class WriteSchemaResolver(PrimitiveWithPartnerVisitor[IcebergType, Writer]): + def schema(self, file_schema: Schema, record_schema: Optional[IcebergType], result: Writer) -> Writer: + return result + + def struct(self, file_schema: StructType, record_struct: Optional[IcebergType], file_writers: List[Writer]) -> Writer: + if not isinstance(record_struct, StructType): + raise ResolveError(f"File/write schema are not aligned for struct, got {record_struct}") + + record_struct_positions: Dict[int, int] = {field.field_id: pos for pos, field in enumerate(record_struct.fields)} + results: List[Tuple[Optional[int], Writer]] = [] + + for writer, file_field in zip(file_writers, file_schema.fields): + if file_field.field_id in record_struct_positions: + results.append((record_struct_positions[file_field.field_id], writer)) + elif file_field.required: + # There is a default value + if file_field.write_default is not None: + # The field is not in the record, but there is a write default value + results.append((None, DefaultWriter(writer=writer, value=file_field.write_default))) # type: ignore + elif file_field.required: + raise ValueError(f"Field is required, and there is no write default: {file_field}") + else: + results.append((None, writer)) + + return StructWriter(field_writers=tuple(results)) + + def field(self, file_field: NestedField, record_type: Optional[IcebergType], field_writer: Writer) -> Writer: + return field_writer if file_field.required else OptionWriter(field_writer) + + def list(self, file_list_type: ListType, file_list: Optional[IcebergType], element_writer: Writer) -> Writer: + return ListWriter(element_writer if file_list_type.element_required else OptionWriter(element_writer)) + + def map( + self, file_map_type: MapType, file_primitive: Optional[IcebergType], key_writer: Writer, value_writer: Writer + ) -> Writer: + return MapWriter(key_writer, value_writer if file_map_type.value_required else OptionWriter(value_writer)) + + def primitive(self, file_primitive: PrimitiveType, record_primitive: Optional[IcebergType]) -> Writer: + if record_primitive is not None: + # ensure that the type can be projected to the expected + if file_primitive != record_primitive: + promote(record_primitive, file_primitive) + + return super().primitive(file_primitive, file_primitive) + + def visit_boolean(self, boolean_type: BooleanType, partner: Optional[IcebergType]) -> Writer: + return BooleanWriter() + + def visit_integer(self, integer_type: IntegerType, partner: Optional[IcebergType]) -> Writer: + return IntegerWriter() + + def visit_long(self, long_type: LongType, partner: Optional[IcebergType]) -> Writer: + return IntegerWriter() + + def visit_float(self, float_type: FloatType, partner: Optional[IcebergType]) -> Writer: + return FloatWriter() + + def visit_double(self, double_type: DoubleType, partner: Optional[IcebergType]) -> Writer: + return DoubleWriter() + + def visit_decimal(self, decimal_type: DecimalType, partner: Optional[IcebergType]) -> Writer: + return DecimalWriter(decimal_type.precision, decimal_type.scale) + + def visit_date(self, date_type: DateType, partner: Optional[IcebergType]) -> Writer: + return 
DateWriter() + + def visit_time(self, time_type: TimeType, partner: Optional[IcebergType]) -> Writer: + return TimeWriter() + + def visit_timestamp(self, timestamp_type: TimestampType, partner: Optional[IcebergType]) -> Writer: + return TimestampWriter() + + def visit_timestamptz(self, timestamptz_type: TimestamptzType, partner: Optional[IcebergType]) -> Writer: + return TimestamptzWriter() + + def visit_string(self, string_type: StringType, partner: Optional[IcebergType]) -> Writer: + return StringWriter() + + def visit_uuid(self, uuid_type: UUIDType, partner: Optional[IcebergType]) -> Writer: + return UUIDWriter() + + def visit_fixed(self, fixed_type: FixedType, partner: Optional[IcebergType]) -> Writer: + return FixedWriter(len(fixed_type)) + + def visit_binary(self, binary_type: BinaryType, partner: Optional[IcebergType]) -> Writer: + return BinaryWriter() + + +class ReadSchemaResolver(PrimitiveWithPartnerVisitor[IcebergType, Reader]): __slots__ = ("read_types", "read_enums", "context") read_types: Dict[int, Callable[..., StructProtocol]] read_enums: Dict[int, Callable[..., Enum]] @@ -279,7 +390,7 @@ def struct(self, struct: StructType, expected_struct: Optional[IcebergType], fie for field, result_reader in zip(struct.fields, field_readers) ] - file_fields = {field.field_id: field for field in struct.fields} + file_fields = {field.field_id for field in struct.fields} for pos, read_field in enumerate(expected_struct.fields): if read_field.field_id not in file_fields: if isinstance(read_field, NestedField) and read_field.initial_default is not None: diff --git a/pyiceberg/avro/writer.py b/pyiceberg/avro/writer.py index ad6a755614..4e3d3d476a 100644 --- a/pyiceberg/avro/writer.py +++ b/pyiceberg/avro/writer.py @@ -29,6 +29,7 @@ Any, Dict, List, + Optional, Tuple, ) from uuid import UUID @@ -39,6 +40,7 @@ from pyiceberg.utils.singleton import Singleton +@dataclass(frozen=True) class Writer(Singleton): @abstractmethod def write(self, encoder: BinaryEncoder, val: Any) -> Any: @@ -49,16 +51,13 @@ def __repr__(self) -> str: return f"{self.__class__.__name__}()" -class NoneWriter(Writer): - def write(self, _: BinaryEncoder, __: Any) -> None: - pass - - +@dataclass(frozen=True) class BooleanWriter(Writer): def write(self, encoder: BinaryEncoder, val: bool) -> None: encoder.write_boolean(val) +@dataclass(frozen=True) class IntegerWriter(Writer): """Longs and ints are encoded the same way, and there is no long in Python.""" @@ -66,41 +65,49 @@ def write(self, encoder: BinaryEncoder, val: int) -> None: encoder.write_int(val) +@dataclass(frozen=True) class FloatWriter(Writer): def write(self, encoder: BinaryEncoder, val: float) -> None: encoder.write_float(val) +@dataclass(frozen=True) class DoubleWriter(Writer): def write(self, encoder: BinaryEncoder, val: float) -> None: encoder.write_double(val) +@dataclass(frozen=True) class DateWriter(Writer): def write(self, encoder: BinaryEncoder, val: int) -> None: encoder.write_int(val) +@dataclass(frozen=True) class TimeWriter(Writer): def write(self, encoder: BinaryEncoder, val: int) -> None: encoder.write_int(val) +@dataclass(frozen=True) class TimestampWriter(Writer): def write(self, encoder: BinaryEncoder, val: int) -> None: encoder.write_int(val) +@dataclass(frozen=True) class TimestamptzWriter(Writer): def write(self, encoder: BinaryEncoder, val: int) -> None: encoder.write_int(val) +@dataclass(frozen=True) class StringWriter(Writer): def write(self, encoder: BinaryEncoder, val: Any) -> None: encoder.write_utf8(val) +@dataclass(frozen=True) class 
UUIDWriter(Writer): def write(self, encoder: BinaryEncoder, val: UUID) -> None: encoder.write(val.bytes) @@ -124,6 +131,7 @@ def __repr__(self) -> str: return f"FixedWriter({self._len})" +@dataclass(frozen=True) class BinaryWriter(Writer): """Variable byte length writer.""" @@ -158,11 +166,12 @@ def write(self, encoder: BinaryEncoder, val: Any) -> None: @dataclass(frozen=True) class StructWriter(Writer): - field_writers: Tuple[Writer, ...] = dataclassfield() + field_writers: Tuple[Tuple[Optional[int], Writer], ...] = dataclassfield() def write(self, encoder: BinaryEncoder, val: Record) -> None: - for writer, value in zip(self.field_writers, val.record_fields()): - writer.write(encoder, value) + for pos, writer in self.field_writers: + # When pos is None, then it is a default value + writer.write(encoder, val[pos] if pos is not None else None) def __eq__(self, other: Any) -> bool: """Implement the equality operator for this object.""" @@ -170,7 +179,7 @@ def __eq__(self, other: Any) -> bool: def __repr__(self) -> str: """Return string representation of this object.""" - return f"StructWriter({','.join(repr(field) for field in self.field_writers)})" + return f"StructWriter(tuple(({','.join(repr(field) for field in self.field_writers)})))" def __hash__(self) -> int: """Return the hash of the writer as hash of this object.""" @@ -201,3 +210,12 @@ def write(self, encoder: BinaryEncoder, val: Dict[Any, Any]) -> None: self.value_writer.write(encoder, v) if len(val) > 0: encoder.write_int(0) + + +@dataclass(frozen=True) +class DefaultWriter(Writer): + writer: Writer + value: Any + + def write(self, encoder: BinaryEncoder, _: Any) -> None: + self.writer.write(encoder, self.value) diff --git a/pyiceberg/catalog/rest.py b/pyiceberg/catalog/rest.py index 0023e18984..20875d313d 100644 --- a/pyiceberg/catalog/rest.py +++ b/pyiceberg/catalog/rest.py @@ -62,6 +62,7 @@ CommitTableRequest, CommitTableResponse, Table, + TableIdentifier, TableMetadata, ) from pyiceberg.table.sorting import UNSORTED_SORT_ORDER, SortOrder @@ -301,7 +302,10 @@ def _fetch_config(self) -> None: # Update URI based on overrides self.uri = config[URI] - def _split_identifier_for_path(self, identifier: Union[str, Identifier]) -> Properties: + def _split_identifier_for_path(self, identifier: Union[str, Identifier, TableIdentifier]) -> Properties: + if isinstance(identifier, TableIdentifier): + return {"namespace": NAMESPACE_SEPARATOR.join(identifier.namespace.root[1:]), "table": identifier.name} + identifier_tuple = self.identifier_to_tuple(identifier) if len(identifier_tuple) <= 1: raise NoSuchTableError(f"Missing namespace or invalid identifier: {'.'.join(identifier_tuple)}") @@ -315,6 +319,10 @@ def _split_identifier_for_json(self, identifier: Union[str, Identifier]) -> Dict def _handle_non_200_response(self, exc: HTTPError, error_handler: Dict[int, Type[Exception]]) -> None: exception: Type[Exception] + + if exc.response is None: + raise ValueError("Did not receive a response") + code = exc.response.status_code if code in error_handler: exception = error_handler[code] diff --git a/pyiceberg/expressions/parser.py b/pyiceberg/expressions/parser.py index d6d5bdb794..a5452f5001 100644 --- a/pyiceberg/expressions/parser.py +++ b/pyiceberg/expressions/parser.py @@ -233,11 +233,11 @@ def handle_not(result: ParseResults) -> Not: def handle_and(result: ParseResults) -> And: - return And(result[0][0], result[0][1]) + return And(*result[0]) def handle_or(result: ParseResults) -> Or: - return Or(result[0][0], result[0][1]) + return 
Or(*result[0]) boolean_expression = infix_notation( diff --git a/pyiceberg/io/pyarrow.py b/pyiceberg/io/pyarrow.py index f2d60e7534..29d4a4b170 100644 --- a/pyiceberg/io/pyarrow.py +++ b/pyiceberg/io/pyarrow.py @@ -1099,21 +1099,70 @@ def map_value_partner(self, partner_map: Optional[pa.Array]) -> Optional[pa.Arra return partner_map.items if isinstance(partner_map, pa.MapArray) else None -_PRIMITIVE_TO_PHYSICAL = { - BooleanType(): "BOOLEAN", - IntegerType(): "INT32", - LongType(): "INT64", - FloatType(): "FLOAT", - DoubleType(): "DOUBLE", - DateType(): "INT32", - TimeType(): "INT64", - TimestampType(): "INT64", - TimestamptzType(): "INT64", - StringType(): "BYTE_ARRAY", - UUIDType(): "FIXED_LEN_BYTE_ARRAY", - BinaryType(): "BYTE_ARRAY", -} -_PHYSICAL_TYPES = set(_PRIMITIVE_TO_PHYSICAL.values()).union({"INT96"}) +def _primitive_to_phyisical(iceberg_type: PrimitiveType) -> str: + return visit(iceberg_type, _PRIMITIVE_TO_PHYISCAL_TYPE_VISITOR) + + +class PrimitiveToPhysicalType(SchemaVisitorPerPrimitiveType[str]): + def schema(self, schema: Schema, struct_result: str) -> str: + raise ValueError(f"Expected primitive-type, got: {schema}") + + def struct(self, struct: StructType, field_results: List[str]) -> str: + raise ValueError(f"Expected primitive-type, got: {struct}") + + def field(self, field: NestedField, field_result: str) -> str: + raise ValueError(f"Expected primitive-type, got: {field}") + + def list(self, list_type: ListType, element_result: str) -> str: + raise ValueError(f"Expected primitive-type, got: {list_type}") + + def map(self, map_type: MapType, key_result: str, value_result: str) -> str: + raise ValueError(f"Expected primitive-type, got: {map_type}") + + def visit_fixed(self, fixed_type: FixedType) -> str: + return "FIXED_LEN_BYTE_ARRAY" + + def visit_decimal(self, decimal_type: DecimalType) -> str: + return "FIXED_LEN_BYTE_ARRAY" + + def visit_boolean(self, boolean_type: BooleanType) -> str: + return "BOOLEAN" + + def visit_integer(self, integer_type: IntegerType) -> str: + return "INT32" + + def visit_long(self, long_type: LongType) -> str: + return "INT64" + + def visit_float(self, float_type: FloatType) -> str: + return "FLOAT" + + def visit_double(self, double_type: DoubleType) -> str: + return "DOUBLE" + + def visit_date(self, date_type: DateType) -> str: + return "INT32" + + def visit_time(self, time_type: TimeType) -> str: + return "INT64" + + def visit_timestamp(self, timestamp_type: TimestampType) -> str: + return "INT64" + + def visit_timestamptz(self, timestamptz_type: TimestamptzType) -> str: + return "INT64" + + def visit_string(self, string_type: StringType) -> str: + return "BYTE_ARRAY" + + def visit_uuid(self, uuid_type: UUIDType) -> str: + return "FIXED_LEN_BYTE_ARRAY" + + def visit_binary(self, binary_type: BinaryType) -> str: + return "BYTE_ARRAY" + + +_PRIMITIVE_TO_PHYISCAL_TYPE_VISITOR = PrimitiveToPhysicalType() class StatsAggregator: @@ -1126,13 +1175,7 @@ def __init__(self, iceberg_type: PrimitiveType, physical_type_string: str, trunc self.current_max = None self.trunc_length = trunc_length - if physical_type_string not in _PHYSICAL_TYPES: - raise ValueError(f"Unknown physical type {physical_type_string}") - - if physical_type_string == "INT96": - raise NotImplementedError("Statistics not implemented for INT96 physical type") - - expected_physical_type = _PRIMITIVE_TO_PHYSICAL[iceberg_type] + expected_physical_type = _primitive_to_phyisical(iceberg_type) if expected_physical_type != physical_type_string: raise ValueError( f"Unexpected physical 
type {physical_type_string} for {iceberg_type}, expected {expected_physical_type}" diff --git a/pyiceberg/manifest.py b/pyiceberg/manifest.py index 54bd951db5..ec7c218b1c 100644 --- a/pyiceberg/manifest.py +++ b/pyiceberg/manifest.py @@ -58,6 +58,7 @@ UNASSIGNED_SEQ = -1 DEFAULT_BLOCK_SIZE = 67108864 # 64 * 1024 * 1024 +DEFAULT_READ_VERSION: Literal[2] = 2 class DataFileContent(int, Enum): @@ -99,101 +100,185 @@ def __repr__(self) -> str: return f"FileFormat.{self.name}" -DATA_FILE_TYPE_V1 = StructType( - NestedField( - field_id=134, - name="content", - field_type=IntegerType(), - required=False, - doc="Contents of the file: 0=data, 1=position deletes, 2=equality deletes", - initial_default=DataFileContent.DATA, +DATA_FILE_TYPE: Dict[int, StructType] = { + 1: StructType( + NestedField(field_id=100, name="file_path", field_type=StringType(), required=True, doc="Location URI with FS scheme"), + NestedField( + field_id=101, + name="file_format", + field_type=StringType(), + required=True, + doc="File format name: avro, orc, or parquet", + ), + NestedField( + field_id=102, + name="partition", + field_type=StructType(), + required=True, + doc="Partition data tuple, schema based on the partition spec", + ), + NestedField(field_id=103, name="record_count", field_type=LongType(), required=True, doc="Number of records in the file"), + NestedField( + field_id=104, name="file_size_in_bytes", field_type=LongType(), required=True, doc="Total file size in bytes" + ), + NestedField( + field_id=105, + name="block_size_in_bytes", + field_type=LongType(), + required=True, + doc="Deprecated. Always write a default in v1. Do not write in v2.", + write_default=DEFAULT_BLOCK_SIZE, + ), + NestedField( + field_id=108, + name="column_sizes", + field_type=MapType(key_id=117, key_type=IntegerType(), value_id=118, value_type=LongType()), + required=False, + doc="Map of column id to total size on disk", + ), + NestedField( + field_id=109, + name="value_counts", + field_type=MapType(key_id=119, key_type=IntegerType(), value_id=120, value_type=LongType()), + required=False, + doc="Map of column id to total count, including null and NaN", + ), + NestedField( + field_id=110, + name="null_value_counts", + field_type=MapType(key_id=121, key_type=IntegerType(), value_id=122, value_type=LongType()), + required=False, + doc="Map of column id to null value count", + ), + NestedField( + field_id=137, + name="nan_value_counts", + field_type=MapType(key_id=138, key_type=IntegerType(), value_id=139, value_type=LongType()), + required=False, + doc="Map of column id to number of NaN values in the column", + ), + NestedField( + field_id=125, + name="lower_bounds", + field_type=MapType(key_id=126, key_type=IntegerType(), value_id=127, value_type=BinaryType()), + required=False, + doc="Map of column id to lower bound", + ), + NestedField( + field_id=128, + name="upper_bounds", + field_type=MapType(key_id=129, key_type=IntegerType(), value_id=130, value_type=BinaryType()), + required=False, + doc="Map of column id to upper bound", + ), + NestedField( + field_id=131, name="key_metadata", field_type=BinaryType(), required=False, doc="Encryption key metadata blob" + ), + NestedField( + field_id=132, + name="split_offsets", + field_type=ListType(element_id=133, element_type=LongType(), element_required=True), + required=False, + doc="Splittable offsets", + ), + NestedField(field_id=140, name="sort_order_id", field_type=IntegerType(), required=False, doc="Sort order ID"), ), - NestedField(field_id=100, name="file_path", 
field_type=StringType(), required=True, doc="Location URI with FS scheme"), - NestedField( - field_id=101, - name="file_format", - field_type=StringType(), - required=True, - doc="File format name: avro, orc, or parquet", + 2: StructType( + NestedField( + field_id=134, + name="content", + field_type=IntegerType(), + required=True, + doc="Contents of the file: 0=data, 1=position deletes, 2=equality deletes", + initial_default=DataFileContent.DATA, + ), + NestedField(field_id=100, name="file_path", field_type=StringType(), required=True, doc="Location URI with FS scheme"), + NestedField( + field_id=101, + name="file_format", + field_type=StringType(), + required=True, + doc="File format name: avro, orc, or parquet", + ), + NestedField( + field_id=102, + name="partition", + field_type=StructType(), + required=True, + doc="Partition data tuple, schema based on the partition spec", + ), + NestedField(field_id=103, name="record_count", field_type=LongType(), required=True, doc="Number of records in the file"), + NestedField( + field_id=104, name="file_size_in_bytes", field_type=LongType(), required=True, doc="Total file size in bytes" + ), + NestedField( + field_id=108, + name="column_sizes", + field_type=MapType(key_id=117, key_type=IntegerType(), value_id=118, value_type=LongType()), + required=False, + doc="Map of column id to total size on disk", + ), + NestedField( + field_id=109, + name="value_counts", + field_type=MapType(key_id=119, key_type=IntegerType(), value_id=120, value_type=LongType()), + required=False, + doc="Map of column id to total count, including null and NaN", + ), + NestedField( + field_id=110, + name="null_value_counts", + field_type=MapType(key_id=121, key_type=IntegerType(), value_id=122, value_type=LongType()), + required=False, + doc="Map of column id to null value count", + ), + NestedField( + field_id=137, + name="nan_value_counts", + field_type=MapType(key_id=138, key_type=IntegerType(), value_id=139, value_type=LongType()), + required=False, + doc="Map of column id to number of NaN values in the column", + ), + NestedField( + field_id=125, + name="lower_bounds", + field_type=MapType(key_id=126, key_type=IntegerType(), value_id=127, value_type=BinaryType()), + required=False, + doc="Map of column id to lower bound", + ), + NestedField( + field_id=128, + name="upper_bounds", + field_type=MapType(key_id=129, key_type=IntegerType(), value_id=130, value_type=BinaryType()), + required=False, + doc="Map of column id to upper bound", + ), + NestedField( + field_id=131, name="key_metadata", field_type=BinaryType(), required=False, doc="Encryption key metadata blob" + ), + NestedField( + field_id=132, + name="split_offsets", + field_type=ListType(element_id=133, element_type=LongType(), element_required=True), + required=False, + doc="Splittable offsets", + ), + NestedField( + field_id=135, + name="equality_ids", + field_type=ListType(element_id=136, element_type=LongType(), element_required=True), + required=False, + doc="Field ids used to determine row equality in equality delete files.", + ), + NestedField( + field_id=140, + name="sort_order_id", + field_type=IntegerType(), + required=False, + doc="ID representing sort order for this file", + ), + ), ), - NestedField( - field_id=102, - name="partition", - field_type=StructType(), - required=True, - doc="Partition data tuple, schema based on the partition spec", - ), - NestedField(field_id=103, name="record_count", field_type=LongType(), required=True, doc="Number of records in the file"), - NestedField(field_id=104, name="file_size_in_bytes", 
field_type=LongType(), required=True, doc="Total file size in bytes"), - NestedField( - field_id=105, - name="block_size_in_bytes", - field_type=LongType(), - required=False, - doc="Deprecated. Always write a default in v1. Do not write in v2.", - ), - NestedField( - field_id=108, - name="column_sizes", - field_type=MapType(key_id=117, key_type=IntegerType(), value_id=118, value_type=LongType()), - required=False, - doc="Map of column id to total size on disk", - ), - NestedField( - field_id=109, - name="value_counts", - field_type=MapType(key_id=119, key_type=IntegerType(), value_id=120, value_type=LongType()), - required=False, - doc="Map of column id to total count, including null and NaN", - ), - NestedField( - field_id=110, - name="null_value_counts", - field_type=MapType(key_id=121, key_type=IntegerType(), value_id=122, value_type=LongType()), - required=False, - doc="Map of column id to null value count", - ), - NestedField( - field_id=137, - name="nan_value_counts", - field_type=MapType(key_id=138, key_type=IntegerType(), value_id=139, value_type=LongType()), - required=False, - doc="Map of column id to number of NaN values in the column", - ), - NestedField( - field_id=125, - name="lower_bounds", - field_type=MapType(key_id=126, key_type=IntegerType(), value_id=127, value_type=BinaryType()), - required=False, - doc="Map of column id to lower bound", - ), - NestedField( - field_id=128, - name="upper_bounds", - field_type=MapType(key_id=129, key_type=IntegerType(), value_id=130, value_type=BinaryType()), - required=False, - doc="Map of column id to upper bound", - ), - NestedField(field_id=131, name="key_metadata", field_type=BinaryType(), required=False, doc="Encryption key metadata blob"), - NestedField( - field_id=132, - name="split_offsets", - field_type=ListType(element_id=133, element_type=LongType(), element_required=True), - required=False, - doc="Splittable offsets", - ), - NestedField( - field_id=135, - name="equality_ids", - field_type=ListType(element_id=136, element_type=LongType(), element_required=True), - required=False, - doc="Equality comparison field IDs", - ), - NestedField(field_id=140, name="sort_order_id", field_type=IntegerType(), required=False, doc="Sort order ID"), - NestedField(field_id=141, name="spec_id", field_type=IntegerType(), required=False, doc="Partition spec ID"), -) - -DATA_FILE_TYPE_V2 = StructType(*[field for field in DATA_FILE_TYPE_V1.fields if field.field_id != 105]) +} @singledispatch @@ -238,7 +323,7 @@ def data_file_with_partition(partition_type: StructType, format_version: Literal ) if field.field_id == 102 else field - for field in (DATA_FILE_TYPE_V1.fields if format_version == 1 else DATA_FILE_TYPE_V2.fields) + for field in DATA_FILE_TYPE[format_version].fields ] ) @@ -251,7 +336,6 @@ class DataFile(Record): "partition", "record_count", "file_size_in_bytes", - "block_size_in_bytes", "column_sizes", "value_counts", "null_value_counts", @@ -270,7 +354,6 @@ class DataFile(Record): partition: Record record_count: int file_size_in_bytes: int - block_size_in_bytes: Optional[int] column_sizes: Dict[int, int] value_counts: Dict[int, int] null_value_counts: Dict[int, int] @@ -290,10 +373,10 @@ def __setattr__(self, name: str, value: Any) -> None: value = FileFormat[value] super().__setattr__(name, value) - def __init__(self, format_version: Literal[1, 2] = 1, *data: Any, **named_data: Any) -> None: + def __init__(self, format_version: Literal[1, 2] = DEFAULT_READ_VERSION, *data: Any, **named_data: Any) -> None: super().__init__( *data, - 
**{"struct": DATA_FILE_TYPE_V1 if format_version == 1 else DATA_FILE_TYPE_V2, **named_data}, + **{"struct": DATA_FILE_TYPE[format_version], **named_data}, ) def __hash__(self) -> int: @@ -308,22 +391,29 @@ def __eq__(self, other: Any) -> bool: return self.file_path == other.file_path if isinstance(other, DataFile) else False -MANIFEST_ENTRY_SCHEMA = Schema( - NestedField(0, "status", IntegerType(), required=True), - NestedField(1, "snapshot_id", LongType(), required=False), - NestedField(3, "data_sequence_number", LongType(), required=False), - NestedField(4, "file_sequence_number", LongType(), required=False), - NestedField(2, "data_file", DATA_FILE_TYPE_V1, required=True), -) +MANIFEST_ENTRY_SCHEMAS = { + 1: Schema( + NestedField(0, "status", IntegerType(), required=True), + NestedField(1, "snapshot_id", LongType(), required=True), + NestedField(2, "data_file", DATA_FILE_TYPE[1], required=True), + ), + 2: Schema( + NestedField(0, "status", IntegerType(), required=True), + NestedField(1, "snapshot_id", LongType(), required=False), + NestedField(3, "data_sequence_number", LongType(), required=False), + NestedField(4, "file_sequence_number", LongType(), required=False), + NestedField(2, "data_file", DATA_FILE_TYPE[2], required=True), + ), +} -MANIFEST_ENTRY_SCHEMA_STRUCT = MANIFEST_ENTRY_SCHEMA.as_struct() +MANIFEST_ENTRY_SCHEMAS_STRUCT = {format_version: schema.as_struct() for format_version, schema in MANIFEST_ENTRY_SCHEMAS.items()} -def manifest_entry_schema_with_data_file(data_file: StructType) -> Schema: +def manifest_entry_schema_with_data_file(format_version: Literal[1, 2], data_file: StructType) -> Schema: return Schema( *[ NestedField(2, "data_file", data_file, required=True) if field.field_id == 2 else field - for field in MANIFEST_ENTRY_SCHEMA.fields + for field in MANIFEST_ENTRY_SCHEMAS[format_version].fields ] ) @@ -337,7 +427,7 @@ class ManifestEntry(Record): data_file: DataFile def __init__(self, *data: Any, **named_data: Any) -> None: - super().__init__(*data, **{"struct": MANIFEST_ENTRY_SCHEMA_STRUCT, **named_data}) + super().__init__(*data, **{"struct": MANIFEST_ENTRY_SCHEMAS_STRUCT[DEFAULT_READ_VERSION], **named_data}) PARTITION_FIELD_SUMMARY_TYPE = StructType( @@ -489,12 +579,12 @@ def fetch_manifest_entry(self, io: FileIO, discard_deleted: bool = True) -> List input_file = io.new_input(self.manifest_path) with AvroFile[ManifestEntry]( input_file, - MANIFEST_ENTRY_SCHEMA, + MANIFEST_ENTRY_SCHEMAS[DEFAULT_READ_VERSION], read_types={-1: ManifestEntry, 2: DataFile}, read_enums={0: ManifestEntryStatus, 101: FileFormat, 134: DataFileContent}, ) as reader: return [ - _inherit_sequence_number(entry, self) + _inherit_from_manifest(entry, self) for entry in reader if not discard_deleted or entry.status != ManifestEntryStatus.DELETED ] @@ -519,18 +609,24 @@ def read_manifest_list(input_file: InputFile) -> Iterator[ManifestFile]: yield from reader -def _inherit_sequence_number(entry: ManifestEntry, manifest: ManifestFile) -> ManifestEntry: - """Inherits the sequence numbers. +def _inherit_from_manifest(entry: ManifestEntry, manifest: ManifestFile) -> ManifestEntry: + """ + Inherits properties from manifest file. + + The properties that will be inherited are: + - sequence numbers + - partition spec id. - More information in the spec: https://iceberg.apache.org/spec/#sequence-number-inheritance + More information about inheriting sequence numbers: https://iceberg.apache.org/spec/#sequence-number-inheritance Args: - entry: The manifest entry that has null sequence numbers. 
- manifest: The manifest that has a sequence number. + entry: The manifest entry. + manifest: The manifest file. Returns: - The manifest entry with the sequence numbers set. + The manifest entry with properties inherited. """ + # Inherit sequence numbers. # The snapshot_id is required in V1, inherit with V2 when null if entry.snapshot_id is None: entry.snapshot_id = manifest.added_snapshot_id @@ -546,6 +642,9 @@ def _inherit_sequence_number(entry: ManifestEntry, manifest: ManifestFile) -> Ma # Only available in V2, always 0 in V1 entry.file_sequence_number = manifest.sequence_number + # Inherit partition spec id. + entry.data_file.spec_id = manifest.partition_spec_id + return entry @@ -603,10 +702,26 @@ def __exit__( def content(self) -> ManifestContent: ... + @property @abstractmethod - def new_writer(self) -> AvroOutputFile[ManifestEntry]: + def version(self) -> Literal[1, 2]: ... + def _with_partition(self, format_version: Literal[1, 2]) -> Schema: + data_file_type = data_file_with_partition( + format_version=format_version, partition_type=self._spec.partition_type(self._schema) + ) + return manifest_entry_schema_with_data_file(format_version=format_version, data_file=data_file_type) + + def new_writer(self) -> AvroOutputFile[ManifestEntry]: + return AvroOutputFile[ManifestEntry]( + output_file=self._output_file, + file_schema=self._with_partition(self.version), + record_schema=self._with_partition(DEFAULT_READ_VERSION), + schema_name="manifest_entry", + metadata=self._meta, + ) + @abstractmethod def prepare_entry(self, entry: ManifestEntry) -> ManifestEntry: ... @@ -678,15 +793,12 @@ def __init__(self, spec: PartitionSpec, schema: Schema, output_file: OutputFile, def content(self) -> ManifestContent: return ManifestContent.DATA - def new_writer(self) -> AvroOutputFile[ManifestEntry]: - v1_data_file_type = data_file_with_partition(self._spec.partition_type(self._schema), format_version=1) - v1_manifest_entry_schema = manifest_entry_schema_with_data_file(v1_data_file_type) - return AvroOutputFile[ManifestEntry](self._output_file, v1_manifest_entry_schema, "manifest_entry", self._meta) + @property + def version(self) -> Literal[1, 2]: + return 1 def prepare_entry(self, entry: ManifestEntry) -> ManifestEntry: - wrapped_entry = ManifestEntry(*entry.record_fields()) - wrapped_entry.data_file.block_size_in_bytes = DEFAULT_BLOCK_SIZE - return wrapped_entry + return entry class ManifestWriterV2(ManifestWriter): @@ -708,10 +820,9 @@ def __init__(self, spec: PartitionSpec, schema: Schema, output_file: OutputFile, def content(self) -> ManifestContent: return ManifestContent.DATA - def new_writer(self) -> AvroOutputFile[ManifestEntry]: - v2_data_file_type = data_file_with_partition(self._spec.partition_type(self._schema), format_version=2) - v2_manifest_entry_schema = manifest_entry_schema_with_data_file(v2_data_file_type) - return AvroOutputFile[ManifestEntry](self._output_file, v2_manifest_entry_schema, "manifest_entry", self._meta) + @property + def version(self) -> Literal[1, 2]: + return 2 def prepare_entry(self, entry: ManifestEntry) -> ManifestEntry: if entry.data_sequence_number is None: @@ -719,35 +830,7 @@ def prepare_entry(self, entry: ManifestEntry) -> ManifestEntry: raise ValueError(f"Found unassigned sequence number for an entry from snapshot: {entry.snapshot_id}") if entry.status != ManifestEntryStatus.ADDED: raise ValueError("Only entries with status ADDED can have null sequence number") - # In v2, we should not write block_size_in_bytes field - wrapped_data_file_v2_debug = 
DataFile( - format_version=2, - content=entry.data_file.content, - file_path=entry.data_file.file_path, - file_format=entry.data_file.file_format, - partition=entry.data_file.partition, - record_count=entry.data_file.record_count, - file_size_in_bytes=entry.data_file.file_size_in_bytes, - column_sizes=entry.data_file.column_sizes, - value_counts=entry.data_file.value_counts, - null_value_counts=entry.data_file.null_value_counts, - nan_value_counts=entry.data_file.nan_value_counts, - lower_bounds=entry.data_file.lower_bounds, - upper_bounds=entry.data_file.upper_bounds, - key_metadata=entry.data_file.key_metadata, - split_offsets=entry.data_file.split_offsets, - equality_ids=entry.data_file.equality_ids, - sort_order_id=entry.data_file.sort_order_id, - spec_id=entry.data_file.spec_id, - ) - wrapped_entry = ManifestEntry( - status=entry.status, - snapshot_id=entry.snapshot_id, - data_sequence_number=entry.data_sequence_number, - file_sequence_number=entry.file_sequence_number, - data_file=wrapped_data_file_v2_debug, - ) - return wrapped_entry + return entry def write_manifest( @@ -775,7 +858,9 @@ def __init__(self, output_file: OutputFile, meta: Dict[str, str]): def __enter__(self) -> ManifestListWriter: """Open the writer for writing.""" - self._writer = AvroOutputFile[ManifestFile](self._output_file, MANIFEST_FILE_SCHEMA, "manifest_file", self._meta) + self._writer = AvroOutputFile[ManifestFile]( + output_file=self._output_file, file_schema=MANIFEST_FILE_SCHEMA, schema_name="manifest_file", metadata=self._meta + ) self._writer.__enter__() return self diff --git a/pyiceberg/table/__init__.py b/pyiceberg/table/__init__.py index 6e71a40c2d..ad36255dab 100644 --- a/pyiceberg/table/__init__.py +++ b/pyiceberg/table/__init__.py @@ -75,6 +75,7 @@ from pyiceberg.typedef import ( EMPTY_DICT, IcebergBaseModel, + IcebergRootModel, Identifier, KeyDefaultDict, Properties, @@ -165,7 +166,7 @@ def _append_requirements(self, *new_requirements: TableRequirement) -> Transacti self._requirements = self._requirements + new_requirements return self - def set_table_version(self, format_version: Literal[1, 2]) -> Transaction: + def upgrade_table_version(self, format_version: Literal[1, 2]) -> Transaction: """Set the table to a certain version. Args: @@ -174,7 +175,15 @@ def set_table_version(self, format_version: Literal[1, 2]) -> Transaction: Returns: The alter table builder. """ - raise NotImplementedError("Not yet implemented") + if format_version not in {1, 2}: + raise ValueError(f"Unsupported table format version: {format_version}") + + if format_version < self._table.metadata.format_version: + raise ValueError(f"Cannot downgrade v{self._table.metadata.format_version} table to v{format_version}") + if format_version > self._table.metadata.format_version: + return self._append_updates(UpgradeFormatVersionUpdate(format_version=format_version)) + else: + return self def set_properties(self, **updates: str) -> Transaction: """Set properties. 
@@ -403,8 +412,25 @@ class AssertDefaultSortOrderId(TableRequirement): default_sort_order_id: int = Field(..., alias="default-sort-order-id") +class Namespace(IcebergRootModel[List[str]]): + """Reference to one or more levels of a namespace.""" + + root: List[str] = Field( + ..., + description='Reference to one or more levels of a namespace', + example=['accounting', 'tax'], + ) + + +class TableIdentifier(IcebergBaseModel): + """Fully Qualified identifier to a table.""" + + namespace: Namespace + name: str + + class CommitTableRequest(IcebergBaseModel): - identifier: Identifier = Field() + identifier: TableIdentifier = Field() requirements: Tuple[SerializeAsAny[TableRequirement], ...] = Field(default_factory=tuple) updates: Tuple[SerializeAsAny[TableUpdate], ...] = Field(default_factory=tuple) @@ -464,6 +490,10 @@ def scan( limit=limit, ) + @property + def format_version(self) -> Literal[1, 2]: + return self.metadata.format_version + def schema(self) -> Schema: """Return the schema for this table.""" return next(schema for schema in self.metadata.schemas if schema.schema_id == self.metadata.current_schema_id) @@ -499,6 +529,13 @@ def location(self) -> str: """Return the table's base location.""" return self.metadata.location + @property + def last_sequence_number(self) -> int: + return self.metadata.last_sequence_number + + def _next_sequence_number(self) -> int: + return INITIAL_SEQUENCE_NUMBER if self.format_version == 1 else self.last_sequence_number + 1 + def new_snapshot_id(self) -> int: """Generate a new snapshot-id that's not in use.""" snapshot_id = _generate_snapshot_id() @@ -535,7 +572,11 @@ def update_schema(self, allow_incompatible_changes: bool = False, case_sensitive def _do_commit(self, updates: Tuple[TableUpdate, ...], requirements: Tuple[TableRequirement, ...]) -> None: response = self.catalog._commit_table( # pylint: disable=W0212 - CommitTableRequest(identifier=self.identifier[1:], updates=updates, requirements=requirements) + CommitTableRequest( + identifier=TableIdentifier(namespace=self.identifier[:-1], name=self.identifier[-1]), + updates=updates, + requirements=requirements, + ) ) # pylint: disable=W0212 self.metadata = response.metadata self.metadata_location = response.metadata_location diff --git a/pyiceberg/types.py b/pyiceberg/types.py index 12ea831f08..b8fdedea51 100644 --- a/pyiceberg/types.py +++ b/pyiceberg/types.py @@ -51,7 +51,7 @@ from pydantic_core.core_schema import ValidatorFunctionWrapHandler from pyiceberg.exceptions import ValidationError -from pyiceberg.typedef import IcebergBaseModel, IcebergRootModel +from pyiceberg.typedef import IcebergBaseModel, IcebergRootModel, L from pyiceberg.utils.parsing import ParseNumberFromBrackets from pyiceberg.utils.singleton import Singleton @@ -282,6 +282,7 @@ class NestedField(IcebergType): required: bool = Field(default=True) doc: Optional[str] = Field(default=None, repr=False) initial_default: Optional[Any] = Field(alias="initial-default", default=None, repr=False) + write_default: Optional[L] = Field(alias="write-default", default=None, repr=False) # type: ignore def __init__( self, @@ -291,6 +292,7 @@ def __init__( required: bool = True, doc: Optional[str] = None, initial_default: Optional[Any] = None, + write_default: Optional[L] = None, **data: Any, ): # We need an init when we want to use positional arguments, but @@ -301,6 +303,7 @@ def __init__( data["required"] = required data["doc"] = doc data["initial-default"] = initial_default + data["write-default"] = write_default super().__init__(**data) def 
__str__(self) -> str: diff --git a/pyiceberg/utils/config.py b/pyiceberg/utils/config.py index bd15828cba..7ca3382ea1 100644 --- a/pyiceberg/utils/config.py +++ b/pyiceberg/utils/config.py @@ -43,7 +43,7 @@ def merge_config(lhs: RecursiveDict, rhs: RecursiveDict) -> RecursiveDict: new_config[rhs_key] = merge_config(lhs_value, rhs_value) else: # Take the non-null value, with precedence on rhs - new_config[rhs_key] = lhs_value or rhs_value + new_config[rhs_key] = rhs_value or lhs_value else: # New key new_config[rhs_key] = rhs_value diff --git a/pyiceberg/utils/schema_conversion.py b/pyiceberg/utils/schema_conversion.py index 74d0ae9ee7..d4b7aab4f1 100644 --- a/pyiceberg/utils/schema_conversion.py +++ b/pyiceberg/utils/schema_conversion.py @@ -48,6 +48,7 @@ TimeType, UUIDType, ) +from pyiceberg.utils.decimal import decimal_required_bytes logger = logging.getLogger(__name__) @@ -565,10 +566,17 @@ def map(self, map_type: MapType, key_result: AvroType, value_result: AvroType) - } def visit_fixed(self, fixed_type: FixedType) -> AvroType: - return {"type": "fixed", "size": len(fixed_type)} + return {"type": "fixed", "size": len(fixed_type), "name": f"fixed_{len(fixed_type)}"} def visit_decimal(self, decimal_type: DecimalType) -> AvroType: - return {"type": "bytes", "logicalType": "decimal", "precision": decimal_type.precision, "scale": decimal_type.scale} + return { + "type": "fixed", + "size": decimal_required_bytes(decimal_type.precision), + "logicalType": "decimal", + "precision": decimal_type.precision, + "scale": decimal_type.scale, + "name": f"decimal_{decimal_type.precision}_{decimal_type.scale}", + } def visit_boolean(self, boolean_type: BooleanType) -> AvroType: return "boolean" @@ -603,7 +611,7 @@ def visit_string(self, string_type: StringType) -> AvroType: return "string" def visit_uuid(self, uuid_type: UUIDType) -> AvroType: - return {"type": "fixed", "size": "16", "logicalType": "uuid"} + return {"type": "fixed", "size": 16, "logicalType": "uuid", "name": "uuid_fixed"} def visit_binary(self, binary_type: BinaryType) -> AvroType: return "bytes" diff --git a/pyproject.toml b/pyproject.toml index a6a0d7570c..89d766f623 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,7 +16,7 @@ # under the License. [tool.poetry] name = "pyiceberg" -version = "0.5.0" +version = "0.5.1" readme = "README.md" homepage = "https://py.iceberg.apache.org/" repository = "https://github.com/apache/iceberg/" @@ -75,13 +75,13 @@ sqlalchemy = { version = "^2.0.18", optional = true } pytest = "7.4.2" pytest-checkdocs = "2.10.1" pre-commit = "3.4.0" -fastavro = "1.8.3" -coverage = { version = "^7.3.1", extras = ["toml"] } +fastavro = "1.8.4" +coverage = { version = "^7.3.2", extras = ["toml"] } requests-mock = "1.11.0" -moto = "^4.2.5" +moto = "^4.2.6" typing-extensions = "4.8.0" pytest-mock = "3.11.1" -cython = "3.0.2" +cython = "3.0.3" [[tool.mypy.overrides]] module = "pytest_mock.*" diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000000..13a83393a9 --- /dev/null +++ b/tests/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/avro/test_decoder.py b/tests/avro/test_decoder.py index fd660247cd..bbcc7394f4 100644 --- a/tests/avro/test_decoder.py +++ b/tests/avro/test_decoder.py @@ -27,7 +27,7 @@ from pyiceberg.avro.decoder import BinaryDecoder, StreamingBinaryDecoder, new_decoder from pyiceberg.avro.decoder_fast import CythonBinaryDecoder -from pyiceberg.avro.resolver import resolve +from pyiceberg.avro.resolver import resolve_reader from pyiceberg.io import InputStream from pyiceberg.types import DoubleType, FloatType @@ -194,7 +194,7 @@ def test_skip_utf8(decoder_class: Callable[[bytes], BinaryDecoder]) -> None: @pytest.mark.parametrize("decoder_class", AVAILABLE_DECODERS) def test_read_int_as_float(decoder_class: Callable[[bytes], BinaryDecoder]) -> None: decoder = decoder_class(b"\x00\x00\x9A\x41") - reader = resolve(FloatType(), DoubleType()) + reader = resolve_reader(FloatType(), DoubleType()) assert reader.read(decoder) == 19.25 diff --git a/tests/avro/test_file.py b/tests/avro/test_file.py index e9dcc7eca1..74458fd923 100644 --- a/tests/avro/test_file.py +++ b/tests/avro/test_file.py @@ -30,7 +30,8 @@ from pyiceberg.avro.file import META_SCHEMA, AvroFileHeader from pyiceberg.io.pyarrow import PyArrowFileIO from pyiceberg.manifest import ( - MANIFEST_ENTRY_SCHEMA, + DEFAULT_BLOCK_SIZE, + MANIFEST_ENTRY_SCHEMAS, DataFile, DataFileContent, FileFormat, @@ -116,7 +117,73 @@ def todict(obj: Any) -> Any: return obj -def test_write_manifest_entry_with_iceberg_read_with_fastavro() -> None: +def test_write_manifest_entry_with_iceberg_read_with_fastavro_v1() -> None: + data_file = DataFile( + content=DataFileContent.DATA, + file_path="s3://some-path/some-file.parquet", + file_format=FileFormat.PARQUET, + partition=Record(), + record_count=131327, + file_size_in_bytes=220669226, + column_sizes={1: 220661854}, + value_counts={1: 131327}, + null_value_counts={1: 0}, + nan_value_counts={}, + lower_bounds={1: b"aaaaaaaaaaaaaaaa"}, + upper_bounds={1: b"zzzzzzzzzzzzzzzz"}, + key_metadata=b"\xde\xad\xbe\xef", + split_offsets=[4, 133697593], + equality_ids=[], + sort_order_id=4, + ) + entry = ManifestEntry( + status=ManifestEntryStatus.ADDED, + snapshot_id=8638475580105682862, + data_sequence_number=0, + file_sequence_number=0, + data_file=data_file, + ) + + additional_metadata = {"foo": "bar"} + + with TemporaryDirectory() as tmpdir: + tmp_avro_file = tmpdir + "/manifest_entry.avro" + + with avro.AvroOutputFile[ManifestEntry]( + output_file=PyArrowFileIO().new_output(tmp_avro_file), + file_schema=MANIFEST_ENTRY_SCHEMAS[1], + schema_name="manifest_entry", + record_schema=MANIFEST_ENTRY_SCHEMAS[2], + metadata=additional_metadata, + ) as out: + out.write_block([entry]) + + with open(tmp_avro_file, "rb") as fo: + r = reader(fo=fo) + + for k, v in additional_metadata.items(): + assert k in r.metadata + assert v == r.metadata[k] + + it = iter(r) + + fa_entry = next(it) + + v2_entry = todict(entry) + + # These are not written in V1 + del v2_entry['data_sequence_number'] + del v2_entry['file_sequence_number'] + del v2_entry['data_file']['content'] + del v2_entry['data_file']['equality_ids'] + + # Required 
in V1 + v2_entry['data_file']['block_size_in_bytes'] = DEFAULT_BLOCK_SIZE + + assert v2_entry == fa_entry + + +def test_write_manifest_entry_with_iceberg_read_with_fastavro_v2() -> None: data_file = DataFile( content=DataFileContent.DATA, file_path="s3://some-path/some-file.parquet", @@ -124,7 +191,6 @@ def test_write_manifest_entry_with_iceberg_read_with_fastavro() -> None: partition=Record(), record_count=131327, file_size_in_bytes=220669226, - block_size_in_bytes=67108864, column_sizes={1: 220661854}, value_counts={1: 131327}, null_value_counts={1: 0}, @@ -135,7 +201,6 @@ def test_write_manifest_entry_with_iceberg_read_with_fastavro() -> None: split_offsets=[4, 133697593], equality_ids=[], sort_order_id=4, - spec_id=3, ) entry = ManifestEntry( status=ManifestEntryStatus.ADDED, @@ -151,7 +216,10 @@ def test_write_manifest_entry_with_iceberg_read_with_fastavro() -> None: tmp_avro_file = tmpdir + "/manifest_entry.avro" with avro.AvroOutputFile[ManifestEntry]( - PyArrowFileIO().new_output(tmp_avro_file), MANIFEST_ENTRY_SCHEMA, "manifest_entry", additional_metadata + output_file=PyArrowFileIO().new_output(tmp_avro_file), + file_schema=MANIFEST_ENTRY_SCHEMAS[2], + schema_name="manifest_entry", + metadata=additional_metadata, ) as out: out.write_block([entry]) @@ -169,7 +237,8 @@ def test_write_manifest_entry_with_iceberg_read_with_fastavro() -> None: assert todict(entry) == fa_entry -def test_write_manifest_entry_with_fastavro_read_with_iceberg() -> None: +@pytest.mark.parametrize("format_version", [1, 2]) +def test_write_manifest_entry_with_fastavro_read_with_iceberg(format_version: int) -> None: data_file = DataFile( content=DataFileContent.DATA, file_path="s3://some-path/some-file.parquet", @@ -189,6 +258,9 @@ def test_write_manifest_entry_with_fastavro_read_with_iceberg() -> None: sort_order_id=4, spec_id=3, ) + if format_version == 1: + data_file.block_size_in_bytes = DEFAULT_BLOCK_SIZE + entry = ManifestEntry( status=ManifestEntryStatus.ADDED, snapshot_id=8638475580105682862, @@ -200,14 +272,14 @@ def test_write_manifest_entry_with_fastavro_read_with_iceberg() -> None: with TemporaryDirectory() as tmpdir: tmp_avro_file = tmpdir + "/manifest_entry.avro" - schema = AvroSchemaConversion().iceberg_to_avro(MANIFEST_ENTRY_SCHEMA, schema_name="manifest_entry") + schema = AvroSchemaConversion().iceberg_to_avro(MANIFEST_ENTRY_SCHEMAS[format_version], schema_name="manifest_entry") with open(tmp_avro_file, "wb") as out: writer(out, schema, [todict(entry)]) with avro.AvroFile[ManifestEntry]( PyArrowFileIO().new_input(tmp_avro_file), - MANIFEST_ENTRY_SCHEMA, + MANIFEST_ENTRY_SCHEMAS[format_version], {-1: ManifestEntry, 2: DataFile}, ) as avro_reader: it = iter(avro_reader) @@ -286,5 +358,12 @@ def __init__(self, *data: Any, **named_data: Any) -> None: it = iter(avro_reader) avro_entry = next(it) + # read with fastavro + with open(tmp_avro_file, "rb") as fo: + r = reader(fo=fo) + it_fastavro = iter(r) + avro_entry_read_with_fastavro = list(next(it_fastavro).values()) + for idx, field in enumerate(all_primitives_schema.as_struct()): assert record[idx] == avro_entry[idx], f"Invalid {field}" + assert record[idx] == avro_entry_read_with_fastavro[idx], f"Invalid {field} read with fastavro" diff --git a/tests/avro/test_reader.py b/tests/avro/test_reader.py index a3a502bcff..48ee8911da 100644 --- a/tests/avro/test_reader.py +++ b/tests/avro/test_reader.py @@ -41,7 +41,7 @@ ) from pyiceberg.avro.resolver import construct_reader from pyiceberg.io.pyarrow import PyArrowFileIO -from pyiceberg.manifest import 
MANIFEST_ENTRY_SCHEMA, DataFile, ManifestEntry +from pyiceberg.manifest import MANIFEST_ENTRY_SCHEMAS, DataFile, ManifestEntry from pyiceberg.schema import Schema from pyiceberg.typedef import Record from pyiceberg.types import ( @@ -70,7 +70,7 @@ def test_read_header(generated_manifest_entry_file: str, iceberg_manifest_entry_schema: Schema) -> None: with AvroFile[ManifestEntry]( PyArrowFileIO().new_input(generated_manifest_entry_file), - MANIFEST_ENTRY_SCHEMA, + MANIFEST_ENTRY_SCHEMAS[2], {-1: ManifestEntry, 2: DataFile}, ) as reader: header = reader.header diff --git a/tests/avro/test_resolver.py b/tests/avro/test_resolver.py index a302294755..51d2a7d8fc 100644 --- a/tests/avro/test_resolver.py +++ b/tests/avro/test_resolver.py @@ -32,12 +32,25 @@ StringReader, StructReader, ) -from pyiceberg.avro.resolver import ResolveError, resolve +from pyiceberg.avro.resolver import ResolveError, resolve_reader, resolve_writer +from pyiceberg.avro.writer import ( + BinaryWriter, + DefaultWriter, + DoubleWriter, + IntegerWriter, + ListWriter, + MapWriter, + OptionWriter, + StringWriter, + StructWriter, +) from pyiceberg.io.pyarrow import PyArrowFileIO +from pyiceberg.manifest import MANIFEST_ENTRY_SCHEMAS from pyiceberg.schema import Schema from pyiceberg.typedef import Record from pyiceberg.types import ( BinaryType, + BooleanType, DecimalType, DoubleType, FloatType, @@ -81,7 +94,7 @@ def test_resolver() -> None: NestedField(6, "preferences", MapType(7, StringType(), 8, StringType())), schema_id=1, ) - read_tree = resolve(write_schema, read_schema) + read_tree = resolve_reader(write_schema, read_schema) assert read_tree == StructReader( ( @@ -117,7 +130,7 @@ def test_resolver_new_required_field() -> None: ) with pytest.raises(ResolveError) as exc_info: - resolve(write_schema, read_schema) + resolve_reader(write_schema, read_schema) assert "2: data: required string is non-optional, and not part of the file schema" in str(exc_info.value) @@ -133,7 +146,7 @@ def test_resolver_invalid_evolution() -> None: ) with pytest.raises(ResolveError) as exc_info: - resolve(write_schema, read_schema) + resolve_reader(write_schema, read_schema) assert "Cannot promote long to double" in str(exc_info.value) @@ -147,7 +160,7 @@ def test_resolver_promotion_string_to_binary() -> None: NestedField(1, "id", BinaryType()), schema_id=1, ) - resolve(write_schema, read_schema) + resolve_reader(write_schema, read_schema) def test_resolver_promotion_binary_to_string() -> None: @@ -159,7 +172,7 @@ def test_resolver_promotion_binary_to_string() -> None: NestedField(1, "id", StringType()), schema_id=1, ) - resolve(write_schema, read_schema) + resolve_reader(write_schema, read_schema) def test_resolver_change_type() -> None: @@ -173,69 +186,69 @@ def test_resolver_change_type() -> None: ) with pytest.raises(ResolveError) as exc_info: - resolve(write_schema, read_schema) + resolve_reader(write_schema, read_schema) assert "File/read schema are not aligned for list, got map" in str(exc_info.value) def test_resolve_int_to_long() -> None: - assert resolve(IntegerType(), LongType()) == IntegerReader() + assert resolve_reader(IntegerType(), LongType()) == IntegerReader() def test_resolve_float_to_double() -> None: # We should still read floats, because it is encoded in 4 bytes - assert resolve(FloatType(), DoubleType()) == FloatReader() + assert resolve_reader(FloatType(), DoubleType()) == FloatReader() def test_resolve_decimal_to_decimal() -> None: # DecimalType(P, S) to DecimalType(P2, S) where P2 > P - assert resolve(DecimalType(19, 
25), DecimalType(22, 25)) == DecimalReader(19, 25) + assert resolve_reader(DecimalType(19, 25), DecimalType(22, 25)) == DecimalReader(19, 25) def test_struct_not_aligned() -> None: with pytest.raises(ResolveError): - assert resolve(StructType(), StringType()) + assert resolve_reader(StructType(), StringType()) def test_map_not_aligned() -> None: with pytest.raises(ResolveError): - assert resolve(MapType(1, StringType(), 2, IntegerType()), StringType()) + assert resolve_reader(MapType(1, StringType(), 2, IntegerType()), StringType()) def test_primitive_not_aligned() -> None: with pytest.raises(ResolveError): - assert resolve(IntegerType(), MapType(1, StringType(), 2, IntegerType())) + assert resolve_reader(IntegerType(), MapType(1, StringType(), 2, IntegerType())) def test_integer_not_aligned() -> None: with pytest.raises(ResolveError): - assert resolve(IntegerType(), StringType()) + assert resolve_reader(IntegerType(), StringType()) def test_float_not_aligned() -> None: with pytest.raises(ResolveError): - assert resolve(FloatType(), StringType()) + assert resolve_reader(FloatType(), StringType()) def test_string_not_aligned() -> None: with pytest.raises(ResolveError): - assert resolve(StringType(), FloatType()) + assert resolve_reader(StringType(), FloatType()) def test_binary_not_aligned() -> None: with pytest.raises(ResolveError): - assert resolve(BinaryType(), FloatType()) + assert resolve_reader(BinaryType(), FloatType()) def test_decimal_not_aligned() -> None: with pytest.raises(ResolveError): - assert resolve(DecimalType(22, 19), StringType()) + assert resolve_reader(DecimalType(22, 19), StringType()) def test_resolve_decimal_to_decimal_reduce_precision() -> None: # DecimalType(P, S) to DecimalType(P2, S) where P2 > P with pytest.raises(ResolveError) as exc_info: - _ = resolve(DecimalType(19, 25), DecimalType(10, 25)) == DecimalReader(22, 25) + _ = resolve_reader(DecimalType(19, 25), DecimalType(10, 25)) == DecimalReader(22, 25) assert "Cannot reduce precision from decimal(19, 25) to decimal(10, 25)" in str(exc_info.value) @@ -293,7 +306,7 @@ def test_resolver_initial_value() -> None: schema_id=2, ) - assert resolve(write_schema, read_schema) == StructReader( + assert resolve_reader(write_schema, read_schema) == StructReader( ( (None, StringReader()), # The one we skip (0, DefaultReader("vo")), @@ -301,3 +314,95 @@ def test_resolver_initial_value() -> None: Record, read_schema.as_struct(), ) + + +def test_resolve_writer() -> None: + actual = resolve_writer(record_schema=MANIFEST_ENTRY_SCHEMAS[2], file_schema=MANIFEST_ENTRY_SCHEMAS[1]) + expected = StructWriter( + ( + (0, IntegerWriter()), + (1, IntegerWriter()), + ( + 4, + StructWriter( + ( + (1, StringWriter()), + (2, StringWriter()), + (3, StructWriter(())), + (4, IntegerWriter()), + (5, IntegerWriter()), + (None, DefaultWriter(writer=IntegerWriter(), value=67108864)), + (6, OptionWriter(option=MapWriter(key_writer=IntegerWriter(), value_writer=IntegerWriter()))), + (7, OptionWriter(option=MapWriter(key_writer=IntegerWriter(), value_writer=IntegerWriter()))), + (8, OptionWriter(option=MapWriter(key_writer=IntegerWriter(), value_writer=IntegerWriter()))), + (9, OptionWriter(option=MapWriter(key_writer=IntegerWriter(), value_writer=IntegerWriter()))), + (10, OptionWriter(option=MapWriter(key_writer=IntegerWriter(), value_writer=BinaryWriter()))), + (11, OptionWriter(option=MapWriter(key_writer=IntegerWriter(), value_writer=BinaryWriter()))), + (12, OptionWriter(option=BinaryWriter())), + (13, 
OptionWriter(option=ListWriter(element_writer=IntegerWriter()))), + (15, OptionWriter(option=IntegerWriter())), + ) + ), + ), + ) + ) + + assert actual == expected + + +def test_resolve_writer_promotion() -> None: + with pytest.raises(ResolveError) as exc_info: + _ = resolve_writer( + record_schema=Schema(NestedField(field_id=1, name="floating", type=DoubleType(), required=True)), + file_schema=Schema(NestedField(field_id=1, name="floating", type=FloatType(), required=True)), + ) + + assert "Cannot promote double to float" in str(exc_info.value) + + +def test_writer_ordering() -> None: + actual = resolve_writer( + record_schema=Schema( + NestedField(field_id=1, name="str", type=StringType(), required=True), + NestedField(field_id=2, name="dbl", type=DoubleType(), required=True), + ), + file_schema=Schema( + NestedField(field_id=2, name="dbl", type=DoubleType(), required=True), + NestedField(field_id=1, name="str", type=StringType(), required=True), + ), + ) + + expected = StructWriter(((1, DoubleWriter()), (0, StringWriter()))) + + assert actual == expected + + +def test_writer_one_more_field() -> None: + actual = resolve_writer( + record_schema=Schema( + NestedField(field_id=3, name="bool", type=BooleanType(), required=True), + NestedField(field_id=1, name="str", type=StringType(), required=True), + NestedField(field_id=2, name="dbl", type=DoubleType(), required=True), + ), + file_schema=Schema( + NestedField(field_id=2, name="dbl", type=DoubleType(), required=True), + NestedField(field_id=1, name="str", type=StringType(), required=True), + ), + ) + + expected = StructWriter(((2, DoubleWriter()), (1, StringWriter()))) + + assert actual == expected + + +def test_writer_missing_optional_in_read_schema() -> None: + actual = resolve_writer( + record_schema=Schema(), + file_schema=Schema( + NestedField(field_id=1, name="str", type=StringType(), required=False), + ), + ) + + expected = StructWriter(field_writers=((None, OptionWriter(option=StringWriter())),)) + + assert actual == expected diff --git a/tests/catalog/test_base.py b/tests/catalog/test_base.py index da121f6114..1078dd1b0a 100644 --- a/tests/catalog/test_base.py +++ b/tests/catalog/test_base.py @@ -46,8 +46,10 @@ AddSchemaUpdate, CommitTableRequest, CommitTableResponse, + Namespace, SetCurrentSchemaUpdate, Table, + TableIdentifier, ) from pyiceberg.table.metadata import TableMetadata, TableMetadataV1, new_table_metadata from pyiceberg.table.sorting import UNSORTED_SORT_ORDER, SortOrder @@ -119,8 +121,8 @@ def _commit_table(self, table_request: CommitTableRequest) -> CommitTableRespons for update in table_request.updates: if isinstance(update, AddSchemaUpdate): add_schema_update: AddSchemaUpdate = update - identifier = Catalog.identifier_to_tuple(table_request.identifier) - table = self.__tables[("com", *identifier)] + identifier = tuple(table_request.identifier.namespace.root) + (table_request.identifier.name,) + table = self.__tables[identifier] new_metadata = new_table_metadata( add_schema_update.schema_, table.metadata.partition_specs[0], @@ -528,7 +530,7 @@ def test_commit_table(catalog: InMemoryCatalog) -> None: # When response = given_table.catalog._commit_table( # pylint: disable=W0212 CommitTableRequest( - identifier=given_table.identifier[1:], + identifier=TableIdentifier(namespace=Namespace(given_table.identifier[:-1]), name=given_table.identifier[-1]), updates=[ AddSchemaUpdate(schema=new_schema, last_column_id=new_schema.highest_field_id), SetCurrentSchemaUpdate(schema_id=-1), diff --git a/tests/catalog/test_rest.py 
b/tests/catalog/test_rest.py index 1c7581d24a..43313c03ce 100644 --- a/tests/catalog/test_rest.py +++ b/tests/catalog/test_rest.py @@ -15,13 +15,16 @@ # specific language governing permissions and limitations # under the License. # pylint: disable=redefined-outer-name,unused-argument +import os +from typing import cast +from unittest import mock from uuid import UUID import pytest from requests_mock import Mocker import pyiceberg -from pyiceberg.catalog import PropertiesUpdateSummary, Table +from pyiceberg.catalog import PropertiesUpdateSummary, Table, load_catalog from pyiceberg.catalog.rest import RestCatalog from pyiceberg.exceptions import ( NamespaceAlreadyExistsError, @@ -38,12 +41,14 @@ from pyiceberg.table.snapshots import Operation, Snapshot, Summary from pyiceberg.table.sorting import SortField, SortOrder from pyiceberg.transforms import IdentityTransform, TruncateTransform +from pyiceberg.typedef import RecursiveDict from pyiceberg.types import ( BooleanType, IntegerType, NestedField, StringType, ) +from pyiceberg.utils.config import Config TEST_URI = "https://iceberg-test-catalog/" TEST_CREDENTIALS = "client:secret" @@ -943,3 +948,41 @@ def test_request_session_with_ssl_client_cert() -> None: # Missing namespace RestCatalog("rest", **catalog_properties) # type: ignore assert "Could not find the TLS certificate file, invalid path: path_to_client_cert" in str(e.value) + + +EXAMPLE_ENV = {"PYICEBERG_CATALOG__PRODUCTION__URI": TEST_URI} + + +@mock.patch.dict(os.environ, EXAMPLE_ENV) +@mock.patch("pyiceberg.catalog.Config.get_catalog_config") +def test_catalog_from_environment_variables(catalog_config_mock: mock.Mock, rest_mock: Mocker) -> None: + env_config: RecursiveDict = Config._from_environment_variables({}) + catalog_config_mock.return_value = cast(RecursiveDict, env_config.get("catalog")).get("production") + catalog = cast(RestCatalog, load_catalog("production")) + assert catalog.uri == TEST_URI + + +@mock.patch.dict(os.environ, EXAMPLE_ENV) +@mock.patch("pyiceberg.catalog._ENV_CONFIG.get_catalog_config") +def test_catalog_from_environment_variables_override(catalog_config_mock: mock.Mock, rest_mock: Mocker) -> None: + rest_mock.get( + "https://other-service.io/api/v1/config", + json={"defaults": {}, "overrides": {}}, + status_code=200, + ) + env_config: RecursiveDict = Config._from_environment_variables({}) + + catalog_config_mock.return_value = cast(RecursiveDict, env_config.get("catalog")).get("production") + catalog = cast(RestCatalog, load_catalog("production", uri="https://other-service.io/api")) + assert catalog.uri == "https://other-service.io/api" + + +def test_catalog_from_parameters_empty_env(rest_mock: Mocker) -> None: + rest_mock.get( + "https://other-service.io/api/v1/config", + json={"defaults": {}, "overrides": {}}, + status_code=200, + ) + + catalog = cast(RestCatalog, load_catalog("production", uri="https://other-service.io/api")) + assert catalog.uri == "https://other-service.io/api" diff --git a/tests/expressions/test_parser.py b/tests/expressions/test_parser.py index f4bebca066..3b21835dae 100644 --- a/tests/expressions/test_parser.py +++ b/tests/expressions/test_parser.py @@ -160,6 +160,14 @@ def test_and_or_with_parens() -> None: ) +def test_multiple_and_or() -> None: + assert And(EqualTo("foo", 1), EqualTo("bar", 2), EqualTo("baz", 3)) == parser.parse("foo = 1 and bar = 2 and baz = 3") + assert Or(EqualTo("foo", 1), EqualTo("foo", 2), EqualTo("foo", 3)) == parser.parse("foo = 1 or foo = 2 or foo = 3") + assert Or( + And(NotNull("foo"), LessThan("foo", 
5)), And(GreaterThan("foo", 10), LessThan("foo", 100), IsNull("bar")) + ) == parser.parse("foo is not null and foo < 5 or (foo > 10 and foo < 100 and bar is null)") + + def test_starts_with() -> None: assert StartsWith("foo", "data") == parser.parse("foo LIKE 'data'") diff --git a/tests/test_integration.py b/tests/test_integration.py index 297749b1b7..6e874b68fa 100644 --- a/tests/test_integration.py +++ b/tests/test_integration.py @@ -46,6 +46,8 @@ TimestampType, ) +DEFAULT_PROPERTIES = {'write.parquet.compression-codec': 'zstd'} + @pytest.fixture() def catalog() -> Catalog: @@ -81,6 +83,11 @@ def table_test_all_types(catalog: Catalog) -> Table: return catalog.load_table("default.test_all_types") +@pytest.fixture() +def table_test_table_version(catalog: Catalog) -> Table: + return catalog.load_table("default.test_table_version") + + TABLE_NAME = ("default", "t1") @@ -104,25 +111,25 @@ def table(catalog: Catalog) -> Table: @pytest.mark.integration def test_table_properties(table: Table) -> None: - assert table.properties == {} + assert table.properties == DEFAULT_PROPERTIES with table.transaction() as transaction: transaction.set_properties(abc="🤪") - assert table.properties == {"abc": "🤪"} + assert table.properties == dict(**{"abc": "🤪"}, **DEFAULT_PROPERTIES) with table.transaction() as transaction: transaction.remove_properties("abc") - assert table.properties == {} + assert table.properties == DEFAULT_PROPERTIES table = table.transaction().set_properties(abc="def").commit_transaction() - assert table.properties == {"abc": "def"} + assert table.properties == dict(**{"abc": "def"}, **DEFAULT_PROPERTIES) table = table.transaction().remove_properties("abc").commit_transaction() - assert table.properties == {} + assert table.properties == DEFAULT_PROPERTIES @pytest.fixture() @@ -364,3 +371,28 @@ def test_scan_tag(test_positional_mor_deletes: Table) -> None: def test_scan_branch(test_positional_mor_deletes: Table) -> None: arrow_table = test_positional_mor_deletes.scan().use_ref("without_5").to_arrow() assert arrow_table["number"].to_pylist() == [1, 2, 3, 4, 6, 7, 8, 9, 10, 11, 12] + + +@pytest.mark.integration +def test_upgrade_table_version(table_test_table_version: Table) -> None: + assert table_test_table_version.format_version == 1 + + with table_test_table_version.transaction() as transaction: + transaction.upgrade_table_version(format_version=1) + + assert table_test_table_version.format_version == 1 + + with table_test_table_version.transaction() as transaction: + transaction.upgrade_table_version(format_version=2) + + assert table_test_table_version.format_version == 2 + + with pytest.raises(ValueError) as e: # type: ignore + with table_test_table_version.transaction() as transaction: + transaction.upgrade_table_version(format_version=1) + assert "Cannot downgrade v2 table to v1" in str(e.value) + + with pytest.raises(ValueError) as e: + with table_test_table_version.transaction() as transaction: + transaction.upgrade_table_version(format_version=3) + assert "Unsupported table format version: 3" in str(e.value) diff --git a/tests/test_integration_manifest.py b/tests/test_integration_manifest.py index 34b20f271d..8191209ae6 100644 --- a/tests/test_integration_manifest.py +++ b/tests/test_integration_manifest.py @@ -26,11 +26,7 @@ from pyiceberg.catalog import Catalog, load_catalog from pyiceberg.io.pyarrow import PyArrowFileIO -from pyiceberg.manifest import ( - DataFile, - ManifestEntry, - write_manifest, -) +from pyiceberg.manifest import DataFile, ManifestEntry, write_manifest 
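# A minimal usage sketch (not from the patch itself) of the Transaction.upgrade_table_version
# API exercised by test_upgrade_table_version above. Assumptions: a configured catalog named
# "local" and an existing v1 table "default.my_table"; both names are hypothetical placeholders.
from pyiceberg.catalog import load_catalog

catalog = load_catalog("local")
tbl = catalog.load_table("default.my_table")
assert tbl.format_version == 1

# Upgrades are one-way: re-requesting the current version is a no-op, while a
# downgrade (2 -> 1) or an unknown version (e.g. 3) raises ValueError.
with tbl.transaction() as txn:
    txn.upgrade_table_version(format_version=2)

assert tbl.format_version == 2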
from pyiceberg.table import Table from pyiceberg.utils.lazydict import LazyDict @@ -105,6 +101,10 @@ def test_write_sample_manifest(table_test_all_types: Table) -> None: ) wrapped_entry_v2 = ManifestEntry(*entry.record_fields()) wrapped_entry_v2.data_file = wrapped_data_file_v2_debug + wrapped_entry_v2_dict = todict(wrapped_entry_v2) + # This one should not be written + del wrapped_entry_v2_dict['data_file']['spec_id'] + with TemporaryDirectory() as tmpdir: tmp_avro_file = tmpdir + "/test_write_manifest.avro" output = PyArrowFileIO().new_output(tmp_avro_file) @@ -123,4 +123,4 @@ def test_write_sample_manifest(table_test_all_types: Table) -> None: it = iter(r) fa_entry = next(it) - assert fa_entry == todict(wrapped_entry_v2) + assert fa_entry == wrapped_entry_v2_dict diff --git a/tests/test_types.py b/tests/test_types.py index 249ee98a6f..6aed56c58f 100644 --- a/tests/test_types.py +++ b/tests/test_types.py @@ -18,6 +18,7 @@ import pickle from typing import Type +import pydantic_core import pytest from pyiceberg.exceptions import ValidationError @@ -208,6 +209,11 @@ def test_nested_field() -> None: assert str(field_var) == str(eval(repr(field_var))) assert field_var == pickle.loads(pickle.dumps(field_var)) + with pytest.raises(pydantic_core.ValidationError) as exc_info: + _ = (NestedField(1, "field", StringType(), required=True, write_default=(1, "a", True)),) # type: ignore + + assert "validation errors for NestedField" in str(exc_info.value) + @pytest.mark.parametrize("input_index,input_type", non_parameterized_types) @pytest.mark.parametrize("check_index,check_type", non_parameterized_types) diff --git a/tests/utils/test_config.py b/tests/utils/test_config.py index 0b6cff9d7d..11c3076d8e 100644 --- a/tests/utils/test_config.py +++ b/tests/utils/test_config.py @@ -20,7 +20,8 @@ import pytest from strictyaml import as_document -from pyiceberg.utils.config import Config, _lowercase_dictionary_keys +from pyiceberg.typedef import RecursiveDict +from pyiceberg.utils.config import Config, _lowercase_dictionary_keys, merge_config EXAMPLE_ENV = {"PYICEBERG_CATALOG__PRODUCTION__URI": "https://service.io/api"} @@ -54,3 +55,10 @@ def test_lowercase_dictionary_keys() -> None: uppercase_keys = {"UPPER": {"NESTED_UPPER": {"YES"}}} expected = {"upper": {"nested_upper": {"YES"}}} assert _lowercase_dictionary_keys(uppercase_keys) == expected # type: ignore + + +def test_merge_config() -> None: + lhs: RecursiveDict = {"common_key": "abc123"} + rhs: RecursiveDict = {"common_key": "xyz789"} + result = merge_config(lhs, rhs) + assert result["common_key"] == rhs["common_key"] diff --git a/tests/utils/test_manifest.py b/tests/utils/test_manifest.py index b2c972b8c2..08906b68ad 100644 --- a/tests/utils/test_manifest.py +++ b/tests/utils/test_manifest.py @@ -16,7 +16,7 @@ # under the License. 
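# A minimal sketch of the merge_config precedence pinned down by test_merge_config above:
# for a key present on both sides the right-hand (override) value wins, and an empty
# override falls back to the left-hand value. The values below are made up for illustration.
from pyiceberg.utils.config import merge_config

defaults = {"catalog": {"prod": {"uri": "https://defaults.example/api"}}}
overrides = {"catalog": {"prod": {"uri": "https://overrides.example/api"}}}

# Nested dicts are merged recursively; the override URI takes precedence.
merged = merge_config(defaults, overrides)
assert merged["catalog"]["prod"]["uri"] == "https://overrides.example/api"

# An empty right-hand value does not clobber an existing left-hand value.
assert merge_config({"key": "abc123"}, {"key": ""})["key"] == "abc123"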
# pylint: disable=redefined-outer-name,arguments-renamed,fixme from tempfile import TemporaryDirectory -from typing import Dict +from typing import Dict, Literal import fastavro import pytest @@ -303,7 +303,9 @@ def test_read_manifest_v2(generated_manifest_file_file_v2: str) -> None: @pytest.mark.parametrize("format_version", [1, 2]) -def test_write_manifest(generated_manifest_file_file_v1: str, generated_manifest_file_file_v2: str, format_version: int) -> None: +def test_write_manifest( + generated_manifest_file_file_v1: str, generated_manifest_file_file_v2: str, format_version: Literal[1, 2] +) -> None: io = load_file_io() snapshot = Snapshot( snapshot_id=25, @@ -327,7 +329,7 @@ def test_write_manifest(generated_manifest_file_file_v1: str, generated_manifest tmp_avro_file = tmpdir + "/test_write_manifest.avro" output = io.new_output(tmp_avro_file) with write_manifest( - format_version=format_version, # type: ignore + format_version=format_version, spec=test_spec, schema=test_schema, output_file=output, @@ -337,6 +339,7 @@ def test_write_manifest(generated_manifest_file_file_v1: str, generated_manifest writer.add_entry(entry) new_manifest = writer.to_manifest_file() with pytest.raises(RuntimeError): + # It is already closed writer.add_entry(manifest_entries[0]) expected_metadata = { @@ -345,8 +348,6 @@ def test_write_manifest(generated_manifest_file_file_v1: str, generated_manifest "partition-spec-id": str(test_spec.spec_id), "format-version": str(format_version), } - if format_version == 2: - expected_metadata["content"] = "data" _verify_metadata_with_fastavro( tmp_avro_file, expected_metadata, @@ -357,7 +358,7 @@ def test_write_manifest(generated_manifest_file_file_v1: str, generated_manifest assert manifest_entry.status == ManifestEntryStatus.ADDED assert manifest_entry.snapshot_id == 8744736658442914487 - assert manifest_entry.data_sequence_number == 0 if format_version == 1 else 3 + assert manifest_entry.data_sequence_number == (-1 if format_version == 1 else 3) assert isinstance(manifest_entry.data_file, DataFile) data_file = manifest_entry.data_file @@ -371,10 +372,6 @@ def test_write_manifest(generated_manifest_file_file_v1: str, generated_manifest assert data_file.partition == Record(VendorID=1, tpep_pickup_datetime=1925) assert data_file.record_count == 19513 assert data_file.file_size_in_bytes == 388872 - if format_version == 1: - assert data_file.block_size_in_bytes == 67108864 - else: - assert data_file.block_size_in_bytes is None assert data_file.column_sizes == { 1: 53, 2: 98153, @@ -477,7 +474,7 @@ def test_write_manifest(generated_manifest_file_file_v1: str, generated_manifest @pytest.mark.parametrize("format_version", [1, 2]) def test_write_manifest_list( - generated_manifest_file_file_v1: str, generated_manifest_file_file_v2: str, format_version: int + generated_manifest_file_file_v1: str, generated_manifest_file_file_v2: str, format_version: Literal[1, 2] ) -> None: io = load_file_io() @@ -495,7 +492,7 @@ def test_write_manifest_list( path = tmp_dir + "/manifest-list.avro" output = io.new_output(path) with write_manifest_list( - format_version=format_version, output_file=output, snapshot_id=25, parent_snapshot_id=19, sequence_number=0 # type: ignore + format_version=format_version, output_file=output, snapshot_id=25, parent_snapshot_id=19, sequence_number=0 ) as writer: writer.add_manifests(demo_manifest_list) new_manifest_list = list(read_manifest_list(io.new_input(path)))
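# In outline, the version-aware writer API driven by the two tests above looks like the
# sketch below. It is illustrative only: `test_spec`, `test_schema`, and `entries` stand in
# for a PartitionSpec, a Schema, and a list of ManifestEntry objects the caller already has,
# and the paths and snapshot ids are arbitrary.
from pyiceberg.io import load_file_io
from pyiceberg.manifest import write_manifest, write_manifest_list

io = load_file_io()

# format_version selects the per-version schemas (DATA_FILE_TYPE[...] and
# MANIFEST_ENTRY_SCHEMAS[...]) used to lay out the Avro file.
with write_manifest(
    format_version=2,
    spec=test_spec,
    schema=test_schema,
    output_file=io.new_output("/tmp/manifest.avro"),
    snapshot_id=25,
) as writer:
    for entry in entries:
        writer.add_entry(entry)
manifest_file = writer.to_manifest_file()  # the writer is closed once the block exits

with write_manifest_list(
    format_version=2,
    output_file=io.new_output("/tmp/manifest-list.avro"),
    snapshot_id=25,
    parent_snapshot_id=19,
    sequence_number=0,
) as list_writer:
    list_writer.add_manifests([manifest_file])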