From 0ac5f9eb9209c560108540a09196003aa18dd6d3 Mon Sep 17 00:00:00 2001 From: Sung Yun <107272191+syun64@users.noreply.github.com> Date: Wed, 7 Feb 2024 22:03:34 +0000 Subject: [PATCH 01/25] detect breaking changes --- poetry.lock | 641 ++++++++++++++++++++++++++-------------------- pyproject.toml | 5 + tests/test_api.py | 41 +++ 3 files changed, 404 insertions(+), 283 deletions(-) create mode 100644 tests/test_api.py diff --git a/poetry.lock b/poetry.lock index b3a68cce7..886af42c4 100644 --- a/poetry.lock +++ b/poetry.lock @@ -46,87 +46,87 @@ boto3 = ["boto3 (>=1.33.2,<1.34.35)"] [[package]] name = "aiohttp" -version = "3.9.2" +version = "3.9.3" description = "Async http client/server framework (asyncio)" optional = true python-versions = ">=3.8" files = [ - {file = "aiohttp-3.9.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:772fbe371788e61c58d6d3d904268e48a594ba866804d08c995ad71b144f94cb"}, - {file = "aiohttp-3.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:edd4f1af2253f227ae311ab3d403d0c506c9b4410c7fc8d9573dec6d9740369f"}, - {file = "aiohttp-3.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cfee9287778399fdef6f8a11c9e425e1cb13cc9920fd3a3df8f122500978292b"}, - {file = "aiohttp-3.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cc158466f6a980a6095ee55174d1de5730ad7dec251be655d9a6a9dd7ea1ff9"}, - {file = "aiohttp-3.9.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:54ec82f45d57c9a65a1ead3953b51c704f9587440e6682f689da97f3e8defa35"}, - {file = "aiohttp-3.9.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:abeb813a18eb387f0d835ef51f88568540ad0325807a77a6e501fed4610f864e"}, - {file = "aiohttp-3.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc91d07280d7d169f3a0f9179d8babd0ee05c79d4d891447629ff0d7d8089ec2"}, - {file = "aiohttp-3.9.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b65e861f4bebfb660f7f0f40fa3eb9f2ab9af10647d05dac824390e7af8f75b7"}, - {file = "aiohttp-3.9.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:04fd8ffd2be73d42bcf55fd78cde7958eeee6d4d8f73c3846b7cba491ecdb570"}, - {file = "aiohttp-3.9.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:3d8d962b439a859b3ded9a1e111a4615357b01620a546bc601f25b0211f2da81"}, - {file = "aiohttp-3.9.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:8ceb658afd12b27552597cf9a65d9807d58aef45adbb58616cdd5ad4c258c39e"}, - {file = "aiohttp-3.9.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:0e4ee4df741670560b1bc393672035418bf9063718fee05e1796bf867e995fad"}, - {file = "aiohttp-3.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2dec87a556f300d3211decf018bfd263424f0690fcca00de94a837949fbcea02"}, - {file = "aiohttp-3.9.2-cp310-cp310-win32.whl", hash = "sha256:3e1a800f988ce7c4917f34096f81585a73dbf65b5c39618b37926b1238cf9bc4"}, - {file = "aiohttp-3.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:ea510718a41b95c236c992b89fdfc3d04cc7ca60281f93aaada497c2b4e05c46"}, - {file = "aiohttp-3.9.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6aaa6f99256dd1b5756a50891a20f0d252bd7bdb0854c5d440edab4495c9f973"}, - {file = "aiohttp-3.9.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a27d8c70ad87bcfce2e97488652075a9bdd5b70093f50b10ae051dfe5e6baf37"}, - {file = "aiohttp-3.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:54287bcb74d21715ac8382e9de146d9442b5f133d9babb7e5d9e453faadd005e"}, - {file = 
"aiohttp-3.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bb3d05569aa83011fcb346b5266e00b04180105fcacc63743fc2e4a1862a891"}, - {file = "aiohttp-3.9.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c8534e7d69bb8e8d134fe2be9890d1b863518582f30c9874ed7ed12e48abe3c4"}, - {file = "aiohttp-3.9.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4bd9d5b989d57b41e4ff56ab250c5ddf259f32db17159cce630fd543376bd96b"}, - {file = "aiohttp-3.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa6904088e6642609981f919ba775838ebf7df7fe64998b1a954fb411ffb4663"}, - {file = "aiohttp-3.9.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bda42eb410be91b349fb4ee3a23a30ee301c391e503996a638d05659d76ea4c2"}, - {file = "aiohttp-3.9.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:193cc1ccd69d819562cc7f345c815a6fc51d223b2ef22f23c1a0f67a88de9a72"}, - {file = "aiohttp-3.9.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b9f1cb839b621f84a5b006848e336cf1496688059d2408e617af33e3470ba204"}, - {file = "aiohttp-3.9.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:d22a0931848b8c7a023c695fa2057c6aaac19085f257d48baa24455e67df97ec"}, - {file = "aiohttp-3.9.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4112d8ba61fbd0abd5d43a9cb312214565b446d926e282a6d7da3f5a5aa71d36"}, - {file = "aiohttp-3.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c4ad4241b52bb2eb7a4d2bde060d31c2b255b8c6597dd8deac2f039168d14fd7"}, - {file = "aiohttp-3.9.2-cp311-cp311-win32.whl", hash = "sha256:ee2661a3f5b529f4fc8a8ffee9f736ae054adfb353a0d2f78218be90617194b3"}, - {file = "aiohttp-3.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:4deae2c165a5db1ed97df2868ef31ca3cc999988812e82386d22937d9d6fed52"}, - {file = "aiohttp-3.9.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:6f4cdba12539215aaecf3c310ce9d067b0081a0795dd8a8805fdb67a65c0572a"}, - {file = "aiohttp-3.9.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:84e843b33d5460a5c501c05539809ff3aee07436296ff9fbc4d327e32aa3a326"}, - {file = "aiohttp-3.9.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8008d0f451d66140a5aa1c17e3eedc9d56e14207568cd42072c9d6b92bf19b52"}, - {file = "aiohttp-3.9.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:61c47ab8ef629793c086378b1df93d18438612d3ed60dca76c3422f4fbafa792"}, - {file = "aiohttp-3.9.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bc71f748e12284312f140eaa6599a520389273174b42c345d13c7e07792f4f57"}, - {file = "aiohttp-3.9.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a1c3a4d0ab2f75f22ec80bca62385db2e8810ee12efa8c9e92efea45c1849133"}, - {file = "aiohttp-3.9.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a87aa0b13bbee025faa59fa58861303c2b064b9855d4c0e45ec70182bbeba1b"}, - {file = "aiohttp-3.9.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2cc0d04688b9f4a7854c56c18aa7af9e5b0a87a28f934e2e596ba7e14783192"}, - {file = "aiohttp-3.9.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1956e3ac376b1711c1533266dec4efd485f821d84c13ce1217d53e42c9e65f08"}, - {file = "aiohttp-3.9.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:114da29f39eccd71b93a0fcacff178749a5c3559009b4a4498c2c173a6d74dff"}, - {file = "aiohttp-3.9.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = 
"sha256:3f17999ae3927d8a9a823a1283b201344a0627272f92d4f3e3a4efe276972fe8"}, - {file = "aiohttp-3.9.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:f31df6a32217a34ae2f813b152a6f348154f948c83213b690e59d9e84020925c"}, - {file = "aiohttp-3.9.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:7a75307ffe31329928a8d47eae0692192327c599113d41b278d4c12b54e1bd11"}, - {file = "aiohttp-3.9.2-cp312-cp312-win32.whl", hash = "sha256:972b63d589ff8f305463593050a31b5ce91638918da38139b9d8deaba9e0fed7"}, - {file = "aiohttp-3.9.2-cp312-cp312-win_amd64.whl", hash = "sha256:200dc0246f0cb5405c80d18ac905c8350179c063ea1587580e3335bfc243ba6a"}, - {file = "aiohttp-3.9.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:158564d0d1020e0d3fe919a81d97aadad35171e13e7b425b244ad4337fc6793a"}, - {file = "aiohttp-3.9.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:da1346cd0ccb395f0ed16b113ebb626fa43b7b07fd7344fce33e7a4f04a8897a"}, - {file = "aiohttp-3.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:eaa9256de26ea0334ffa25f1913ae15a51e35c529a1ed9af8e6286dd44312554"}, - {file = "aiohttp-3.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1543e7fb00214fb4ccead42e6a7d86f3bb7c34751ec7c605cca7388e525fd0b4"}, - {file = "aiohttp-3.9.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:186e94570433a004e05f31f632726ae0f2c9dee4762a9ce915769ce9c0a23d89"}, - {file = "aiohttp-3.9.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d52d20832ac1560f4510d68e7ba8befbc801a2b77df12bd0cd2bcf3b049e52a4"}, - {file = "aiohttp-3.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c45e4e815ac6af3b72ca2bde9b608d2571737bb1e2d42299fc1ffdf60f6f9a1"}, - {file = "aiohttp-3.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa906b9bdfd4a7972dd0628dbbd6413d2062df5b431194486a78f0d2ae87bd55"}, - {file = "aiohttp-3.9.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:68bbee9e17d66f17bb0010aa15a22c6eb28583edcc8b3212e2b8e3f77f3ebe2a"}, - {file = "aiohttp-3.9.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4c189b64bd6d9a403a1a3f86a3ab3acbc3dc41a68f73a268a4f683f89a4dec1f"}, - {file = "aiohttp-3.9.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:8a7876f794523123bca6d44bfecd89c9fec9ec897a25f3dd202ee7fc5c6525b7"}, - {file = "aiohttp-3.9.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:d23fba734e3dd7b1d679b9473129cd52e4ec0e65a4512b488981a56420e708db"}, - {file = "aiohttp-3.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b141753be581fab842a25cb319f79536d19c2a51995d7d8b29ee290169868eab"}, - {file = "aiohttp-3.9.2-cp38-cp38-win32.whl", hash = "sha256:103daf41ff3b53ba6fa09ad410793e2e76c9d0269151812e5aba4b9dd674a7e8"}, - {file = "aiohttp-3.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:328918a6c2835861ff7afa8c6d2c70c35fdaf996205d5932351bdd952f33fa2f"}, - {file = "aiohttp-3.9.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5264d7327c9464786f74e4ec9342afbbb6ee70dfbb2ec9e3dfce7a54c8043aa3"}, - {file = "aiohttp-3.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:07205ae0015e05c78b3288c1517afa000823a678a41594b3fdc870878d645305"}, - {file = "aiohttp-3.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ae0a1e638cffc3ec4d4784b8b4fd1cf28968febc4bd2718ffa25b99b96a741bd"}, - {file = "aiohttp-3.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d43302a30ba1166325974858e6ef31727a23bdd12db40e725bec0f759abce505"}, - {file = 
"aiohttp-3.9.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:16a967685907003765855999af11a79b24e70b34dc710f77a38d21cd9fc4f5fe"}, - {file = "aiohttp-3.9.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6fa3ee92cd441d5c2d07ca88d7a9cef50f7ec975f0117cd0c62018022a184308"}, - {file = "aiohttp-3.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b500c5ad9c07639d48615a770f49618130e61be36608fc9bc2d9bae31732b8f"}, - {file = "aiohttp-3.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c07327b368745b1ce2393ae9e1aafed7073d9199e1dcba14e035cc646c7941bf"}, - {file = "aiohttp-3.9.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:cc7d6502c23a0ec109687bf31909b3fb7b196faf198f8cff68c81b49eb316ea9"}, - {file = "aiohttp-3.9.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:07be2be7071723c3509ab5c08108d3a74f2181d4964e869f2504aaab68f8d3e8"}, - {file = "aiohttp-3.9.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:122468f6fee5fcbe67cb07014a08c195b3d4c41ff71e7b5160a7bcc41d585a5f"}, - {file = "aiohttp-3.9.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:00a9abcea793c81e7f8778ca195a1714a64f6d7436c4c0bb168ad2a212627000"}, - {file = "aiohttp-3.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7a9825fdd64ecac5c670234d80bb52bdcaa4139d1f839165f548208b3779c6c6"}, - {file = "aiohttp-3.9.2-cp39-cp39-win32.whl", hash = "sha256:5422cd9a4a00f24c7244e1b15aa9b87935c85fb6a00c8ac9b2527b38627a9211"}, - {file = "aiohttp-3.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:7d579dcd5d82a86a46f725458418458fa43686f6a7b252f2966d359033ffc8ab"}, - {file = "aiohttp-3.9.2.tar.gz", hash = "sha256:b0ad0a5e86ce73f5368a164c10ada10504bf91869c05ab75d982c6048217fbf7"}, + {file = "aiohttp-3.9.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:939677b61f9d72a4fa2a042a5eee2a99a24001a67c13da113b2e30396567db54"}, + {file = "aiohttp-3.9.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1f5cd333fcf7590a18334c90f8c9147c837a6ec8a178e88d90a9b96ea03194cc"}, + {file = "aiohttp-3.9.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:82e6aa28dd46374f72093eda8bcd142f7771ee1eb9d1e223ff0fa7177a96b4a5"}, + {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f56455b0c2c7cc3b0c584815264461d07b177f903a04481dfc33e08a89f0c26b"}, + {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bca77a198bb6e69795ef2f09a5f4c12758487f83f33d63acde5f0d4919815768"}, + {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e083c285857b78ee21a96ba1eb1b5339733c3563f72980728ca2b08b53826ca5"}, + {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab40e6251c3873d86ea9b30a1ac6d7478c09277b32e14745d0d3c6e76e3c7e29"}, + {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:df822ee7feaaeffb99c1a9e5e608800bd8eda6e5f18f5cfb0dc7eeb2eaa6bbec"}, + {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:acef0899fea7492145d2bbaaaec7b345c87753168589cc7faf0afec9afe9b747"}, + {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cd73265a9e5ea618014802ab01babf1940cecb90c9762d8b9e7d2cc1e1969ec6"}, + {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:a78ed8a53a1221393d9637c01870248a6f4ea5b214a59a92a36f18151739452c"}, + 
{file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:6b0e029353361f1746bac2e4cc19b32f972ec03f0f943b390c4ab3371840aabf"}, + {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7cf5c9458e1e90e3c390c2639f1017a0379a99a94fdfad3a1fd966a2874bba52"}, + {file = "aiohttp-3.9.3-cp310-cp310-win32.whl", hash = "sha256:3e59c23c52765951b69ec45ddbbc9403a8761ee6f57253250c6e1536cacc758b"}, + {file = "aiohttp-3.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:055ce4f74b82551678291473f66dc9fb9048a50d8324278751926ff0ae7715e5"}, + {file = "aiohttp-3.9.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6b88f9386ff1ad91ace19d2a1c0225896e28815ee09fc6a8932fded8cda97c3d"}, + {file = "aiohttp-3.9.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c46956ed82961e31557b6857a5ca153c67e5476972e5f7190015018760938da2"}, + {file = "aiohttp-3.9.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:07b837ef0d2f252f96009e9b8435ec1fef68ef8b1461933253d318748ec1acdc"}, + {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad46e6f620574b3b4801c68255492e0159d1712271cc99d8bdf35f2043ec266"}, + {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ed3e046ea7b14938112ccd53d91c1539af3e6679b222f9469981e3dac7ba1ce"}, + {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:039df344b45ae0b34ac885ab5b53940b174530d4dd8a14ed8b0e2155b9dddccb"}, + {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7943c414d3a8d9235f5f15c22ace69787c140c80b718dcd57caaade95f7cd93b"}, + {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84871a243359bb42c12728f04d181a389718710129b36b6aad0fc4655a7647d4"}, + {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5eafe2c065df5401ba06821b9a054d9cb2848867f3c59801b5d07a0be3a380ae"}, + {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:9d3c9b50f19704552f23b4eaea1fc082fdd82c63429a6506446cbd8737823da3"}, + {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:f033d80bc6283092613882dfe40419c6a6a1527e04fc69350e87a9df02bbc283"}, + {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:2c895a656dd7e061b2fd6bb77d971cc38f2afc277229ce7dd3552de8313a483e"}, + {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1f5a71d25cd8106eab05f8704cd9167b6e5187bcdf8f090a66c6d88b634802b4"}, + {file = "aiohttp-3.9.3-cp311-cp311-win32.whl", hash = "sha256:50fca156d718f8ced687a373f9e140c1bb765ca16e3d6f4fe116e3df7c05b2c5"}, + {file = "aiohttp-3.9.3-cp311-cp311-win_amd64.whl", hash = "sha256:5fe9ce6c09668063b8447f85d43b8d1c4e5d3d7e92c63173e6180b2ac5d46dd8"}, + {file = "aiohttp-3.9.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:38a19bc3b686ad55804ae931012f78f7a534cce165d089a2059f658f6c91fa60"}, + {file = "aiohttp-3.9.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:770d015888c2a598b377bd2f663adfd947d78c0124cfe7b959e1ef39f5b13869"}, + {file = "aiohttp-3.9.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ee43080e75fc92bf36219926c8e6de497f9b247301bbf88c5c7593d931426679"}, + {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52df73f14ed99cee84865b95a3d9e044f226320a87af208f068ecc33e0c35b96"}, + {file = 
"aiohttp-3.9.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc9b311743a78043b26ffaeeb9715dc360335e5517832f5a8e339f8a43581e4d"}, + {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b955ed993491f1a5da7f92e98d5dad3c1e14dc175f74517c4e610b1f2456fb11"}, + {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:504b6981675ace64c28bf4a05a508af5cde526e36492c98916127f5a02354d53"}, + {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a6fe5571784af92b6bc2fda8d1925cccdf24642d49546d3144948a6a1ed58ca5"}, + {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ba39e9c8627edc56544c8628cc180d88605df3892beeb2b94c9bc857774848ca"}, + {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e5e46b578c0e9db71d04c4b506a2121c0cb371dd89af17a0586ff6769d4c58c1"}, + {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:938a9653e1e0c592053f815f7028e41a3062e902095e5a7dc84617c87267ebd5"}, + {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:c3452ea726c76e92f3b9fae4b34a151981a9ec0a4847a627c43d71a15ac32aa6"}, + {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ff30218887e62209942f91ac1be902cc80cddb86bf00fbc6783b7a43b2bea26f"}, + {file = "aiohttp-3.9.3-cp312-cp312-win32.whl", hash = "sha256:38f307b41e0bea3294a9a2a87833191e4bcf89bb0365e83a8be3a58b31fb7f38"}, + {file = "aiohttp-3.9.3-cp312-cp312-win_amd64.whl", hash = "sha256:b791a3143681a520c0a17e26ae7465f1b6f99461a28019d1a2f425236e6eedb5"}, + {file = "aiohttp-3.9.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0ed621426d961df79aa3b963ac7af0d40392956ffa9be022024cd16297b30c8c"}, + {file = "aiohttp-3.9.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7f46acd6a194287b7e41e87957bfe2ad1ad88318d447caf5b090012f2c5bb528"}, + {file = "aiohttp-3.9.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:feeb18a801aacb098220e2c3eea59a512362eb408d4afd0c242044c33ad6d542"}, + {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f734e38fd8666f53da904c52a23ce517f1b07722118d750405af7e4123933511"}, + {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b40670ec7e2156d8e57f70aec34a7216407848dfe6c693ef131ddf6e76feb672"}, + {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fdd215b7b7fd4a53994f238d0f46b7ba4ac4c0adb12452beee724ddd0743ae5d"}, + {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:017a21b0df49039c8f46ca0971b3a7fdc1f56741ab1240cb90ca408049766168"}, + {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e99abf0bba688259a496f966211c49a514e65afa9b3073a1fcee08856e04425b"}, + {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:648056db9a9fa565d3fa851880f99f45e3f9a771dd3ff3bb0c048ea83fb28194"}, + {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8aacb477dc26797ee089721536a292a664846489c49d3ef9725f992449eda5a8"}, + {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:522a11c934ea660ff8953eda090dcd2154d367dec1ae3c540aff9f8a5c109ab4"}, + {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = 
"sha256:5bce0dc147ca85caa5d33debc4f4d65e8e8b5c97c7f9f660f215fa74fc49a321"}, + {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b4af9f25b49a7be47c0972139e59ec0e8285c371049df1a63b6ca81fdd216a2"}, + {file = "aiohttp-3.9.3-cp38-cp38-win32.whl", hash = "sha256:298abd678033b8571995650ccee753d9458dfa0377be4dba91e4491da3f2be63"}, + {file = "aiohttp-3.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:69361bfdca5468c0488d7017b9b1e5ce769d40b46a9f4a2eed26b78619e9396c"}, + {file = "aiohttp-3.9.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0fa43c32d1643f518491d9d3a730f85f5bbaedcbd7fbcae27435bb8b7a061b29"}, + {file = "aiohttp-3.9.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:835a55b7ca49468aaaac0b217092dfdff370e6c215c9224c52f30daaa735c1c1"}, + {file = "aiohttp-3.9.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:06a9b2c8837d9a94fae16c6223acc14b4dfdff216ab9b7202e07a9a09541168f"}, + {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abf151955990d23f84205286938796c55ff11bbfb4ccfada8c9c83ae6b3c89a3"}, + {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59c26c95975f26e662ca78fdf543d4eeaef70e533a672b4113dd888bd2423caa"}, + {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f95511dd5d0e05fd9728bac4096319f80615aaef4acbecb35a990afebe953b0e"}, + {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:595f105710293e76b9dc09f52e0dd896bd064a79346234b521f6b968ffdd8e58"}, + {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7c8b816c2b5af5c8a436df44ca08258fc1a13b449393a91484225fcb7545533"}, + {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f1088fa100bf46e7b398ffd9904f4808a0612e1d966b4aa43baa535d1b6341eb"}, + {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f59dfe57bb1ec82ac0698ebfcdb7bcd0e99c255bd637ff613760d5f33e7c81b3"}, + {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:361a1026c9dd4aba0109e4040e2aecf9884f5cfe1b1b1bd3d09419c205e2e53d"}, + {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:363afe77cfcbe3a36353d8ea133e904b108feea505aa4792dad6585a8192c55a"}, + {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e2c45c208c62e955e8256949eb225bd8b66a4c9b6865729a786f2aa79b72e9d"}, + {file = "aiohttp-3.9.3-cp39-cp39-win32.whl", hash = "sha256:f7217af2e14da0856e082e96ff637f14ae45c10a5714b63c77f26d8884cf1051"}, + {file = "aiohttp-3.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:27468897f628c627230dba07ec65dc8d0db566923c48f29e084ce382119802bc"}, + {file = "aiohttp-3.9.3.tar.gz", hash = "sha256:90842933e5d1ff760fae6caca4b2b3edba53ba8f4b71e95dacf2818a2aca06f7"}, ] [package.dependencies] @@ -214,13 +214,13 @@ tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "p [[package]] name = "aws-sam-translator" -version = "1.83.0" +version = "1.84.0" description = "AWS SAM Translator is a library that transform SAM templates into AWS CloudFormation templates" optional = false python-versions = ">=3.8, <=4.0, !=4.0" files = [ - {file = "aws-sam-translator-1.83.0.tar.gz", hash = "sha256:46025ca8894a56eacd87eb0e4f9af5c01c567c9a734b97fbba353bffd56ba5dc"}, - {file = "aws_sam_translator-1.83.0-py3-none-any.whl", hash = "sha256:022246b4745cc9067f88ab2b051f49898606bcf0222e22b9da41063e5619c6f9"}, + {file = 
"aws-sam-translator-1.84.0.tar.gz", hash = "sha256:dbfd5669b5ef4bd7bc7af4775eec2ce4db61a2c2a17d721e67b51cf6a6dd63f9"}, + {file = "aws_sam_translator-1.84.0-py3-none-any.whl", hash = "sha256:a24f43e80095c79258a1f1c7a0b8169f55daf0b2bc237d5b9010b02ba86fa3bb"}, ] [package.dependencies] @@ -249,13 +249,13 @@ wrapt = "*" [[package]] name = "azure-core" -version = "1.29.7" +version = "1.30.0" description = "Microsoft Azure Core Library for Python" optional = true python-versions = ">=3.7" files = [ - {file = "azure-core-1.29.7.tar.gz", hash = "sha256:2944faf1a7ff1558b1f457cabf60f279869cabaeef86b353bed8eb032c7d8c5e"}, - {file = "azure_core-1.29.7-py3-none-any.whl", hash = "sha256:95a7b41b4af102e5fcdfac9500fcc82ff86e936c7145a099b7848b9ac0501250"}, + {file = "azure-core-1.30.0.tar.gz", hash = "sha256:6f3a7883ef184722f6bd997262eddaf80cfe7e5b3e0caaaf8db1695695893d35"}, + {file = "azure_core-1.30.0-py3-none-any.whl", hash = "sha256:3dae7962aad109610e68c9a7abb31d79720e1d982ddf61363038d175a5025e89"}, ] [package.dependencies] @@ -408,13 +408,13 @@ files = [ [[package]] name = "certifi" -version = "2023.11.17" +version = "2024.2.2" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"}, - {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"}, + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, ] [[package]] @@ -494,13 +494,13 @@ files = [ [[package]] name = "cfn-lint" -version = "0.84.0" +version = "0.85.0" description = "Checks CloudFormation templates for practices and behaviour that could potentially be improved" optional = false python-versions = ">=3.8, <=4.0, !=4.0" files = [ - {file = "cfn-lint-0.84.0.tar.gz", hash = "sha256:7a819ffa48ab23f775037ddb0d9330ba206d547439f69bc72f76f1183d8bc124"}, - {file = "cfn_lint-0.84.0-py3-none-any.whl", hash = "sha256:1bf18bca82c6666fd204a265a203bd307816e8d31fe476301b105c5370e306e5"}, + {file = "cfn-lint-0.85.0.tar.gz", hash = "sha256:64d6e8d85cdc573b61add78f9ff95a142a1834edb4793d1291551f6d953f73fe"}, + {file = "cfn_lint-0.85.0-py3-none-any.whl", hash = "sha256:e4849e1779bd1a9f4543617372708a20519b6d7cad5f980e20c6deaa227361a2"}, ] [package.dependencies] @@ -708,43 +708,43 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "42.0.0" +version = "42.0.2" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-42.0.0-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:c640b0ef54138fde761ec99a6c7dc4ce05e80420262c20fa239e694ca371d434"}, - {file = "cryptography-42.0.0-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:678cfa0d1e72ef41d48993a7be75a76b0725d29b820ff3cfd606a5b2b33fda01"}, - {file = "cryptography-42.0.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:146e971e92a6dd042214b537a726c9750496128453146ab0ee8971a0299dc9bd"}, - {file = "cryptography-42.0.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87086eae86a700307b544625e3ba11cc600c3c0ef8ab97b0fda0705d6db3d4e3"}, - {file = "cryptography-42.0.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:0a68bfcf57a6887818307600c3c0ebc3f62fbb6ccad2240aa21887cda1f8df1b"}, - {file = "cryptography-42.0.0-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:5a217bca51f3b91971400890905a9323ad805838ca3fa1e202a01844f485ee87"}, - {file = "cryptography-42.0.0-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:ca20550bb590db16223eb9ccc5852335b48b8f597e2f6f0878bbfd9e7314eb17"}, - {file = "cryptography-42.0.0-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:33588310b5c886dfb87dba5f013b8d27df7ffd31dc753775342a1e5ab139e59d"}, - {file = "cryptography-42.0.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9515ea7f596c8092fdc9902627e51b23a75daa2c7815ed5aa8cf4f07469212ec"}, - {file = "cryptography-42.0.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:35cf6ed4c38f054478a9df14f03c1169bb14bd98f0b1705751079b25e1cb58bc"}, - {file = "cryptography-42.0.0-cp37-abi3-win32.whl", hash = "sha256:8814722cffcfd1fbd91edd9f3451b88a8f26a5fd41b28c1c9193949d1c689dc4"}, - {file = "cryptography-42.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:a2a8d873667e4fd2f34aedab02ba500b824692c6542e017075a2efc38f60a4c0"}, - {file = "cryptography-42.0.0-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:8fedec73d590fd30c4e3f0d0f4bc961aeca8390c72f3eaa1a0874d180e868ddf"}, - {file = "cryptography-42.0.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be41b0c7366e5549265adf2145135dca107718fa44b6e418dc7499cfff6b4689"}, - {file = "cryptography-42.0.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ca482ea80626048975360c8e62be3ceb0f11803180b73163acd24bf014133a0"}, - {file = "cryptography-42.0.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:c58115384bdcfe9c7f644c72f10f6f42bed7cf59f7b52fe1bf7ae0a622b3a139"}, - {file = "cryptography-42.0.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:56ce0c106d5c3fec1038c3cca3d55ac320a5be1b44bf15116732d0bc716979a2"}, - {file = "cryptography-42.0.0-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:324721d93b998cb7367f1e6897370644751e5580ff9b370c0a50dc60a2003513"}, - {file = "cryptography-42.0.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:d97aae66b7de41cdf5b12087b5509e4e9805ed6f562406dfcf60e8481a9a28f8"}, - {file = "cryptography-42.0.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:85f759ed59ffd1d0baad296e72780aa62ff8a71f94dc1ab340386a1207d0ea81"}, - {file = "cryptography-42.0.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:206aaf42e031b93f86ad60f9f5d9da1b09164f25488238ac1dc488334eb5e221"}, - {file = "cryptography-42.0.0-cp39-abi3-win32.whl", hash = "sha256:74f18a4c8ca04134d2052a140322002fef535c99cdbc2a6afc18a8024d5c9d5b"}, - {file = "cryptography-42.0.0-cp39-abi3-win_amd64.whl", hash = 
"sha256:14e4b909373bc5bf1095311fa0f7fcabf2d1a160ca13f1e9e467be1ac4cbdf94"}, - {file = "cryptography-42.0.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3005166a39b70c8b94455fdbe78d87a444da31ff70de3331cdec2c568cf25b7e"}, - {file = "cryptography-42.0.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:be14b31eb3a293fc6e6aa2807c8a3224c71426f7c4e3639ccf1a2f3ffd6df8c3"}, - {file = "cryptography-42.0.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:bd7cf7a8d9f34cc67220f1195884151426ce616fdc8285df9054bfa10135925f"}, - {file = "cryptography-42.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c310767268d88803b653fffe6d6f2f17bb9d49ffceb8d70aed50ad45ea49ab08"}, - {file = "cryptography-42.0.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:bdce70e562c69bb089523e75ef1d9625b7417c6297a76ac27b1b8b1eb51b7d0f"}, - {file = "cryptography-42.0.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e9326ca78111e4c645f7e49cbce4ed2f3f85e17b61a563328c85a5208cf34440"}, - {file = "cryptography-42.0.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:69fd009a325cad6fbfd5b04c711a4da563c6c4854fc4c9544bff3088387c77c0"}, - {file = "cryptography-42.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:988b738f56c665366b1e4bfd9045c3efae89ee366ca3839cd5af53eaa1401bce"}, - {file = "cryptography-42.0.0.tar.gz", hash = "sha256:6cf9b76d6e93c62114bd19485e5cb003115c134cf9ce91f8ac924c44f8c8c3f4"}, + {file = "cryptography-42.0.2-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:701171f825dcab90969596ce2af253143b93b08f1a716d4b2a9d2db5084ef7be"}, + {file = "cryptography-42.0.2-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:61321672b3ac7aade25c40449ccedbc6db72c7f5f0fdf34def5e2f8b51ca530d"}, + {file = "cryptography-42.0.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea2c3ffb662fec8bbbfce5602e2c159ff097a4631d96235fcf0fb00e59e3ece4"}, + {file = "cryptography-42.0.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b15c678f27d66d247132cbf13df2f75255627bcc9b6a570f7d2fd08e8c081d2"}, + {file = "cryptography-42.0.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:8e88bb9eafbf6a4014d55fb222e7360eef53e613215085e65a13290577394529"}, + {file = "cryptography-42.0.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a047682d324ba56e61b7ea7c7299d51e61fd3bca7dad2ccc39b72bd0118d60a1"}, + {file = "cryptography-42.0.2-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:36d4b7c4be6411f58f60d9ce555a73df8406d484ba12a63549c88bd64f7967f1"}, + {file = "cryptography-42.0.2-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:a00aee5d1b6c20620161984f8ab2ab69134466c51f58c052c11b076715e72929"}, + {file = "cryptography-42.0.2-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:b97fe7d7991c25e6a31e5d5e795986b18fbbb3107b873d5f3ae6dc9a103278e9"}, + {file = "cryptography-42.0.2-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:5fa82a26f92871eca593b53359c12ad7949772462f887c35edaf36f87953c0e2"}, + {file = "cryptography-42.0.2-cp37-abi3-win32.whl", hash = "sha256:4b063d3413f853e056161eb0c7724822a9740ad3caa24b8424d776cebf98e7ee"}, + {file = "cryptography-42.0.2-cp37-abi3-win_amd64.whl", hash = "sha256:841ec8af7a8491ac76ec5a9522226e287187a3107e12b7d686ad354bb78facee"}, + {file = "cryptography-42.0.2-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:55d1580e2d7e17f45d19d3b12098e352f3a37fe86d380bf45846ef257054b242"}, + {file = "cryptography-42.0.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:28cb2c41f131a5758d6ba6a0504150d644054fd9f3203a1e8e8d7ac3aea7f73a"}, + {file = "cryptography-42.0.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9097a208875fc7bbeb1286d0125d90bdfed961f61f214d3f5be62cd4ed8a446"}, + {file = "cryptography-42.0.2-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:44c95c0e96b3cb628e8452ec060413a49002a247b2b9938989e23a2c8291fc90"}, + {file = "cryptography-42.0.2-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:2f9f14185962e6a04ab32d1abe34eae8a9001569ee4edb64d2304bf0d65c53f3"}, + {file = "cryptography-42.0.2-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:09a77e5b2e8ca732a19a90c5bca2d124621a1edb5438c5daa2d2738bfeb02589"}, + {file = "cryptography-42.0.2-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:ad28cff53f60d99a928dfcf1e861e0b2ceb2bc1f08a074fdd601b314e1cc9e0a"}, + {file = "cryptography-42.0.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:130c0f77022b2b9c99d8cebcdd834d81705f61c68e91ddd614ce74c657f8b3ea"}, + {file = "cryptography-42.0.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:fa3dec4ba8fb6e662770b74f62f1a0c7d4e37e25b58b2bf2c1be4c95372b4a33"}, + {file = "cryptography-42.0.2-cp39-abi3-win32.whl", hash = "sha256:3dbd37e14ce795b4af61b89b037d4bc157f2cb23e676fa16932185a04dfbf635"}, + {file = "cryptography-42.0.2-cp39-abi3-win_amd64.whl", hash = "sha256:8a06641fb07d4e8f6c7dda4fc3f8871d327803ab6542e33831c7ccfdcb4d0ad6"}, + {file = "cryptography-42.0.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:087887e55e0b9c8724cf05361357875adb5c20dec27e5816b653492980d20380"}, + {file = "cryptography-42.0.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a7ef8dd0bf2e1d0a27042b231a3baac6883cdd5557036f5e8df7139255feaac6"}, + {file = "cryptography-42.0.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4383b47f45b14459cab66048d384614019965ba6c1a1a141f11b5a551cace1b2"}, + {file = "cryptography-42.0.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:fbeb725c9dc799a574518109336acccaf1303c30d45c075c665c0793c2f79a7f"}, + {file = "cryptography-42.0.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:320948ab49883557a256eab46149df79435a22d2fefd6a66fe6946f1b9d9d008"}, + {file = "cryptography-42.0.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5ef9bc3d046ce83c4bbf4c25e1e0547b9c441c01d30922d812e887dc5f125c12"}, + {file = "cryptography-42.0.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:52ed9ebf8ac602385126c9a2fe951db36f2cb0c2538d22971487f89d0de4065a"}, + {file = "cryptography-42.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:141e2aa5ba100d3788c0ad7919b288f89d1fe015878b9659b307c9ef867d3a65"}, + {file = "cryptography-42.0.2.tar.gz", hash = "sha256:e0ec52ba3c7f1b7d813cd52649a5b3ef1fc0d433219dc8c93827c57eab6cf888"}, ] [package.dependencies] @@ -1025,13 +1025,13 @@ typing = ["typing-extensions (>=4.8)"] [[package]] name = "flask" -version = "3.0.1" +version = "3.0.2" description = "A simple framework for building complex web applications." 
optional = false python-versions = ">=3.8" files = [ - {file = "flask-3.0.1-py3-none-any.whl", hash = "sha256:ca631a507f6dfe6c278ae20112cea3ff54ff2216390bf8880f6b035a5354af13"}, - {file = "flask-3.0.1.tar.gz", hash = "sha256:6489f51bb3666def6f314e15f19d50a1869a19ae0e8c9a3641ffe66c77d42403"}, + {file = "flask-3.0.2-py3-none-any.whl", hash = "sha256:3232e0e9c850d781933cf0207523d1ece087eb8d87b23777ae38456e2fbe7c6e"}, + {file = "flask-3.0.2.tar.gz", hash = "sha256:822c03f4b799204250a7ee84b1eddc40665395333973dfb9deebfe425fefcb7d"}, ] [package.dependencies] @@ -1243,13 +1243,13 @@ ray = ["packaging", "ray[client,data] (>=2.0.0)"] [[package]] name = "google-api-core" -version = "2.15.0" +version = "2.16.2" description = "Google API client core library" optional = true python-versions = ">=3.7" files = [ - {file = "google-api-core-2.15.0.tar.gz", hash = "sha256:abc978a72658f14a2df1e5e12532effe40f94f868f6e23d95133bd6abcca35ca"}, - {file = "google_api_core-2.15.0-py3-none-any.whl", hash = "sha256:2aa56d2be495551e66bbff7f729b790546f87d5c90e74781aa77233bcb395a8a"}, + {file = "google-api-core-2.16.2.tar.gz", hash = "sha256:032d37b45d1d6bdaf68fb11ff621e2593263a239fa9246e2e94325f9c47876d2"}, + {file = "google_api_core-2.16.2-py3-none-any.whl", hash = "sha256:449ca0e3f14c179b4165b664256066c7861610f70b6ffe54bb01a04e9b466929"}, ] [package.dependencies] @@ -1265,13 +1265,13 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] [[package]] name = "google-auth" -version = "2.26.2" +version = "2.27.0" description = "Google Authentication Library" optional = true python-versions = ">=3.7" files = [ - {file = "google-auth-2.26.2.tar.gz", hash = "sha256:97327dbbf58cccb58fc5a1712bba403ae76668e64814eb30f7316f7e27126b81"}, - {file = "google_auth-2.26.2-py2.py3-none-any.whl", hash = "sha256:3f445c8ce9b61ed6459aad86d8ccdba4a9afed841b2d1451a11ef4db08957424"}, + {file = "google-auth-2.27.0.tar.gz", hash = "sha256:e863a56ccc2d8efa83df7a80272601e43487fa9a728a376205c86c26aaefa821"}, + {file = "google_auth-2.27.0-py2.py3-none-any.whl", hash = "sha256:8e4bad367015430ff253fe49d500fdc3396c1a434db5740828c728e45bcce245"}, ] [package.dependencies] @@ -1541,6 +1541,20 @@ files = [ docs = ["Sphinx", "furo"] test = ["objgraph", "psutil"] +[[package]] +name = "griffe" +version = "0.40.0" +description = "Signatures for entire Python programs. Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API." +optional = false +python-versions = ">=3.8" +files = [ + {file = "griffe-0.40.0-py3-none-any.whl", hash = "sha256:db1da6d1d8e08cbb20f1a7dee8c09da940540c2d4c1bfa26a9091cf6fc36a9ec"}, + {file = "griffe-0.40.0.tar.gz", hash = "sha256:76c4439eaa2737af46ae003c331ab6ca79c5365b552f7b5aed263a3b4125735b"}, +] + +[package.dependencies] +colorama = ">=0.4" + [[package]] name = "identify" version = "2.5.33" @@ -1876,71 +1890,71 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] [[package]] name = "markupsafe" -version = "2.1.4" +version = "2.1.5" description = "Safely add untrusted strings to HTML/XML markup." 
optional = false python-versions = ">=3.7" files = [ - {file = "MarkupSafe-2.1.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:de8153a7aae3835484ac168a9a9bdaa0c5eee4e0bc595503c95d53b942879c84"}, - {file = "MarkupSafe-2.1.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e888ff76ceb39601c59e219f281466c6d7e66bd375b4ec1ce83bcdc68306796b"}, - {file = "MarkupSafe-2.1.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0b838c37ba596fcbfca71651a104a611543077156cb0a26fe0c475e1f152ee8"}, - {file = "MarkupSafe-2.1.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac1ebf6983148b45b5fa48593950f90ed6d1d26300604f321c74a9ca1609f8e"}, - {file = "MarkupSafe-2.1.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbad3d346df8f9d72622ac71b69565e621ada2ce6572f37c2eae8dacd60385d"}, - {file = "MarkupSafe-2.1.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d5291d98cd3ad9a562883468c690a2a238c4a6388ab3bd155b0c75dd55ece858"}, - {file = "MarkupSafe-2.1.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a7cc49ef48a3c7a0005a949f3c04f8baa5409d3f663a1b36f0eba9bfe2a0396e"}, - {file = "MarkupSafe-2.1.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b83041cda633871572f0d3c41dddd5582ad7d22f65a72eacd8d3d6d00291df26"}, - {file = "MarkupSafe-2.1.4-cp310-cp310-win32.whl", hash = "sha256:0c26f67b3fe27302d3a412b85ef696792c4a2386293c53ba683a89562f9399b0"}, - {file = "MarkupSafe-2.1.4-cp310-cp310-win_amd64.whl", hash = "sha256:a76055d5cb1c23485d7ddae533229039b850db711c554a12ea64a0fd8a0129e2"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9e9e3c4020aa2dc62d5dd6743a69e399ce3de58320522948af6140ac959ab863"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0042d6a9880b38e1dd9ff83146cc3c9c18a059b9360ceae207805567aacccc69"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55d03fea4c4e9fd0ad75dc2e7e2b6757b80c152c032ea1d1de487461d8140efc"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ab3a886a237f6e9c9f4f7d272067e712cdb4efa774bef494dccad08f39d8ae6"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abf5ebbec056817057bfafc0445916bb688a255a5146f900445d081db08cbabb"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e1a0d1924a5013d4f294087e00024ad25668234569289650929ab871231668e7"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e7902211afd0af05fbadcc9a312e4cf10f27b779cf1323e78d52377ae4b72bea"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c669391319973e49a7c6230c218a1e3044710bc1ce4c8e6eb71f7e6d43a2c131"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-win32.whl", hash = "sha256:31f57d64c336b8ccb1966d156932f3daa4fee74176b0fdc48ef580be774aae74"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-win_amd64.whl", hash = "sha256:54a7e1380dfece8847c71bf7e33da5d084e9b889c75eca19100ef98027bd9f56"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:a76cd37d229fc385738bd1ce4cba2a121cf26b53864c1772694ad0ad348e509e"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:987d13fe1d23e12a66ca2073b8d2e2a75cec2ecb8eab43ff5624ba0ad42764bc"}, - {file = 
"MarkupSafe-2.1.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5244324676254697fe5c181fc762284e2c5fceeb1c4e3e7f6aca2b6f107e60dc"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78bc995e004681246e85e28e068111a4c3f35f34e6c62da1471e844ee1446250"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a4d176cfdfde84f732c4a53109b293d05883e952bbba68b857ae446fa3119b4f"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:f9917691f410a2e0897d1ef99619fd3f7dd503647c8ff2475bf90c3cf222ad74"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:f06e5a9e99b7df44640767842f414ed5d7bedaaa78cd817ce04bbd6fd86e2dd6"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:396549cea79e8ca4ba65525470d534e8a41070e6b3500ce2414921099cb73e8d"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-win32.whl", hash = "sha256:f6be2d708a9d0e9b0054856f07ac7070fbe1754be40ca8525d5adccdbda8f475"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-win_amd64.whl", hash = "sha256:5045e892cfdaecc5b4c01822f353cf2c8feb88a6ec1c0adef2a2e705eef0f656"}, - {file = "MarkupSafe-2.1.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7a07f40ef8f0fbc5ef1000d0c78771f4d5ca03b4953fc162749772916b298fc4"}, - {file = "MarkupSafe-2.1.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d18b66fe626ac412d96c2ab536306c736c66cf2a31c243a45025156cc190dc8a"}, - {file = "MarkupSafe-2.1.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:698e84142f3f884114ea8cf83e7a67ca8f4ace8454e78fe960646c6c91c63bfa"}, - {file = "MarkupSafe-2.1.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49a3b78a5af63ec10d8604180380c13dcd870aba7928c1fe04e881d5c792dc4e"}, - {file = "MarkupSafe-2.1.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:15866d7f2dc60cfdde12ebb4e75e41be862348b4728300c36cdf405e258415ec"}, - {file = "MarkupSafe-2.1.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:6aa5e2e7fc9bc042ae82d8b79d795b9a62bd8f15ba1e7594e3db243f158b5565"}, - {file = "MarkupSafe-2.1.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:54635102ba3cf5da26eb6f96c4b8c53af8a9c0d97b64bdcb592596a6255d8518"}, - {file = "MarkupSafe-2.1.4-cp37-cp37m-win32.whl", hash = "sha256:3583a3a3ab7958e354dc1d25be74aee6228938312ee875a22330c4dc2e41beb0"}, - {file = "MarkupSafe-2.1.4-cp37-cp37m-win_amd64.whl", hash = "sha256:d6e427c7378c7f1b2bef6a344c925b8b63623d3321c09a237b7cc0e77dd98ceb"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:bf1196dcc239e608605b716e7b166eb5faf4bc192f8a44b81e85251e62584bd2"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4df98d4a9cd6a88d6a585852f56f2155c9cdb6aec78361a19f938810aa020954"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b835aba863195269ea358cecc21b400276747cc977492319fd7682b8cd2c253d"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23984d1bdae01bee794267424af55eef4dfc038dc5d1272860669b2aa025c9e3"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c98c33ffe20e9a489145d97070a435ea0679fddaabcafe19982fe9c971987d5"}, - {file = 
"MarkupSafe-2.1.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9896fca4a8eb246defc8b2a7ac77ef7553b638e04fbf170bff78a40fa8a91474"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b0fe73bac2fed83839dbdbe6da84ae2a31c11cfc1c777a40dbd8ac8a6ed1560f"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c7556bafeaa0a50e2fe7dc86e0382dea349ebcad8f010d5a7dc6ba568eaaa789"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-win32.whl", hash = "sha256:fc1a75aa8f11b87910ffd98de62b29d6520b6d6e8a3de69a70ca34dea85d2a8a"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-win_amd64.whl", hash = "sha256:3a66c36a3864df95e4f62f9167c734b3b1192cb0851b43d7cc08040c074c6279"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:765f036a3d00395a326df2835d8f86b637dbaf9832f90f5d196c3b8a7a5080cb"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:21e7af8091007bf4bebf4521184f4880a6acab8df0df52ef9e513d8e5db23411"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5c31fe855c77cad679b302aabc42d724ed87c043b1432d457f4976add1c2c3e"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7653fa39578957bc42e5ebc15cf4361d9e0ee4b702d7d5ec96cdac860953c5b4"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:47bb5f0142b8b64ed1399b6b60f700a580335c8e1c57f2f15587bd072012decc"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:fe8512ed897d5daf089e5bd010c3dc03bb1bdae00b35588c49b98268d4a01e00"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:36d7626a8cca4d34216875aee5a1d3d654bb3dac201c1c003d182283e3205949"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b6f14a9cd50c3cb100eb94b3273131c80d102e19bb20253ac7bd7336118a673a"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-win32.whl", hash = "sha256:c8f253a84dbd2c63c19590fa86a032ef3d8cc18923b8049d91bcdeeb2581fbf6"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-win_amd64.whl", hash = "sha256:8b570a1537367b52396e53325769608f2a687ec9a4363647af1cded8928af959"}, - {file = "MarkupSafe-2.1.4.tar.gz", hash = "sha256:3aae9af4cac263007fd6309c64c6ab4506dd2b79382d9d19a1994f9240b8db4f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = 
"MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = 
"sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = 
"MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, ] [[package]] @@ -2178,85 +2192,101 @@ files = [ [[package]] name = "multidict" -version = "6.0.4" +version = "6.0.5" description = "multidict implementation" optional = true python-versions = ">=3.7" files = [ - {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8"}, - {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeb6dcc05e911516ae3d1f207d4b0520d07f54484c49dfc294d6e7d63b734171"}, - {file = "multidict-6.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d6d635d5209b82a3492508cf5b365f3446afb65ae7ebd755e70e18f287b0adf7"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c048099e4c9e9d615545e2001d3d8a4380bd403e1a0578734e0d31703d1b0c0b"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea20853c6dbbb53ed34cb4d080382169b6f4554d394015f1bef35e881bf83547"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16d232d4e5396c2efbbf4f6d4df89bfa905eb0d4dc5b3549d872ab898451f569"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36c63aaa167f6c6b04ef2c85704e93af16c11d20de1d133e39de6a0e84582a93"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:64bdf1086b6043bf519869678f5f2757f473dee970d7abf6da91ec00acb9cb98"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:43644e38f42e3af682690876cff722d301ac585c5b9e1eacc013b7a3f7b696a0"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7582a1d1030e15422262de9f58711774e02fa80df0d1578995c76214f6954988"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ddff9c4e225a63a5afab9dd15590432c22e8057e1a9a13d28ed128ecf047bbdc"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ee2a1ece51b9b9e7752e742cfb661d2a29e7bcdba2d27e66e28a99f1890e4fa0"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a2e4369eb3d47d2034032a26c7a80fcb21a2cb22e1173d761a162f11e562caa5"}, - {file = "multidict-6.0.4-cp310-cp310-win32.whl", hash = 
"sha256:574b7eae1ab267e5f8285f0fe881f17efe4b98c39a40858247720935b893bba8"}, - {file = "multidict-6.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:4dcbb0906e38440fa3e325df2359ac6cb043df8e58c965bb45f4e406ecb162cc"}, - {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0dfad7a5a1e39c53ed00d2dd0c2e36aed4650936dc18fd9a1826a5ae1cad6f03"}, - {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:64da238a09d6039e3bd39bb3aee9c21a5e34f28bfa5aa22518581f910ff94af3"}, - {file = "multidict-6.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ff959bee35038c4624250473988b24f846cbeb2c6639de3602c073f10410ceba"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01a3a55bd90018c9c080fbb0b9f4891db37d148a0a18722b42f94694f8b6d4c9"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5cb09abb18c1ea940fb99360ea0396f34d46566f157122c92dfa069d3e0e982"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666daae833559deb2d609afa4490b85830ab0dfca811a98b70a205621a6109fe"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11bdf3f5e1518b24530b8241529d2050014c884cf18b6fc69c0c2b30ca248710"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d18748f2d30f94f498e852c67d61261c643b349b9d2a581131725595c45ec6c"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:458f37be2d9e4c95e2d8866a851663cbc76e865b78395090786f6cd9b3bbf4f4"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b1a2eeedcead3a41694130495593a559a668f382eee0727352b9a41e1c45759a"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7d6ae9d593ef8641544d6263c7fa6408cc90370c8cb2bbb65f8d43e5b0351d9c"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5979b5632c3e3534e42ca6ff856bb24b2e3071b37861c2c727ce220d80eee9ed"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dcfe792765fab89c365123c81046ad4103fcabbc4f56d1c1997e6715e8015461"}, - {file = "multidict-6.0.4-cp311-cp311-win32.whl", hash = "sha256:3601a3cece3819534b11d4efc1eb76047488fddd0c85a3948099d5da4d504636"}, - {file = "multidict-6.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:81a4f0b34bd92df3da93315c6a59034df95866014ac08535fc819f043bfd51f0"}, - {file = "multidict-6.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:67040058f37a2a51ed8ea8f6b0e6ee5bd78ca67f169ce6122f3e2ec80dfe9b78"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:853888594621e6604c978ce2a0444a1e6e70c8d253ab65ba11657659dcc9100f"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:39ff62e7d0f26c248b15e364517a72932a611a9b75f35b45be078d81bdb86603"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af048912e045a2dc732847d33821a9d84ba553f5c5f028adbd364dd4765092ac"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e8b901e607795ec06c9e42530788c45ac21ef3aaa11dbd0c69de543bfb79a9"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:62501642008a8b9871ddfccbf83e4222cf8ac0d5aeedf73da36153ef2ec222d2"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:99b76c052e9f1bc0721f7541e5e8c05db3941eb9ebe7b8553c625ef88d6eefde"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:509eac6cf09c794aa27bcacfd4d62c885cce62bef7b2c3e8b2e49d365b5003fe"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:21a12c4eb6ddc9952c415f24eef97e3e55ba3af61f67c7bc388dcdec1404a067"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:5cad9430ab3e2e4fa4a2ef4450f548768400a2ac635841bc2a56a2052cdbeb87"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab55edc2e84460694295f401215f4a58597f8f7c9466faec545093045476327d"}, - {file = "multidict-6.0.4-cp37-cp37m-win32.whl", hash = "sha256:5a4dcf02b908c3b8b17a45fb0f15b695bf117a67b76b7ad18b73cf8e92608775"}, - {file = "multidict-6.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6ed5f161328b7df384d71b07317f4d8656434e34591f20552c7bcef27b0ab88e"}, - {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5fc1b16f586f049820c5c5b17bb4ee7583092fa0d1c4e28b5239181ff9532e0c"}, - {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1502e24330eb681bdaa3eb70d6358e818e8e8f908a22a1851dfd4e15bc2f8161"}, - {file = "multidict-6.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b692f419760c0e65d060959df05f2a531945af31fda0c8a3b3195d4efd06de11"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45e1ecb0379bfaab5eef059f50115b54571acfbe422a14f668fc8c27ba410e7e"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddd3915998d93fbcd2566ddf9cf62cdb35c9e093075f862935573d265cf8f65d"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:59d43b61c59d82f2effb39a93c48b845efe23a3852d201ed2d24ba830d0b4cf2"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc8e1d0c705233c5dd0c5e6460fbad7827d5d36f310a0fadfd45cc3029762258"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6aa0418fcc838522256761b3415822626f866758ee0bc6632c9486b179d0b52"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6748717bb10339c4760c1e63da040f5f29f5ed6e59d76daee30305894069a660"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4d1a3d7ef5e96b1c9e92f973e43aa5e5b96c659c9bc3124acbbd81b0b9c8a951"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4372381634485bec7e46718edc71528024fcdc6f835baefe517b34a33c731d60"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:fc35cb4676846ef752816d5be2193a1e8367b4c1397b74a565a9d0389c433a1d"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b9d9e4e2b37daddb5c23ea33a3417901fa7c7b3dee2d855f63ee67a0b21e5b1"}, - {file = "multidict-6.0.4-cp38-cp38-win32.whl", hash = "sha256:e41b7e2b59679edfa309e8db64fdf22399eec4b0b24694e1b2104fb789207779"}, - {file = "multidict-6.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:d6c254ba6e45d8e72739281ebc46ea5eb5f101234f3ce171f0e9f5cc86991480"}, - {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16ab77bbeb596e14212e7bab8429f24c1579234a3a462105cda4a66904998664"}, - 
{file = "multidict-6.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc779e9e6f7fda81b3f9aa58e3a6091d49ad528b11ed19f6621408806204ad35"}, - {file = "multidict-6.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ceef517eca3e03c1cceb22030a3e39cb399ac86bff4e426d4fc6ae49052cc60"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:281af09f488903fde97923c7744bb001a9b23b039a909460d0f14edc7bf59706"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52f2dffc8acaba9a2f27174c41c9e57f60b907bb9f096b36b1a1f3be71c6284d"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b41156839806aecb3641f3208c0dafd3ac7775b9c4c422d82ee2a45c34ba81ca"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3fc56f88cc98ef8139255cf8cd63eb2c586531e43310ff859d6bb3a6b51f1"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8316a77808c501004802f9beebde51c9f857054a0c871bd6da8280e718444449"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f70b98cd94886b49d91170ef23ec5c0e8ebb6f242d734ed7ed677b24d50c82cf"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bf6774e60d67a9efe02b3616fee22441d86fab4c6d335f9d2051d19d90a40063"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e69924bfcdda39b722ef4d9aa762b2dd38e4632b3641b1d9a57ca9cd18f2f83a"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:6b181d8c23da913d4ff585afd1155a0e1194c0b50c54fcfe286f70cdaf2b7176"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:52509b5be062d9eafc8170e53026fbc54cf3b32759a23d07fd935fb04fc22d95"}, - {file = "multidict-6.0.4-cp39-cp39-win32.whl", hash = "sha256:27c523fbfbdfd19c6867af7346332b62b586eed663887392cff78d614f9ec313"}, - {file = "multidict-6.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:33029f5734336aa0d4c0384525da0387ef89148dc7191aae00ca5fb23d7aafc2"}, - {file = "multidict-6.0.4.tar.gz", hash = "sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49"}, + {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"}, + {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"}, + {file = "multidict-6.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc"}, + {file = "multidict-6.0.5-cp310-cp310-win32.whl", hash = "sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319"}, + {file = "multidict-6.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85f67aed7bb647f93e7520633d8f51d3cbc6ab96957c71272b286b2f30dc70ed"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425bf820055005bfc8aa9a0b99ccb52cc2f4070153e34b701acc98d201693733"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3eb1ceec286eba8220c26f3b0096cf189aea7057b6e7b7a2e60ed36b373b77f"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7901c05ead4b3fb75113fb1dd33eb1253c6d3ee37ce93305acd9d38e0b5f21a4"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e0e79d91e71b9867c73323a3444724d496c037e578a0e1755ae159ba14f4f3d1"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:29bfeb0dff5cb5fdab2023a7a9947b3b4af63e9c47cae2a10ad58394b517fddc"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e030047e85cbcedbfc073f71836d62dd5dadfbe7531cae27789ff66bc551bd5e"}, + {file = "multidict-6.0.5-cp311-cp311-win32.whl", hash = "sha256:2f4848aa3baa109e6ab81fe2006c77ed4d3cd1e0ac2c1fbddb7b1277c168788c"}, + {file = "multidict-6.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:2faa5ae9376faba05f630d7e5e6be05be22913782b927b19d12b8145968a85ea"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_universal2.whl", hash = 
"sha256:51d035609b86722963404f711db441cf7134f1889107fb171a970c9701f92e1e"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cbebcd5bcaf1eaf302617c114aa67569dd3f090dd0ce8ba9e35e9985b41ac35b"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ffc42c922dbfddb4a4c3b438eb056828719f07608af27d163191cb3e3aa6cc5"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ceb3b7e6a0135e092de86110c5a74e46bda4bd4fbfeeb3a3bcec79c0f861e450"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79660376075cfd4b2c80f295528aa6beb2058fd289f4c9252f986751a4cd0496"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4428b29611e989719874670fd152b6625500ad6c686d464e99f5aaeeaca175a"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d84a5c3a5f7ce6db1f999fb9438f686bc2e09d38143f2d93d8406ed2dd6b9226"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c0de87358b192de7ea9649beb392f107dcad9ad27276324c24c91774ca5271"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:79a6d2ba910adb2cbafc95dad936f8b9386e77c84c35bc0add315b856d7c3abb"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:92d16a3e275e38293623ebf639c471d3e03bb20b8ebb845237e0d3664914caef"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fb616be3538599e797a2017cccca78e354c767165e8858ab5116813146041a24"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:14c2976aa9038c2629efa2c148022ed5eb4cb939e15ec7aace7ca932f48f9ba6"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:435a0984199d81ca178b9ae2c26ec3d49692d20ee29bc4c11a2a8d4514c67eda"}, + {file = "multidict-6.0.5-cp312-cp312-win32.whl", hash = "sha256:9fe7b0653ba3d9d65cbe7698cca585bf0f8c83dbbcc710db9c90f478e175f2d5"}, + {file = "multidict-6.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:01265f5e40f5a17f8241d52656ed27192be03bfa8764d88e8220141d1e4b3556"}, + {file = "multidict-6.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:19fe01cea168585ba0f678cad6f58133db2aa14eccaf22f88e4a6dccadfad8b3"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf7a982604375a8d49b6cc1b781c1747f243d91b81035a9b43a2126c04766f5"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:107c0cdefe028703fb5dafe640a409cb146d44a6ae201e55b35a4af8e95457dd"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:403c0911cd5d5791605808b942c88a8155c2592e05332d2bf78f18697a5fa15e"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aeaf541ddbad8311a87dd695ed9642401131ea39ad7bc8cf3ef3967fd093b626"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4972624066095e52b569e02b5ca97dbd7a7ddd4294bf4e7247d52635630dd83"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d946b0a9eb8aaa590df1fe082cee553ceab173e6cb5b03239716338629c50c7a"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = 
"sha256:b55358304d7a73d7bdf5de62494aaf70bd33015831ffd98bc498b433dfe5b10c"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:a3145cb08d8625b2d3fee1b2d596a8766352979c9bffe5d7833e0503d0f0b5e5"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d65f25da8e248202bd47445cec78e0025c0fe7582b23ec69c3b27a640dd7a8e3"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c9bf56195c6bbd293340ea82eafd0071cb3d450c703d2c93afb89f93b8386ccc"}, + {file = "multidict-6.0.5-cp37-cp37m-win32.whl", hash = "sha256:69db76c09796b313331bb7048229e3bee7928eb62bab5e071e9f7fcc4879caee"}, + {file = "multidict-6.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:fce28b3c8a81b6b36dfac9feb1de115bab619b3c13905b419ec71d03a3fc1423"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76f067f5121dcecf0d63a67f29080b26c43c71a98b10c701b0677e4a065fbd54"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b82cc8ace10ab5bd93235dfaab2021c70637005e1ac787031f4d1da63d493c1d"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cb241881eefd96b46f89b1a056187ea8e9ba14ab88ba632e68d7a2ecb7aadf7"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e94e6912639a02ce173341ff62cc1201232ab86b8a8fcc05572741a5dc7d93"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09a892e4a9fb47331da06948690ae38eaa2426de97b4ccbfafbdcbe5c8f37ff8"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55205d03e8a598cfc688c71ca8ea5f66447164efff8869517f175ea632c7cb7b"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37b15024f864916b4951adb95d3a80c9431299080341ab9544ed148091b53f50"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2a1dee728b52b33eebff5072817176c172050d44d67befd681609b4746e1c2e"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:edd08e6f2f1a390bf137080507e44ccc086353c8e98c657e666c017718561b89"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:60d698e8179a42ec85172d12f50b1668254628425a6bd611aba022257cac1386"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3d25f19500588cbc47dc19081d78131c32637c25804df8414463ec908631e453"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4cc0ef8b962ac7a5e62b9e826bd0cd5040e7d401bc45a6835910ed699037a461"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:eca2e9d0cc5a889850e9bbd68e98314ada174ff6ccd1129500103df7a94a7a44"}, + {file = "multidict-6.0.5-cp38-cp38-win32.whl", hash = "sha256:4a6a4f196f08c58c59e0b8ef8ec441d12aee4125a7d4f4fef000ccb22f8d7241"}, + {file = "multidict-6.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:0275e35209c27a3f7951e1ce7aaf93ce0d163b28948444bec61dd7badc6d3f8c"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e7be68734bd8c9a513f2b0cfd508802d6609da068f40dc57d4e3494cefc92929"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1d9ea7a7e779d7a3561aade7d596649fbecfa5c08a7674b11b423783217933f9"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ea1456df2a27c73ce51120fa2f519f1bea2f4a03a917f4a43c8707cf4cbbae1a"}, + {file 
= "multidict-6.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf590b134eb70629e350691ecca88eac3e3b8b3c86992042fb82e3cb1830d5e1"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c0631926c4f58e9a5ccce555ad7747d9a9f8b10619621f22f9635f069f6233e"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dce1c6912ab9ff5f179eaf6efe7365c1f425ed690b03341911bf4939ef2f3046"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0868d64af83169e4d4152ec612637a543f7a336e4a307b119e98042e852ad9c"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:141b43360bfd3bdd75f15ed811850763555a251e38b2405967f8e25fb43f7d40"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7df704ca8cf4a073334e0427ae2345323613e4df18cc224f647f251e5e75a527"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6214c5a5571802c33f80e6c84713b2c79e024995b9c5897f794b43e714daeec9"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd6c8fca38178e12c00418de737aef1261576bd1b6e8c6134d3e729a4e858b38"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e02021f87a5b6932fa6ce916ca004c4d441509d33bbdbeca70d05dff5e9d2479"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ebd8d160f91a764652d3e51ce0d2956b38efe37c9231cd82cfc0bed2e40b581c"}, + {file = "multidict-6.0.5-cp39-cp39-win32.whl", hash = "sha256:04da1bb8c8dbadf2a18a452639771951c662c5ad03aefe4884775454be322c9b"}, + {file = "multidict-6.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:d6f6d4f185481c9669b9447bf9d9cf3b95a0e9df9d169bbc17e363b7d5487755"}, + {file = "multidict-6.0.5-py3-none-any.whl", hash = "sha256:0d63c74e3d7ab26de115c49bffc92cc77ed23395303d496eae515d4204a625e7"}, + {file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"}, ] [[package]] @@ -2342,6 +2372,51 @@ files = [ {file = "numpy-1.24.4.tar.gz", hash = "sha256:80f5e3a4e498641401868df4208b74581206afbee7cf7b8329daae82676d9463"}, ] +[[package]] +name = "numpy" +version = "1.26.4" +description = "Fundamental package for array computing in Python" +optional = true +python-versions = ">=3.9" +files = [ + {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, + {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, + {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, + {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", 
hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, + {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, + {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, + {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, + {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, + {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, + {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = 
"sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, + {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, +] + [[package]] name = "oauthlib" version = "3.2.2" @@ -2442,8 +2517,8 @@ files = [ [package.dependencies] numpy = [ {version = ">=1.20.3", markers = "python_version < \"3.10\""}, - {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, {version = ">=1.21.0", markers = "python_version >= \"3.10\" and python_version < \"3.11\""}, + {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, ] python-dateutil = ">=2.8.2" pytz = ">=2020.1" @@ -2507,28 +2582,28 @@ files = [ [[package]] name = "platformdirs" -version = "4.1.0" +version = "4.2.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.1.0-py3-none-any.whl", hash = "sha256:11c8f37bcca40db96d8144522d925583bdb7a31f7b0e37e3ed4318400a8e2380"}, - {file = "platformdirs-4.1.0.tar.gz", hash = "sha256:906d548203468492d432bcb294d4bc2fff751bf84971fbb2c10918cc206ee420"}, + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, ] [package.extras] -docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] [[package]] name = "pluggy" -version = "1.3.0" +version = "1.4.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" files = [ - {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, - {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, ] [package.extras] @@ -3171,13 +3246,13 @@ files = [ [[package]] name = "pytz" -version = "2023.3.post1" +version = "2024.1" description = "World timezone definitions, modern and historical" optional = true python-versions = "*" files = [ - {file = "pytz-2023.3.post1-py2.py3-none-any.whl", hash = "sha256:ce42d816b81b68506614c11e8937d3aa9e41007ceb50bfdcb0749b921bf646c7"}, - {file = "pytz-2023.3.post1.tar.gz", hash = "sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b"}, + {file = 
"pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, + {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, ] [[package]] @@ -3320,13 +3395,13 @@ tune = ["fsspec", "pandas", "pyarrow (>=6.0.1)", "requests", "tensorboardX (>=1. [[package]] name = "referencing" -version = "0.32.1" +version = "0.33.0" description = "JSON Referencing + Python" optional = false python-versions = ">=3.8" files = [ - {file = "referencing-0.32.1-py3-none-any.whl", hash = "sha256:7e4dc12271d8e15612bfe35792f5ea1c40970dadf8624602e33db2758f7ee554"}, - {file = "referencing-0.32.1.tar.gz", hash = "sha256:3c57da0513e9563eb7e203ebe9bb3a1b509b042016433bd1e45a2853466c3dd3"}, + {file = "referencing-0.33.0-py3-none-any.whl", hash = "sha256:39240f2ecc770258f28b642dd47fd74bc8b02484de54e1882b74b35ebd779bd5"}, + {file = "referencing-0.33.0.tar.gz", hash = "sha256:c775fedf74bc0f9189c2a3be1c12fd03e8c23f4d371dce795df44e06c5b412f7"}, ] [package.dependencies] @@ -4320,4 +4395,4 @@ zstandard = ["zstandard"] [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "d063b6db5f333e4cf881a9778b692e1a9d7926d629141cc8d103520c1fccc6f5" +content-hash = "1b214065fdb37dace3ed6121e96276f73e86c6a993d49d9bc54a36e33b04fc6a" diff --git a/pyproject.toml b/pyproject.toml index 271799412..375884fda 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -85,11 +85,16 @@ typing-extensions = "4.9.0" pytest-mock = "3.12.0" pyspark = "3.5.0" cython = "3.0.8" +griffe = "^0.40.0" [[tool.mypy.overrides]] module = "pytest_mock.*" ignore_missing_imports = true +[[tool.mypy.overrides]] +module = "griffe.*" +ignore_missing_imports = true + [tool.poetry.scripts] pyiceberg = "pyiceberg.cli.console:run" diff --git a/tests/test_api.py b/tests/test_api.py new file mode 100644 index 000000000..86736e8ae --- /dev/null +++ b/tests/test_api.py @@ -0,0 +1,41 @@ +import re +import subprocess + +import griffe +from griffe.exceptions import GitError + + +def test_breaking_change() -> None: + fetch_process = subprocess.run( + ["git", "fetch", "--tags", "upstream"], + check=False, + ) + if fetch_process.returncode != 0: + raise GitError("Cannot fetch Git tags from upstream") + + tag_process = subprocess.run( + ["git", "tag", "-l", "--sort=-version:refname"], + text=True, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + check=False, + encoding='utf-8', + ) + output = tag_process.stdout.strip() + if tag_process.returncode != 0 or not output: + raise GitError(f"Cannot list Git tags from upstream: {output or 'no tags'}") + tags = output.split("\n") + + p = re.compile(r"^pyiceberg-\d+.\d+.\d+$") + + releases = [t for t in tags if p.match(t)] + + previous = griffe.load_git("pyiceberg", ref=releases[0]) + current = griffe.load_git("pyiceberg") + + breaking_changes = [] + + for breakage in griffe.find_breaking_changes(previous, current): + breaking_changes.append(breakage.as_dict()) + + assert not breaking_changes From 4bf7f78caf3e1b4eb7654e071b788128152d9957 Mon Sep 17 00:00:00 2001 From: Sung Yun <107272191+syun64@users.noreply.github.com> Date: Thu, 8 Feb 2024 14:29:45 +0000 Subject: [PATCH 02/25] license --- mkdocs/gen_doc_stubs.py | 2 +- tests/test_api.py | 17 +++++++++++++++++ 2 files changed, 18 insertions(+), 1 deletion(-) diff --git a/mkdocs/gen_doc_stubs.py b/mkdocs/gen_doc_stubs.py index bd3b12810..b12626325 100644 --- a/mkdocs/gen_doc_stubs.py +++ b/mkdocs/gen_doc_stubs.py @@ -17,7 +17,7 @@ from pathlib import Path -import griffe # 
type: ignore +import griffe import mkdocs_gen_files # type: ignore nav = mkdocs_gen_files.Nav() diff --git a/tests/test_api.py b/tests/test_api.py index 86736e8ae..f1363f0dd 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + import re import subprocess From 5f5b5792d8dc8df8817bab3b7b1b93e724ceba91 Mon Sep 17 00:00:00 2001 From: Sung Yun <107272191+syun64@users.noreply.github.com> Date: Thu, 8 Feb 2024 15:05:59 +0000 Subject: [PATCH 03/25] handle upstream --- tests/test_api.py | 29 ++++++++++++++++++++++------- 1 file changed, 22 insertions(+), 7 deletions(-) diff --git a/tests/test_api.py b/tests/test_api.py index f1363f0dd..c0898afa7 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -23,14 +23,29 @@ def test_breaking_change() -> None: - fetch_process = subprocess.run( - ["git", "fetch", "--tags", "upstream"], + check_if_upstream = subprocess.run( + ["git", "remote", "get-url", "origin"], + text=True, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + check=False, + encoding='utf-8', + ) + if check_if_upstream.returncode != 0: + raise GitError("This is not a git repository") + + fetch_cmd = ["git", "fetch", "--tags"] + if not check_if_upstream.stdout == "https://github.com/apache/iceberg-python.git": + fetch_cmd.append("upstream") + + fetch_tags = subprocess.run( + fetch_cmd, check=False, ) - if fetch_process.returncode != 0: + if fetch_tags.returncode != 0: raise GitError("Cannot fetch Git tags from upstream") - tag_process = subprocess.run( + list_tags = subprocess.run( ["git", "tag", "-l", "--sort=-version:refname"], text=True, stdout=subprocess.PIPE, @@ -38,8 +53,8 @@ def test_breaking_change() -> None: check=False, encoding='utf-8', ) - output = tag_process.stdout.strip() - if tag_process.returncode != 0 or not output: + output = list_tags.stdout + if list_tags.returncode != 0 or not output: raise GitError(f"Cannot list Git tags from upstream: {output or 'no tags'}") tags = output.split("\n") @@ -55,4 +70,4 @@ def test_breaking_change() -> None: for breakage in griffe.find_breaking_changes(previous, current): breaking_changes.append(breakage.as_dict()) - assert not breaking_changes + assert not breaking_changes, f"Total of {len(breaking_changes)} breaking API changes since {releases[0]}" From 0e273aedef1f9d684f6167f96a0b2fd45de25a47 Mon Sep 17 00:00:00 2001 From: Sung Yun <107272191+syun64@users.noreply.github.com> Date: Thu, 8 Feb 2024 15:18:59 +0000 Subject: [PATCH 04/25] log --- tests/test_api.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test_api.py b/tests/test_api.py index c0898afa7..2ebaf40e4 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -43,7 +43,7 @@ def test_breaking_change() -> None: check=False, ) if 
fetch_tags.returncode != 0: - raise GitError("Cannot fetch Git tags from upstream") + raise GitError(f"Cannot fetch Git tags with command: {fetch_cmd} from origin: {check_if_upstream.stdout}") list_tags = subprocess.run( ["git", "tag", "-l", "--sort=-version:refname"], @@ -55,7 +55,7 @@ def test_breaking_change() -> None: ) output = list_tags.stdout if list_tags.returncode != 0 or not output: - raise GitError(f"Cannot list Git tags from upstream: {output or 'no tags'}") + raise GitError(f"Cannot list Git tags: {output or 'no tags'}") tags = output.split("\n") p = re.compile(r"^pyiceberg-\d+.\d+.\d+$") From dddf6a3fecf4907bbdb93ea00506c13905b026cf Mon Sep 17 00:00:00 2001 From: Sung Yun <107272191+syun64@users.noreply.github.com> Date: Thu, 8 Feb 2024 15:26:52 +0000 Subject: [PATCH 05/25] fix check --- tests/test_api.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_api.py b/tests/test_api.py index 2ebaf40e4..e93bdd32c 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -35,7 +35,7 @@ def test_breaking_change() -> None: raise GitError("This is not a git repository") fetch_cmd = ["git", "fetch", "--tags"] - if not check_if_upstream.stdout == "https://github.com/apache/iceberg-python.git": + if not check_if_upstream.stdout == "https://github.com/apache/iceberg-python": fetch_cmd.append("upstream") fetch_tags = subprocess.run( From 976c0ea6f00ce2bd8894cf066d6a43b939f66ee5 Mon Sep 17 00:00:00 2001 From: Sung Yun <107272191+syun64@users.noreply.github.com> Date: Thu, 8 Feb 2024 16:31:47 +0000 Subject: [PATCH 06/25] fix --- tests/test_api.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/tests/test_api.py b/tests/test_api.py index e93bdd32c..f556912b0 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -34,8 +34,9 @@ def test_breaking_change() -> None: if check_if_upstream.returncode != 0: raise GitError("This is not a git repository") + origin = check_if_upstream.stdout.strip() fetch_cmd = ["git", "fetch", "--tags"] - if not check_if_upstream.stdout == "https://github.com/apache/iceberg-python": + if not origin == "https://github.com/apache/iceberg-python": fetch_cmd.append("upstream") fetch_tags = subprocess.run( @@ -43,7 +44,7 @@ def test_breaking_change() -> None: check=False, ) if fetch_tags.returncode != 0: - raise GitError(f"Cannot fetch Git tags with command: {fetch_cmd} from origin: {check_if_upstream.stdout}") + raise GitError(f"Cannot fetch Git tags with command: {fetch_cmd} from origin: {origin}") list_tags = subprocess.run( ["git", "tag", "-l", "--sort=-version:refname"], @@ -53,7 +54,7 @@ def test_breaking_change() -> None: check=False, encoding='utf-8', ) - output = list_tags.stdout + output = list_tags.stdout.strip() if list_tags.returncode != 0 or not output: raise GitError(f"Cannot list Git tags: {output or 'no tags'}") tags = output.split("\n") From 04a26777bf9e93f37ba0ee6af69714ffa0a3503e Mon Sep 17 00:00:00 2001 From: Sung Yun <107272191+syun64@users.noreply.github.com> Date: Sat, 17 Feb 2024 15:15:21 +0000 Subject: [PATCH 07/25] refactor --- Makefile | 3 +++ pyproject.toml | 3 ++- tests/{ => api}/test_api.py | 3 +++ 3 files changed, 8 insertions(+), 1 deletion(-) rename tests/{ => api}/test_api.py (97%) diff --git a/Makefile b/Makefile index c3e816ebd..6af6ab793 100644 --- a/Makefile +++ b/Makefile @@ -36,6 +36,9 @@ test-s3: sh ./dev/run-minio.sh poetry run pytest tests/ -m s3 ${PYTEST_ARGS} +test-api: + poetry run pytest tests/ -m api ${PYTEST_ARGS} + test-integration: docker-compose -f 
dev/docker-compose-integration.yml kill docker-compose -f dev/docker-compose-integration.yml rm -f diff --git a/pyproject.toml b/pyproject.toml index 0632ec8c8..fd9400627 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -129,7 +129,8 @@ markers = [ "s3: marks a test as requiring access to s3 compliant storage (use with --aws-access-key-id, --aws-secret-access-key, and --endpoint args)", "adlfs: marks a test as requiring access to adlfs compliant storage (use with --adlfs.account-name, --adlfs.account-key, and --adlfs.endpoint args)", "integration: marks integration tests against Apache Spark", - "gcs: marks a test as requiring access to gcs compliant storage (use with --gs.token, --gs.project, and --gs.endpoint)" + "gcs: marks a test as requiring access to gcs compliant storage (use with --gs.token, --gs.project, and --gs.endpoint)", + "api: marks a test as an API backwards compatibility test" ] # Turns a warning into an error diff --git a/tests/test_api.py b/tests/api/test_api.py similarity index 97% rename from tests/test_api.py rename to tests/api/test_api.py index f556912b0..c82f8bbd3 100644 --- a/tests/test_api.py +++ b/tests/api/test_api.py @@ -15,13 +15,16 @@ # specific language governing permissions and limitations # under the License. +import pytest import re import subprocess import griffe +from griffe.enumerations import BreakageKind from griffe.exceptions import GitError +@pytest.mark.api def test_breaking_change() -> None: check_if_upstream = subprocess.run( ["git", "remote", "get-url", "origin"], From d40a1c32ea81403a60dda7057f7f2211b7649723 Mon Sep 17 00:00:00 2001 From: Sung Yun <107272191+syun64@users.noreply.github.com> Date: Sat, 17 Feb 2024 15:18:42 +0000 Subject: [PATCH 08/25] lint --- tests/api/test_api.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tests/api/test_api.py b/tests/api/test_api.py index c82f8bbd3..b0c9917f0 100644 --- a/tests/api/test_api.py +++ b/tests/api/test_api.py @@ -15,12 +15,11 @@ # specific language governing permissions and limitations # under the License. -import pytest import re import subprocess import griffe -from griffe.enumerations import BreakageKind import pytest from griffe.exceptions import GitError From 68123dfd37ebb434482dbecaaa42012dc70daaa0 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 16 Feb 2024 13:22:52 +0100 Subject: [PATCH 09/25] Build: Bump getdaft from 0.2.14 to 0.2.15 (#434) Bumps [getdaft](https://github.com/Eventual-Inc/Daft) from 0.2.14 to 0.2.15. - [Release notes](https://github.com/Eventual-Inc/Daft/releases) - [Commits](https://github.com/Eventual-Inc/Daft/compare/v0.2.14...v0.2.15) --- updated-dependencies: - dependency-name: getdaft dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/poetry.lock b/poetry.lock index 29a8e5d93..7cd73b0e2 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1219,17 +1219,17 @@ gcsfuse = ["fusepy"] [[package]] name = "getdaft" -version = "0.2.14" +version = "0.2.15" description = "Distributed Dataframes for Multimodal Data" optional = true python-versions = ">=3.7" files = [ - {file = "getdaft-0.2.14-cp37-abi3-macosx_10_7_x86_64.whl", hash = "sha256:01754aa7f6059cfec363eff9a8e46197bbbc65634da9c12cd998ce2223fb8c45"}, - {file = "getdaft-0.2.14-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:8cf445538b7e0d5016c548b0e951dab3f5cf2fe6303bddd09d9c5ff5747894e1"}, - {file = "getdaft-0.2.14-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3add9d73702765e261d3e88ef04099d6a9c95f90d19c96ff4e014f4d9ed6433e"}, - {file = "getdaft-0.2.14-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:110b0b35bc9732926f9a8b9c3f6996d834a0f24ccb0c400f9b03d580dcd90096"}, - {file = "getdaft-0.2.14-cp37-abi3-win_amd64.whl", hash = "sha256:34ebbbf040982f219c3bae5e2046322150332a3e65ad67ba71985db59b2fff68"}, - {file = "getdaft-0.2.14.tar.gz", hash = "sha256:07cd349fe961536c6bd172b6cf02217fb1fd23a2a021571f6a5c6f0dddb184ec"}, + {file = "getdaft-0.2.15-cp37-abi3-macosx_10_7_x86_64.whl", hash = "sha256:95c16b0f25a78a13cab21128bcd0b5e7af65a14b12fd037ef8ec8aee5b7ac911"}, + {file = "getdaft-0.2.15-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:6afb66507ae899fb32adc8532b25ddd245a14d695a099ceb594afcb24848adb0"}, + {file = "getdaft-0.2.15-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:580a9971807e30a21136ae10eeb39cb2c880ab6eb87a464447206e4d36d52e2b"}, + {file = "getdaft-0.2.15-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44424e2adc80f12e3a404cc389a1b37eabbd1c8a3f0345d218852a65a7c3593d"}, + {file = "getdaft-0.2.15-cp37-abi3-win_amd64.whl", hash = "sha256:10103355c8a48455a1b2262bc1b7eca6b495059da7f2d220758bc273b734b898"}, + {file = "getdaft-0.2.15.tar.gz", hash = "sha256:5729915db8e15b6d42568cceef4a588ecdc5ce1a29f52c362e41d789bffb32e7"}, ] [package.dependencies] From 8241bf3d393a509f2bec553eefbb409fa8888814 Mon Sep 17 00:00:00 2001 From: Fokko Driesprong Date: Mon, 19 Feb 2024 19:00:35 +0100 Subject: [PATCH 10/25] docs: Add missing release steps (#443) --- mkdocs/docs/how-to-release.md | 32 +++++++++++++++++++++++++++++--- 1 file changed, 29 insertions(+), 3 deletions(-) diff --git a/mkdocs/docs/how-to-release.md b/mkdocs/docs/how-to-release.md index 5a2072ec0..e41250a1f 100644 --- a/mkdocs/docs/how-to-release.md +++ b/mkdocs/docs/how-to-release.md @@ -146,11 +146,37 @@ cat release-announcement-email.txt ## Vote has passed -Once the vote has been passed, the latest version can be pushed to PyPi. Check out the Apache SVN and make sure to publish the right version with `twine`: +Once the vote has been passed, you can close the vote thread by concluding it: + +``` +Thanks everyone for voting! The 72 hours have passed, and a minimum of 3 binding votes have been cast: + ++1 Foo Bar (non-binding) +... ++1 Fokko Driesprong (binding) + +The release candidate has been accepted as PyIceberg . Thanks everyone, when all artifacts are published the announcement will be sent out. 
+ +Kind regards, +``` + +### Copy the artifacts to the release dist + +``` +svn checkout https://dist.apache.org/repos/dist/dev/iceberg /tmp/iceberg-dist-dev +svn checkout https://dist.apache.org/repos/dist/release/iceberg/ /tmp/iceberg-dist-release + +mkdir -p /tmp/iceberg-dist-release/pyiceberg- +cp -r /tmp/iceberg-dist-dev/pyiceberg-rcN/* /tmp/iceberg-dist-release/pyiceberg- + +svn add /tmp/iceberg-dist-release/ +svn ci -m "PyIceberg " /tmp/iceberg-dist-release/ +``` + +The latest version can be pushed to PyPi. Check out the Apache SVN and make sure to publish the right version with `twine`: ```bash -svn checkout https://dist.apache.org/repos/dist/dev/iceberg /tmp/ -twine upload -s /tmp/iceberg/pyiceberg-0.1.0rc1 +twine upload -s /tmp/iceberg-dist-release/pyiceberg-/* ``` Send out an announcement on the dev mail list: From 92f1d7479591c8be4585476f9e2d7e5a72aa9937 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 19 Feb 2024 21:05:07 -0800 Subject: [PATCH 11/25] Build: Bump moto from 5.0.1 to 5.0.2 (#447) Bumps [moto](https://github.com/getmoto/moto) from 5.0.1 to 5.0.2. - [Release notes](https://github.com/getmoto/moto/releases) - [Changelog](https://github.com/getmoto/moto/blob/master/CHANGELOG.md) - [Commits](https://github.com/getmoto/moto/compare/5.0.1...5.0.2) --- updated-dependencies: - dependency-name: moto dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 105 +++++++++++++++---------------------------------- pyproject.toml | 2 +- 2 files changed, 32 insertions(+), 75 deletions(-) diff --git a/poetry.lock b/poetry.lock index 7cd73b0e2..04593599f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -937,24 +937,6 @@ files = [ {file = "duckdb-0.10.0.tar.gz", hash = "sha256:c02bcc128002aa79e3c9d89b9de25e062d1096a8793bc0d7932317b7977f6845"}, ] -[[package]] -name = "ecdsa" -version = "0.18.0" -description = "ECDSA cryptographic signature library (pure python)" -optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "ecdsa-0.18.0-py2.py3-none-any.whl", hash = "sha256:80600258e7ed2f16b9aa1d7c295bd70194109ad5a30fdee0eaeefef1d4c559dd"}, - {file = "ecdsa-0.18.0.tar.gz", hash = "sha256:190348041559e21b22a1d65cee485282ca11a6f81d503fddb84d5017e9ed1e49"}, -] - -[package.dependencies] -six = ">=1.9.0" - -[package.extras] -gmpy = ["gmpy"] -gmpy2 = ["gmpy2"] - [[package]] name = "exceptiongroup" version = "1.2.0" @@ -1689,6 +1671,23 @@ files = [ {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, ] +[[package]] +name = "joserfc" +version = "0.9.0" +description = "The ultimate Python library for JOSE RFCs, including JWS, JWE, JWK, JWA, JWT" +optional = false +python-versions = ">=3.8" +files = [ + {file = "joserfc-0.9.0-py3-none-any.whl", hash = "sha256:4026bdbe2c196cd40574e916fa1e28874d99649412edaab0e373dec3077153fb"}, + {file = "joserfc-0.9.0.tar.gz", hash = "sha256:eebca7f587b1761ce43a98ffd5327f2b600b9aa5bb0a77b947687f503ad43bc0"}, +] + +[package.dependencies] +cryptography = "*" + +[package.extras] +drafts = ["pycryptodome"] + [[package]] name = "jschema-to-python" version = "1.2.3" @@ -2021,13 +2020,13 @@ files = [ [[package]] name = "moto" -version = "5.0.1" +version = "5.0.2" description = "" optional = false python-versions = ">=3.8" files = [ - {file = 
"moto-5.0.1-py2.py3-none-any.whl", hash = "sha256:94e3b07a403cc8078ffee94bf404ef677112d036a57ddb5e0f19c5fcf48987f5"}, - {file = "moto-5.0.1.tar.gz", hash = "sha256:62b9798aef9028432194cebb7a671f4064257bb3be662d9c1b83b94411b694bb"}, + {file = "moto-5.0.2-py2.py3-none-any.whl", hash = "sha256:71bb832a18b64f10fc4cec117b9b0e2305e5831d9a17eb74f6b9819ed7613843"}, + {file = "moto-5.0.2.tar.gz", hash = "sha256:7e27395e5c63ff9554ae14b5baa41bfe6d6b1be0e59eb02977c6ce28411246de"}, ] [package.dependencies] @@ -2037,44 +2036,41 @@ botocore = ">=1.14.0" cfn-lint = {version = ">=0.40.0", optional = true, markers = "extra == \"server\""} cryptography = ">=3.3.1" docker = {version = ">=3.0.0", optional = true, markers = "extra == \"server\""} -ecdsa = {version = "!=0.15", optional = true, markers = "extra == \"server\""} flask = {version = "<2.2.0 || >2.2.0,<2.2.1 || >2.2.1", optional = true, markers = "extra == \"server\""} flask-cors = {version = "*", optional = true, markers = "extra == \"server\""} graphql-core = {version = "*", optional = true, markers = "extra == \"server\""} Jinja2 = ">=2.10.1" +joserfc = {version = ">=0.9.0", optional = true, markers = "extra == \"server\""} jsondiff = {version = ">=1.1.2", optional = true, markers = "extra == \"server\""} openapi-spec-validator = {version = ">=0.5.0", optional = true, markers = "extra == \"server\""} py-partiql-parser = {version = "0.5.1", optional = true, markers = "extra == \"server\""} pyparsing = {version = ">=3.0.7", optional = true, markers = "extra == \"server\""} python-dateutil = ">=2.1,<3.0.0" -python-jose = {version = ">=3.1.0,<4.0.0", extras = ["cryptography"], optional = true, markers = "extra == \"server\""} PyYAML = {version = ">=5.1", optional = true, markers = "extra == \"server\""} requests = ">=2.5" responses = ">=0.15.0" setuptools = {version = "*", optional = true, markers = "extra == \"server\""} -sshpubkeys = {version = ">=3.1.0", optional = true, markers = "extra == \"server\""} werkzeug = ">=0.5,<2.2.0 || >2.2.0,<2.2.1 || >2.2.1" xmltodict = "*" [package.extras] -all = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.1)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] -apigateway = ["PyYAML (>=5.1)", "ecdsa (!=0.15)", "openapi-spec-validator (>=0.5.0)", "python-jose[cryptography] (>=3.1.0,<4.0.0)"] +all = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "graphql-core", "joserfc (>=0.9.0)", "jsondiff (>=1.1.2)", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.1)", "pyparsing (>=3.0.7)", "setuptools"] +apigateway = ["PyYAML (>=5.1)", "joserfc (>=0.9.0)", "openapi-spec-validator (>=0.5.0)"] apigatewayv2 = ["PyYAML (>=5.1)", "openapi-spec-validator (>=0.5.0)"] appsync = ["graphql-core"] awslambda = ["docker (>=3.0.0)"] batch = ["docker (>=3.0.0)"] -cloudformation = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.1)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] -cognitoidp = ["ecdsa (!=0.15)", "python-jose[cryptography] (>=3.1.0,<4.0.0)"] +cloudformation = ["PyYAML (>=5.1)", "aws-xray-sdk 
(>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "graphql-core", "joserfc (>=0.9.0)", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.1)", "pyparsing (>=3.0.7)", "setuptools"] +cognitoidp = ["joserfc (>=0.9.0)"] dynamodb = ["docker (>=3.0.0)", "py-partiql-parser (==0.5.1)"] dynamodbstreams = ["docker (>=3.0.0)", "py-partiql-parser (==0.5.1)"] -ec2 = ["sshpubkeys (>=3.1.0)"] glue = ["pyparsing (>=3.0.7)"] iotdata = ["jsondiff (>=1.1.2)"] -proxy = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=2.5.1)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.1)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] -resourcegroupstaggingapi = ["PyYAML (>=5.1)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.1)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)"] +proxy = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=2.5.1)", "graphql-core", "joserfc (>=0.9.0)", "jsondiff (>=1.1.2)", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.1)", "pyparsing (>=3.0.7)", "setuptools"] +resourcegroupstaggingapi = ["PyYAML (>=5.1)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "graphql-core", "joserfc (>=0.9.0)", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.1)", "pyparsing (>=3.0.7)"] s3 = ["PyYAML (>=5.1)", "py-partiql-parser (==0.5.1)"] s3crc32c = ["PyYAML (>=5.1)", "crc32c", "py-partiql-parser (==0.5.1)"] -server = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "flask (!=2.2.0,!=2.2.1)", "flask-cors", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.1)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] +server = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "flask (!=2.2.0,!=2.2.1)", "flask-cors", "graphql-core", "joserfc (>=0.9.0)", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.1)", "pyparsing (>=3.0.7)", "setuptools"] ssm = ["PyYAML (>=5.1)"] xray = ["aws-xray-sdk (>=0.93,!=0.96)", "setuptools"] @@ -2861,7 +2857,7 @@ numpy = ">=1.16.6,<2" name = "pyasn1" version = "0.5.1" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" -optional = false +optional = true python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ {file = "pyasn1-0.5.1-py2.py3-none-any.whl", hash = "sha256:4439847c58d40b1d0a573d07e3856e95333f1976294494c325775aeca506eb58"}, @@ -3173,28 +3169,6 @@ files = [ [package.dependencies] six = ">=1.5" -[[package]] -name = "python-jose" -version = "3.3.0" -description = "JOSE implementation in Python" -optional = false -python-versions = "*" -files = [ - {file = "python-jose-3.3.0.tar.gz", hash = "sha256:55779b5e6ad599c6336191246e95eb2293a9ddebd555f796a65f838f07e5d78a"}, - {file = "python_jose-3.3.0-py2.py3-none-any.whl", hash = "sha256:9b1376b023f8b298536eedd47ae1089bcdb848f1535ab30555cd92002d78923a"}, -] - -[package.dependencies] -cryptography = {version = ">=3.4.0", optional = true, markers = "extra == \"cryptography\""} -ecdsa = 
"!=0.15" -pyasn1 = "*" -rsa = "*" - -[package.extras] -cryptography = ["cryptography (>=3.4.0)"] -pycrypto = ["pyasn1", "pycrypto (>=2.6.0,<2.7.0)"] -pycryptodome = ["pyasn1", "pycryptodome (>=3.3.1,<4.0.0)"] - [[package]] name = "python-snappy" version = "0.6.1" @@ -3740,7 +3714,7 @@ files = [ name = "rsa" version = "4.9" description = "Pure-Python RSA implementation" -optional = false +optional = true python-versions = ">=3.6,<4" files = [ {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, @@ -3927,24 +3901,6 @@ postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] pymysql = ["pymysql"] sqlcipher = ["sqlcipher3_binary"] -[[package]] -name = "sshpubkeys" -version = "3.3.1" -description = "SSH public key parser" -optional = false -python-versions = ">=3" -files = [ - {file = "sshpubkeys-3.3.1-py2.py3-none-any.whl", hash = "sha256:946f76b8fe86704b0e7c56a00d80294e39bc2305999844f079a217885060b1ac"}, - {file = "sshpubkeys-3.3.1.tar.gz", hash = "sha256:3020ed4f8c846849299370fbe98ff4157b0ccc1accec105e07cfa9ae4bb55064"}, -] - -[package.dependencies] -cryptography = ">=2.1.4" -ecdsa = ">=0.13" - -[package.extras] -dev = ["twine", "wheel", "yapf"] - [[package]] name = "strictyaml" version = "1.7.3" @@ -4403,3 +4359,4 @@ zstandard = ["zstandard"] [metadata] lock-version = "2.0" python-versions = "^3.8" +content-hash = "0a531611ad417be25c145f961e4d808b01a977dd323510e2fcccc262191b7caf" diff --git a/pyproject.toml b/pyproject.toml index fd9400627..d67c512f1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -80,7 +80,7 @@ pre-commit = "3.5.0" fastavro = "1.9.4" coverage = { version = "^7.4.1", extras = ["toml"] } requests-mock = "1.11.0" -moto = { version = "^5.0.1", extras = ["server"] } +moto = { version = "^5.0.2", extras = ["server"] } typing-extensions = "4.9.0" pytest-mock = "3.12.0" pyspark = "3.5.0" From aa0336ca9597b2207256115972c2d97ddf9a5d29 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 19 Feb 2024 21:06:06 -0800 Subject: [PATCH 12/25] Build: Bump mkdocs-material from 9.5.9 to 9.5.10 (#448) Bumps [mkdocs-material](https://github.com/squidfunk/mkdocs-material) from 9.5.9 to 9.5.10. - [Release notes](https://github.com/squidfunk/mkdocs-material/releases) - [Changelog](https://github.com/squidfunk/mkdocs-material/blob/master/CHANGELOG) - [Commits](https://github.com/squidfunk/mkdocs-material/compare/9.5.9...9.5.10) --- updated-dependencies: - dependency-name: mkdocs-material dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- mkdocs/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mkdocs/requirements.txt b/mkdocs/requirements.txt index 03d9e4f47..e2099b7c5 100644 --- a/mkdocs/requirements.txt +++ b/mkdocs/requirements.txt @@ -23,6 +23,6 @@ mkdocstrings-python==1.8.0 mkdocs-literate-nav==0.6.1 mkdocs-autorefs==0.5.0 mkdocs-gen-files==0.5.0 -mkdocs-material==9.5.9 +mkdocs-material==9.5.10 mkdocs-material-extensions==1.3.1 mkdocs-section-index==0.3.8 From a38f12a8c479dc2fe5303a37b2ab7ad79af60ced Mon Sep 17 00:00:00 2001 From: Fokko Driesprong Date: Tue, 20 Feb 2024 20:09:14 +0100 Subject: [PATCH 13/25] Make the snapshot creation part of the `Transaction` (#446) * Make the snapshot creation part of the `Transaction` This is also how it is done in Java, and I really like it since it allows you to easily queue up updates in a transaction. For example, an update to the schema. * Extend the API --- pyiceberg/io/pyarrow.py | 11 +- pyiceberg/table/__init__.py | 251 ++++++++++++++++++++----------- tests/integration/test_writes.py | 42 ++++++ 3 files changed, 213 insertions(+), 91 deletions(-) diff --git a/pyiceberg/io/pyarrow.py b/pyiceberg/io/pyarrow.py index 57f09ba17..f391abfea 100644 --- a/pyiceberg/io/pyarrow.py +++ b/pyiceberg/io/pyarrow.py @@ -1714,7 +1714,7 @@ def fill_parquet_file_metadata( data_file.split_offsets = split_offsets -def write_file(table: Table, tasks: Iterator[WriteTask]) -> Iterator[DataFile]: +def write_file(table: Table, tasks: Iterator[WriteTask], file_schema: Optional[Schema] = None) -> Iterator[DataFile]: task = next(tasks) try: @@ -1727,7 +1727,8 @@ def write_file(table: Table, tasks: Iterator[WriteTask]) -> Iterator[DataFile]: parquet_writer_kwargs = _get_parquet_writer_kwargs(table.properties) file_path = f'{table.location()}/data/{task.generate_data_file_filename("parquet")}' - file_schema = schema_to_pyarrow(table.schema()) + file_schema = file_schema or table.schema() + arrow_file_schema = schema_to_pyarrow(file_schema) fo = table.io.new_output(file_path) row_group_size = PropertyUtil.property_as_int( @@ -1736,7 +1737,7 @@ def write_file(table: Table, tasks: Iterator[WriteTask]) -> Iterator[DataFile]: default=TableProperties.PARQUET_ROW_GROUP_SIZE_BYTES_DEFAULT, ) with fo.create(overwrite=True) as fos: - with pq.ParquetWriter(fos, schema=file_schema, **parquet_writer_kwargs) as writer: + with pq.ParquetWriter(fos, schema=arrow_file_schema, **parquet_writer_kwargs) as writer: writer.write_table(task.df, row_group_size=row_group_size) data_file = DataFile( @@ -1758,8 +1759,8 @@ def write_file(table: Table, tasks: Iterator[WriteTask]) -> Iterator[DataFile]: fill_parquet_file_metadata( data_file=data_file, parquet_metadata=writer.writer.metadata, - stats_columns=compute_statistics_plan(table.schema(), table.properties), - parquet_column_mapping=parquet_path_to_id_mapping(table.schema()), + stats_columns=compute_statistics_plan(file_schema, table.properties), + parquet_column_mapping=parquet_path_to_id_mapping(file_schema), ) return iter([data_file]) diff --git a/pyiceberg/table/__init__.py b/pyiceberg/table/__init__.py index a87435fcf..a939294f3 100644 --- a/pyiceberg/table/__init__.py +++ b/pyiceberg/table/__init__.py @@ -329,6 +329,14 @@ def update_schema(self) -> UpdateSchema: """ return UpdateSchema(self._table, self) + def update_snapshot(self) -> UpdateSnapshot: + """Create a new UpdateSnapshot to produce a new snapshot for the table. 
+ + Returns: + A new UpdateSnapshot + """ + return UpdateSnapshot(self._table, self) + def remove_properties(self, *removals: str) -> Transaction: """Remove properties. @@ -351,6 +359,12 @@ def update_location(self, location: str) -> Transaction: """ raise NotImplementedError("Not yet implemented") + def schema(self) -> Schema: + try: + return next(update for update in self._updates if isinstance(update, AddSchemaUpdate)).schema_ + except StopIteration: + return self._table.schema() + def commit_transaction(self) -> Table: """Commit the changes to the catalog. @@ -965,8 +979,21 @@ def history(self) -> List[SnapshotLogEntry]: return self.metadata.snapshot_log def update_schema(self, allow_incompatible_changes: bool = False, case_sensitive: bool = True) -> UpdateSchema: + """Create a new UpdateSchema to alter the columns of this table. + + Returns: + A new UpdateSchema. + """ return UpdateSchema(self, allow_incompatible_changes=allow_incompatible_changes, case_sensitive=case_sensitive) + def update_snapshot(self) -> UpdateSnapshot: + """Create a new UpdateSnapshot to produce a new snapshot for the table. + + Returns: + A new UpdateSnapshot + """ + return UpdateSnapshot(self) + def name_mapping(self) -> NameMapping: """Return the table's field-id NameMapping.""" if name_mapping_json := self.properties.get(TableProperties.DEFAULT_NAME_MAPPING): @@ -976,7 +1003,7 @@ def name_mapping(self) -> NameMapping: def append(self, df: pa.Table) -> None: """ - Append data to the table. + Shorthand API for appending a PyArrow table to the table. Args: df: The Arrow dataframe that will be appended to overwrite the table @@ -992,19 +1019,16 @@ def append(self, df: pa.Table) -> None: if len(self.spec().fields) > 0: raise ValueError("Cannot write to partitioned tables") - merge = _MergingSnapshotProducer(operation=Operation.APPEND, table=self) - - # skip writing data files if the dataframe is empty - if df.shape[0] > 0: - data_files = _dataframe_to_data_files(self, df=df) - for data_file in data_files: - merge.append_data_file(data_file) - - merge.commit() + with self.update_snapshot().fast_append() as update_snapshot: + # skip writing data files if the dataframe is empty + if df.shape[0] > 0: + data_files = _dataframe_to_data_files(self, df=df) + for data_file in data_files: + update_snapshot.append_data_file(data_file) def overwrite(self, df: pa.Table, overwrite_filter: BooleanExpression = ALWAYS_TRUE) -> None: """ - Overwrite all the data in the table. + Shorthand for overwriting the table with a PyArrow table. 
Args: df: The Arrow dataframe that will be used to overwrite the table @@ -1025,18 +1049,12 @@ def overwrite(self, df: pa.Table, overwrite_filter: BooleanExpression = ALWAYS_T if len(self.spec().fields) > 0: raise ValueError("Cannot write to partitioned tables") - merge = _MergingSnapshotProducer( - operation=Operation.OVERWRITE if self.current_snapshot() is not None else Operation.APPEND, - table=self, - ) - - # skip writing data files if the dataframe is empty - if df.shape[0] > 0: - data_files = _dataframe_to_data_files(self, df=df) - for data_file in data_files: - merge.append_data_file(data_file) - - merge.commit() + with self.update_snapshot().overwrite() as update_snapshot: + # skip writing data files if the dataframe is empty + if df.shape[0] > 0: + data_files = _dataframe_to_data_files(self, df=df) + for data_file in data_files: + update_snapshot.append_data_file(data_file) def refs(self) -> Dict[str, SnapshotRef]: """Return the snapshot references in the table.""" @@ -2331,7 +2349,12 @@ def _generate_manifest_list_path(location: str, snapshot_id: int, attempt: int, return f'{location}/metadata/snap-{snapshot_id}-{attempt}-{commit_uuid}.avro' -def _dataframe_to_data_files(table: Table, df: pa.Table) -> Iterable[DataFile]: +def _dataframe_to_data_files(table: Table, df: pa.Table, file_schema: Optional[Schema] = None) -> Iterable[DataFile]: + """Convert a PyArrow table into a DataFile. + + Returns: + An iterable that supplies datafiles that represent the table. + """ from pyiceberg.io.pyarrow import write_file if len(table.spec().fields) > 0: @@ -2342,7 +2365,7 @@ def _dataframe_to_data_files(table: Table, df: pa.Table) -> Iterable[DataFile]: # This is an iter, so we don't have to materialize everything every time # This will be more relevant when we start doing partitioned writes - yield from write_file(table, iter([WriteTask(write_uuid, next(counter), df)])) + yield from write_file(table, iter([WriteTask(write_uuid, next(counter), df)]), file_schema=file_schema) class _MergingSnapshotProducer: @@ -2352,8 +2375,9 @@ class _MergingSnapshotProducer: _parent_snapshot_id: Optional[int] _added_data_files: List[DataFile] _commit_uuid: uuid.UUID + _transaction: Optional[Transaction] - def __init__(self, operation: Operation, table: Table) -> None: + def __init__(self, operation: Operation, table: Table, transaction: Optional[Transaction] = None) -> None: self._operation = operation self._table = table self._snapshot_id = table.new_snapshot_id() @@ -2361,46 +2385,25 @@ def __init__(self, operation: Operation, table: Table) -> None: self._parent_snapshot_id = snapshot.snapshot_id if (snapshot := self._table.current_snapshot()) else None self._added_data_files = [] self._commit_uuid = uuid.uuid4() + self._transaction = transaction + + def __enter__(self) -> _MergingSnapshotProducer: + """Start a transaction to update the table.""" + return self + + def __exit__(self, _: Any, value: Any, traceback: Any) -> None: + """Close and commit the transaction.""" + self.commit() def append_data_file(self, data_file: DataFile) -> _MergingSnapshotProducer: self._added_data_files.append(data_file) return self - def _deleted_entries(self) -> List[ManifestEntry]: - """To determine if we need to record any deleted entries. - - With partial overwrites we have to use the predicate to evaluate - which entries are affected. 
- """ - if self._operation == Operation.OVERWRITE: - if self._parent_snapshot_id is not None: - previous_snapshot = self._table.snapshot_by_id(self._parent_snapshot_id) - if previous_snapshot is None: - # This should never happen since you cannot overwrite an empty table - raise ValueError(f"Could not find the previous snapshot: {self._parent_snapshot_id}") - - executor = ExecutorFactory.get_or_create() - - def _get_entries(manifest: ManifestFile) -> List[ManifestEntry]: - return [ - ManifestEntry( - status=ManifestEntryStatus.DELETED, - snapshot_id=entry.snapshot_id, - data_sequence_number=entry.data_sequence_number, - file_sequence_number=entry.file_sequence_number, - data_file=entry.data_file, - ) - for entry in manifest.fetch_manifest_entry(self._table.io, discard_deleted=True) - if entry.data_file.content == DataFileContent.DATA - ] + @abstractmethod + def _deleted_entries(self) -> List[ManifestEntry]: ... - list_of_entries = executor.map(_get_entries, previous_snapshot.manifests(self._table.io)) - return list(chain(*list_of_entries)) - return [] - elif self._operation == Operation.APPEND: - return [] - else: - raise ValueError(f"Not implemented for: {self._operation}") + @abstractmethod + def _existing_manifests(self) -> List[ManifestFile]: ... def _manifests(self) -> List[ManifestFile]: def _write_added_manifest() -> List[ManifestFile]: @@ -2430,7 +2433,7 @@ def _write_added_manifest() -> List[ManifestFile]: def _write_delete_manifest() -> List[ManifestFile]: # Check if we need to mark the files as deleted deleted_entries = self._deleted_entries() - if deleted_entries: + if len(deleted_entries) > 0: output_file_location = _new_manifest_path(location=self._table.location(), num=1, commit_uuid=self._commit_uuid) with write_manifest( format_version=self._table.format_version, @@ -2445,32 +2448,11 @@ def _write_delete_manifest() -> List[ManifestFile]: else: return [] - def _fetch_existing_manifests() -> List[ManifestFile]: - existing_manifests = [] - - # Add existing manifests - if self._operation == Operation.APPEND and self._parent_snapshot_id is not None: - # In case we want to append, just add the existing manifests - previous_snapshot = self._table.snapshot_by_id(self._parent_snapshot_id) - - if previous_snapshot is None: - raise ValueError(f"Snapshot could not be found: {self._parent_snapshot_id}") - - for manifest in previous_snapshot.manifests(io=self._table.io): - if ( - manifest.has_added_files() - or manifest.has_existing_files() - or manifest.added_snapshot_id == self._snapshot_id - ): - existing_manifests.append(manifest) - - return existing_manifests - executor = ExecutorFactory.get_or_create() added_manifests = executor.submit(_write_added_manifest) delete_manifests = executor.submit(_write_delete_manifest) - existing_manifests = executor.submit(_fetch_existing_manifests) + existing_manifests = executor.submit(self._existing_manifests) return added_manifests.result() + delete_manifests.result() + existing_manifests.result() @@ -2515,10 +2497,107 @@ def commit(self) -> Snapshot: schema_id=self._table.schema().schema_id, ) - with self._table.transaction() as tx: - tx.add_snapshot(snapshot=snapshot) - tx.set_ref_snapshot( + if self._transaction is not None: + self._transaction.add_snapshot(snapshot=snapshot) + self._transaction.set_ref_snapshot( snapshot_id=self._snapshot_id, parent_snapshot_id=self._parent_snapshot_id, ref_name="main", type="branch" ) + else: + with self._table.transaction() as tx: + tx.add_snapshot(snapshot=snapshot) + tx.set_ref_snapshot( + 
snapshot_id=self._snapshot_id, parent_snapshot_id=self._parent_snapshot_id, ref_name="main", type="branch" + ) return snapshot + + +class FastAppendFiles(_MergingSnapshotProducer): + def _existing_manifests(self) -> List[ManifestFile]: + """To determine if there are any existing manifest files. + + A fast append will add another ManifestFile to the ManifestList. + All the existing manifest files are considered existing. + """ + existing_manifests = [] + + if self._parent_snapshot_id is not None: + previous_snapshot = self._table.snapshot_by_id(self._parent_snapshot_id) + + if previous_snapshot is None: + raise ValueError(f"Snapshot could not be found: {self._parent_snapshot_id}") + + for manifest in previous_snapshot.manifests(io=self._table.io): + if manifest.has_added_files() or manifest.has_existing_files() or manifest.added_snapshot_id == self._snapshot_id: + existing_manifests.append(manifest) + + return existing_manifests + + def _deleted_entries(self) -> List[ManifestEntry]: + """To determine if we need to record any deleted manifest entries. + + In case of an append, nothing is deleted. + """ + return [] + + +class OverwriteFiles(_MergingSnapshotProducer): + def _existing_manifests(self) -> List[ManifestFile]: + """To determine if there are any existing manifest files. + + In the of a full overwrite, all the previous manifests are + considered deleted. + """ + return [] + + def _deleted_entries(self) -> List[ManifestEntry]: + """To determine if we need to record any deleted entries. + + With a full overwrite all the entries are considered deleted. + With partial overwrites we have to use the predicate to evaluate + which entries are affected. + """ + if self._parent_snapshot_id is not None: + previous_snapshot = self._table.snapshot_by_id(self._parent_snapshot_id) + if previous_snapshot is None: + # This should never happen since you cannot overwrite an empty table + raise ValueError(f"Could not find the previous snapshot: {self._parent_snapshot_id}") + + executor = ExecutorFactory.get_or_create() + + def _get_entries(manifest: ManifestFile) -> List[ManifestEntry]: + return [ + ManifestEntry( + status=ManifestEntryStatus.DELETED, + snapshot_id=entry.snapshot_id, + data_sequence_number=entry.data_sequence_number, + file_sequence_number=entry.file_sequence_number, + data_file=entry.data_file, + ) + for entry in manifest.fetch_manifest_entry(self._table.io, discard_deleted=True) + if entry.data_file.content == DataFileContent.DATA + ] + + list_of_entries = executor.map(_get_entries, previous_snapshot.manifests(self._table.io)) + return list(chain(*list_of_entries)) + else: + return [] + + +class UpdateSnapshot: + _table: Table + _transaction: Optional[Transaction] + + def __init__(self, table: Table, transaction: Optional[Transaction] = None) -> None: + self._table = table + self._transaction = transaction + + def fast_append(self) -> FastAppendFiles: + return FastAppendFiles(table=self._table, operation=Operation.APPEND, transaction=self._transaction) + + def overwrite(self) -> OverwriteFiles: + return OverwriteFiles( + table=self._table, + operation=Operation.OVERWRITE if self._table.current_snapshot() is not None else Operation.APPEND, + transaction=self._transaction, + ) diff --git a/tests/integration/test_writes.py b/tests/integration/test_writes.py index 54b647b8e..fa5e93d92 100644 --- a/tests/integration/test_writes.py +++ b/tests/integration/test_writes.py @@ -35,6 +35,7 @@ from pyiceberg.catalog.sql import SqlCatalog from pyiceberg.exceptions import 
NamespaceAlreadyExistsError, NoSuchTableError from pyiceberg.schema import Schema +from pyiceberg.table import _dataframe_to_data_files from pyiceberg.types import ( BinaryType, BooleanType, @@ -634,3 +635,44 @@ def test_duckdb_url_import(warehouse: Path, arrow_table_with_null: pa.Table) -> b'\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11', ), ] + + +@pytest.mark.integration +@pytest.mark.parametrize("format_version", [1, 2]) +def test_write_and_evolve(session_catalog: Catalog, format_version: int) -> None: + identifier = f"default.arrow_write_data_and_evolve_schema_v{format_version}" + + try: + session_catalog.drop_table(identifier=identifier) + except NoSuchTableError: + pass + + pa_table = pa.Table.from_pydict( + { + 'foo': ['a', None, 'z'], + }, + schema=pa.schema([pa.field("foo", pa.string(), nullable=True)]), + ) + + tbl = session_catalog.create_table( + identifier=identifier, schema=pa_table.schema, properties={"format-version": str(format_version)} + ) + + pa_table_with_column = pa.Table.from_pydict( + { + 'foo': ['a', None, 'z'], + 'bar': [19, None, 25], + }, + schema=pa.schema([ + pa.field("foo", pa.string(), nullable=True), + pa.field("bar", pa.int32(), nullable=True), + ]), + ) + + with tbl.transaction() as txn: + with txn.update_schema() as schema_txn: + schema_txn.union_by_name(pa_table_with_column.schema) + + with txn.update_snapshot().fast_append() as snapshot_update: + for data_file in _dataframe_to_data_files(table=tbl, df=pa_table_with_column, file_schema=txn.schema()): + snapshot_update.append_data_file(data_file) From 10978717a635fd9b3c4344a2990362a8eaf6a80a Mon Sep 17 00:00:00 2001 From: Eduard Tudenhoefner Date: Tue, 20 Feb 2024 22:06:30 +0100 Subject: [PATCH 14/25] Send X-Iceberg-Access-Delegation header to signal support for vended credentials/remote signing (#436) * Send X-Iceberg-Access-Delegation header to signal support for vended credentials/remote signing Clients can optionally send this header to signal which delegated access pattern it can support. At this point the iceberg-python client can support `vended-credentials` and `remote-signing`, thus we can always send this header. 
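As a rough illustration of what this means on the client side (a sketch, not part of the change itself; the URI and token below are placeholders, `_session` is the catalog's internal `requests.Session`, and constructing the catalog assumes a reachable REST endpoint):

```python
from pyiceberg.catalog.rest import RestCatalog

# Placeholder endpoint and token, for illustration only.
catalog = RestCatalog("rest", uri="https://example.com/ws/catalog/", token="dummy-token")

# Every request issued through the catalog's session now advertises which
# delegated-access patterns this client can handle.
assert catalog._session.headers["X-Iceberg-Access-Delegation"] == "vended-credentials"
```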
Addtional details about this header can be found in the REST OpenAPI spec: https://github.com/apache/iceberg/blob/main/open-api/rest-catalog-open-api.yaml#L1459-L1483 * Update rest.py --- pyiceberg/catalog/rest.py | 1 + 1 file changed, 1 insertion(+) diff --git a/pyiceberg/catalog/rest.py b/pyiceberg/catalog/rest.py index b4b9f722b..8560668a0 100644 --- a/pyiceberg/catalog/rest.py +++ b/pyiceberg/catalog/rest.py @@ -237,6 +237,7 @@ def _create_session(self) -> Session: session.headers["Content-type"] = "application/json" session.headers["X-Client-Version"] = ICEBERG_REST_SPEC_VERSION session.headers["User-Agent"] = f"PyIceberg/{__version__}" + session.headers["X-Iceberg-Access-Delegation"] = "vended-credentials" # Configure SigV4 Request Signing if str(self.properties.get(SIGV4, False)).lower() == "true": From f5fd479675acb20adb82d3502aa0d7390e0b33d1 Mon Sep 17 00:00:00 2001 From: Anupam Saini <4581797+anupam-saini@users.noreply.github.com> Date: Tue, 20 Feb 2024 16:07:41 -0500 Subject: [PATCH 15/25] Retry with new Access Token HTTP 419 (#340) * Refresh Auth token on expiry * Check call count * Add test to cover retry logic * Update poetry.lock with tenacity * Fix tests for Python <= 3.9 --- poetry.lock | 14 ++++++++ pyiceberg/catalog/rest.py | 46 ++++++++++++++++++++---- pyproject.toml | 5 +++ tests/catalog/test_rest.py | 72 ++++++++++++++++++++++++++++++++++++++ 4 files changed, 130 insertions(+), 7 deletions(-) diff --git a/poetry.lock b/poetry.lock index 04593599f..80624a253 100644 --- a/poetry.lock +++ b/poetry.lock @@ -3929,6 +3929,20 @@ files = [ [package.dependencies] mpmath = ">=0.19" +[[package]] +name = "tenacity" +version = "8.2.3" +description = "Retry code until it succeeds" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tenacity-8.2.3-py3-none-any.whl", hash = "sha256:ce510e327a630c9e1beaf17d42e6ffacc88185044ad85cf74c0a8887c6a0f88c"}, + {file = "tenacity-8.2.3.tar.gz", hash = "sha256:5398ef0d78e63f40007c1fb4c0bff96e1911394d2fa8d194f77619c05ff6cc8a"}, +] + +[package.extras] +doc = ["reno", "sphinx", "tornado (>=4.5)"] + [[package]] name = "thrift" version = "0.16.0" diff --git a/pyiceberg/catalog/rest.py b/pyiceberg/catalog/rest.py index 8560668a0..9ca8aa641 100644 --- a/pyiceberg/catalog/rest.py +++ b/pyiceberg/catalog/rest.py @@ -30,6 +30,7 @@ from pydantic import Field, ValidationError from requests import HTTPError, Session +from tenacity import RetryCallState, retry, retry_if_exception_type, stop_after_attempt from pyiceberg import __version__ from pyiceberg.catalog import ( @@ -118,6 +119,19 @@ class Endpoints: NAMESPACE_SEPARATOR = b"\x1F".decode(UTF8) +def _retry_hook(retry_state: RetryCallState) -> None: + rest_catalog: RestCatalog = retry_state.args[0] + rest_catalog._refresh_token() # pylint: disable=protected-access + + +_RETRY_ARGS = { + "retry": retry_if_exception_type(AuthorizationExpiredError), + "stop": stop_after_attempt(2), + "before": _retry_hook, + "reraise": True, +} + + class TableResponse(IcebergBaseModel): metadata_location: str = Field(alias="metadata-location") metadata: TableMetadata @@ -225,13 +239,7 @@ def _create_session(self) -> Session: elif ssl_client_cert := ssl_client.get(CERT): session.cert = ssl_client_cert - # If we have credentials, but not a token, we want to fetch a token - if TOKEN not in self.properties and CREDENTIAL in self.properties: - self.properties[TOKEN] = self._fetch_access_token(session, self.properties[CREDENTIAL]) - - # Set Auth token for subsequent calls in the session - if token := 
self.properties.get(TOKEN): - session.headers[AUTHORIZATION_HEADER] = f"{BEARER_PREFIX} {token}" + self._refresh_token(session, self.properties.get(TOKEN)) # Set HTTP headers session.headers["Content-type"] = "application/json" @@ -439,6 +447,18 @@ def _response_to_table(self, identifier_tuple: Tuple[str, ...], table_response: catalog=self, ) + def _refresh_token(self, session: Optional[Session] = None, new_token: Optional[str] = None) -> None: + session = session or self._session + if new_token is not None: + self.properties[TOKEN] = new_token + elif CREDENTIAL in self.properties: + self.properties[TOKEN] = self._fetch_access_token(session, self.properties[CREDENTIAL]) + + # Set Auth token for subsequent calls in the session + if token := self.properties.get(TOKEN): + session.headers[AUTHORIZATION_HEADER] = f"{BEARER_PREFIX} {token}" + + @retry(**_RETRY_ARGS) def create_table( self, identifier: Union[str, Identifier], @@ -475,6 +495,7 @@ def create_table( table_response = TableResponse(**response.json()) return self._response_to_table(self.identifier_to_tuple(identifier), table_response) + @retry(**_RETRY_ARGS) def register_table(self, identifier: Union[str, Identifier], metadata_location: str) -> Table: """Register a new table using existing metadata. @@ -506,6 +527,7 @@ def register_table(self, identifier: Union[str, Identifier], metadata_location: table_response = TableResponse(**response.json()) return self._response_to_table(self.identifier_to_tuple(identifier), table_response) + @retry(**_RETRY_ARGS) def list_tables(self, namespace: Union[str, Identifier]) -> List[Identifier]: namespace_tuple = self._check_valid_namespace_identifier(namespace) namespace_concat = NAMESPACE_SEPARATOR.join(namespace_tuple) @@ -516,6 +538,7 @@ def list_tables(self, namespace: Union[str, Identifier]) -> List[Identifier]: self._handle_non_200_response(exc, {404: NoSuchNamespaceError}) return [(*table.namespace, table.name) for table in ListTablesResponse(**response.json()).identifiers] + @retry(**_RETRY_ARGS) def load_table(self, identifier: Union[str, Identifier]) -> Table: identifier_tuple = self.identifier_to_tuple_without_catalog(identifier) response = self._session.get( @@ -529,6 +552,7 @@ def load_table(self, identifier: Union[str, Identifier]) -> Table: table_response = TableResponse(**response.json()) return self._response_to_table(identifier_tuple, table_response) + @retry(**_RETRY_ARGS) def drop_table(self, identifier: Union[str, Identifier], purge_requested: bool = False) -> None: identifier_tuple = self.identifier_to_tuple_without_catalog(identifier) response = self._session.delete( @@ -541,9 +565,11 @@ def drop_table(self, identifier: Union[str, Identifier], purge_requested: bool = except HTTPError as exc: self._handle_non_200_response(exc, {404: NoSuchTableError}) + @retry(**_RETRY_ARGS) def purge_table(self, identifier: Union[str, Identifier]) -> None: self.drop_table(identifier=identifier, purge_requested=True) + @retry(**_RETRY_ARGS) def rename_table(self, from_identifier: Union[str, Identifier], to_identifier: Union[str, Identifier]) -> Table: from_identifier_tuple = self.identifier_to_tuple_without_catalog(from_identifier) payload = { @@ -558,6 +584,7 @@ def rename_table(self, from_identifier: Union[str, Identifier], to_identifier: U return self.load_table(to_identifier) + @retry(**_RETRY_ARGS) def _commit_table(self, table_request: CommitTableRequest) -> CommitTableResponse: """Update the table. 
@@ -588,6 +615,7 @@ def _commit_table(self, table_request: CommitTableRequest) -> CommitTableRespons ) return CommitTableResponse(**response.json()) + @retry(**_RETRY_ARGS) def create_namespace(self, namespace: Union[str, Identifier], properties: Properties = EMPTY_DICT) -> None: namespace_tuple = self._check_valid_namespace_identifier(namespace) payload = {"namespace": namespace_tuple, "properties": properties} @@ -597,6 +625,7 @@ def create_namespace(self, namespace: Union[str, Identifier], properties: Proper except HTTPError as exc: self._handle_non_200_response(exc, {404: NoSuchNamespaceError, 409: NamespaceAlreadyExistsError}) + @retry(**_RETRY_ARGS) def drop_namespace(self, namespace: Union[str, Identifier]) -> None: namespace_tuple = self._check_valid_namespace_identifier(namespace) namespace = NAMESPACE_SEPARATOR.join(namespace_tuple) @@ -606,6 +635,7 @@ def drop_namespace(self, namespace: Union[str, Identifier]) -> None: except HTTPError as exc: self._handle_non_200_response(exc, {404: NoSuchNamespaceError}) + @retry(**_RETRY_ARGS) def list_namespaces(self, namespace: Union[str, Identifier] = ()) -> List[Identifier]: namespace_tuple = self.identifier_to_tuple(namespace) response = self._session.get( @@ -623,6 +653,7 @@ def list_namespaces(self, namespace: Union[str, Identifier] = ()) -> List[Identi namespaces = ListNamespaceResponse(**response.json()) return [namespace_tuple + child_namespace for child_namespace in namespaces.namespaces] + @retry(**_RETRY_ARGS) def load_namespace_properties(self, namespace: Union[str, Identifier]) -> Properties: namespace_tuple = self._check_valid_namespace_identifier(namespace) namespace = NAMESPACE_SEPARATOR.join(namespace_tuple) @@ -634,6 +665,7 @@ def load_namespace_properties(self, namespace: Union[str, Identifier]) -> Proper return NamespaceResponse(**response.json()).properties + @retry(**_RETRY_ARGS) def update_namespace_properties( self, namespace: Union[str, Identifier], removals: Optional[Set[str]] = None, updates: Properties = EMPTY_DICT ) -> PropertiesUpdateSummary: diff --git a/pyproject.toml b/pyproject.toml index d67c512f1..0cc0decbf 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -57,6 +57,7 @@ sortedcontainers = "2.4.0" fsspec = ">=2023.1.0,<2024.1.0" pyparsing = ">=3.1.0,<4.0.0" zstandard = ">=0.13.0,<1.0.0" +tenacity = ">=8.2.3,<9.0.0" pyarrow = { version = ">=9.0.0,<16.0.0", optional = true } pandas = { version = ">=1.0.0,<3.0.0", optional = true } duckdb = { version = ">=0.5.0,<1.0.0", optional = true } @@ -307,6 +308,10 @@ ignore_missing_imports = true module = "setuptools.*" ignore_missing_imports = true +[[tool.mypy.overrides]] +module = "tenacity.*" +ignore_missing_imports = true + [tool.coverage.run] source = ['pyiceberg/'] diff --git a/tests/catalog/test_rest.py b/tests/catalog/test_rest.py index 248cc14d8..7ae0d1955 100644 --- a/tests/catalog/test_rest.py +++ b/tests/catalog/test_rest.py @@ -26,6 +26,7 @@ from pyiceberg.catalog import PropertiesUpdateSummary, Table, load_catalog from pyiceberg.catalog.rest import AUTH_URL, RestCatalog from pyiceberg.exceptions import ( + AuthorizationExpiredError, NamespaceAlreadyExistsError, NoSuchNamespaceError, NoSuchTableError, @@ -266,6 +267,48 @@ def test_list_namespace_with_parent_200(rest_mock: Mocker) -> None: ] +def test_list_namespaces_419(rest_mock: Mocker) -> None: + new_token = "new_jwt_token" + new_header = dict(TEST_HEADERS) + new_header["Authorization"] = f"Bearer {new_token}" + + rest_mock.post( + f"{TEST_URI}v1/namespaces", + json={ + "error": { + "message": 
"Authorization expired.", + "type": "AuthorizationExpiredError", + "code": 419, + } + }, + status_code=419, + request_headers=TEST_HEADERS, + ) + rest_mock.post( + f"{TEST_URI}v1/oauth/tokens", + json={ + "access_token": new_token, + "token_type": "Bearer", + "expires_in": 86400, + "issued_token_type": "urn:ietf:params:oauth:token-type:access_token", + }, + status_code=200, + ) + rest_mock.get( + f"{TEST_URI}v1/namespaces", + json={"namespaces": [["default"], ["examples"], ["fokko"], ["system"]]}, + status_code=200, + request_headers=new_header, + ) + catalog = RestCatalog("rest", uri=TEST_URI, token=TEST_TOKEN, credential=TEST_CREDENTIALS) + assert catalog.list_namespaces() == [ + ("default",), + ("examples",), + ("fokko",), + ("system",), + ] + + def test_create_namespace_200(rest_mock: Mocker) -> None: namespace = "leden" rest_mock.post( @@ -517,6 +560,35 @@ def test_create_table_409(rest_mock: Mocker, table_schema_simple: Schema) -> Non assert "Table already exists" in str(e.value) +def test_create_table_419(rest_mock: Mocker, table_schema_simple: Schema) -> None: + rest_mock.post( + f"{TEST_URI}v1/namespaces/fokko/tables", + json={ + "error": { + "message": "Authorization expired.", + "type": "AuthorizationExpiredError", + "code": 419, + } + }, + status_code=419, + request_headers=TEST_HEADERS, + ) + + with pytest.raises(AuthorizationExpiredError) as e: + RestCatalog("rest", uri=TEST_URI, token=TEST_TOKEN).create_table( + identifier=("fokko", "fokko2"), + schema=table_schema_simple, + location=None, + partition_spec=PartitionSpec( + PartitionField(source_id=1, field_id=1000, transform=TruncateTransform(width=3), name="id") + ), + sort_order=SortOrder(SortField(source_id=2, transform=IdentityTransform())), + properties={"owner": "fokko"}, + ) + assert "Authorization expired" in str(e.value) + assert rest_mock.call_count == 3 + + def test_register_table_200( rest_mock: Mocker, table_schema_simple: Schema, example_table_metadata_no_snapshot_v1_rest_json: Dict[str, Any] ) -> None: From 0c708a067c1c1f18b939c419b3ceda6ab9c39f93 Mon Sep 17 00:00:00 2001 From: Fokko Driesprong Date: Tue, 20 Feb 2024 22:52:22 +0100 Subject: [PATCH 16/25] Reuse commit-uuid as the write-uuid (#437) * Reuse commit-uuid as the write-uuid * Fix conflicts * Cleanup * cleanup --- pyiceberg/table/__init__.py | 29 +++++++++++++++++++---------- 1 file changed, 19 insertions(+), 10 deletions(-) diff --git a/pyiceberg/table/__init__.py b/pyiceberg/table/__init__.py index a939294f3..d70d4380f 100644 --- a/pyiceberg/table/__init__.py +++ b/pyiceberg/table/__init__.py @@ -1022,7 +1022,7 @@ def append(self, df: pa.Table) -> None: with self.update_snapshot().fast_append() as update_snapshot: # skip writing data files if the dataframe is empty if df.shape[0] > 0: - data_files = _dataframe_to_data_files(self, df=df) + data_files = _dataframe_to_data_files(self, write_uuid=update_snapshot.commit_uuid, df=df) for data_file in data_files: update_snapshot.append_data_file(data_file) @@ -1052,7 +1052,7 @@ def overwrite(self, df: pa.Table, overwrite_filter: BooleanExpression = ALWAYS_T with self.update_snapshot().overwrite() as update_snapshot: # skip writing data files if the dataframe is empty if df.shape[0] > 0: - data_files = _dataframe_to_data_files(self, df=df) + data_files = _dataframe_to_data_files(self, write_uuid=update_snapshot.commit_uuid, df=df) for data_file in data_files: update_snapshot.append_data_file(data_file) @@ -2349,7 +2349,9 @@ def _generate_manifest_list_path(location: str, snapshot_id: int, attempt: int, 
return f'{location}/metadata/snap-{snapshot_id}-{attempt}-{commit_uuid}.avro' -def _dataframe_to_data_files(table: Table, df: pa.Table, file_schema: Optional[Schema] = None) -> Iterable[DataFile]: +def _dataframe_to_data_files( + table: Table, df: pa.Table, write_uuid: Optional[uuid.UUID] = None, file_schema: Optional[Schema] = None +) -> Iterable[DataFile]: """Convert a PyArrow table into a DataFile. Returns: @@ -2360,8 +2362,8 @@ def _dataframe_to_data_files(table: Table, df: pa.Table, file_schema: Optional[S if len(table.spec().fields) > 0: raise ValueError("Cannot write to partitioned tables") - write_uuid = uuid.uuid4() counter = itertools.count(0) + write_uuid = write_uuid or uuid.uuid4() # This is an iter, so we don't have to materialize everything every time # This will be more relevant when we start doing partitioned writes @@ -2369,22 +2371,28 @@ def _dataframe_to_data_files(table: Table, df: pa.Table, file_schema: Optional[S class _MergingSnapshotProducer: + commit_uuid: uuid.UUID _operation: Operation _table: Table _snapshot_id: int _parent_snapshot_id: Optional[int] _added_data_files: List[DataFile] - _commit_uuid: uuid.UUID _transaction: Optional[Transaction] - def __init__(self, operation: Operation, table: Table, transaction: Optional[Transaction] = None) -> None: + def __init__( + self, + operation: Operation, + table: Table, + commit_uuid: Optional[uuid.UUID] = None, + transaction: Optional[Transaction] = None, + ) -> None: + self.commit_uuid = commit_uuid or uuid.uuid4() self._operation = operation self._table = table self._snapshot_id = table.new_snapshot_id() # Since we only support the main branch for now self._parent_snapshot_id = snapshot.snapshot_id if (snapshot := self._table.current_snapshot()) else None self._added_data_files = [] - self._commit_uuid = uuid.uuid4() self._transaction = transaction def __enter__(self) -> _MergingSnapshotProducer: @@ -2408,7 +2416,7 @@ def _existing_manifests(self) -> List[ManifestFile]: ... 
def _manifests(self) -> List[ManifestFile]: def _write_added_manifest() -> List[ManifestFile]: if self._added_data_files: - output_file_location = _new_manifest_path(location=self._table.location(), num=0, commit_uuid=self._commit_uuid) + output_file_location = _new_manifest_path(location=self._table.location(), num=0, commit_uuid=self.commit_uuid) with write_manifest( format_version=self._table.format_version, spec=self._table.spec(), @@ -2434,7 +2442,8 @@ def _write_delete_manifest() -> List[ManifestFile]: # Check if we need to mark the files as deleted deleted_entries = self._deleted_entries() if len(deleted_entries) > 0: - output_file_location = _new_manifest_path(location=self._table.location(), num=1, commit_uuid=self._commit_uuid) + output_file_location = _new_manifest_path(location=self._table.location(), num=1, commit_uuid=self.commit_uuid) + with write_manifest( format_version=self._table.format_version, spec=self._table.spec(), @@ -2477,7 +2486,7 @@ def commit(self) -> Snapshot: summary = self._summary() manifest_list_file_path = _generate_manifest_list_path( - location=self._table.location(), snapshot_id=self._snapshot_id, attempt=0, commit_uuid=self._commit_uuid + location=self._table.location(), snapshot_id=self._snapshot_id, attempt=0, commit_uuid=self.commit_uuid ) with write_manifest_list( format_version=self._table.metadata.format_version, From 4c55d74a5e0d7c3e9fbc7e11ca028f5d63804e16 Mon Sep 17 00:00:00 2001 From: Sung Yun <107272191+syun64@users.noreply.github.com> Date: Tue, 20 Feb 2024 14:55:20 -0700 Subject: [PATCH 17/25] Update NameMapping on `update_schema()` (#441) * update name-mapping * Update __init__.py Co-authored-by: Fokko Driesprong * Update pyiceberg/table/name_mapping.py Co-authored-by: Fokko Driesprong * validation mode after * type --------- Co-authored-by: Fokko Driesprong --- pyiceberg/table/__init__.py | 16 +++-- pyiceberg/table/name_mapping.py | 97 ++++++++++++++++++++++++--- tests/integration/test_rest_schema.py | 21 +++++- tests/table/test_name_mapping.py | 73 +++++++++++++++++++- 4 files changed, 188 insertions(+), 19 deletions(-) diff --git a/pyiceberg/table/__init__.py b/pyiceberg/table/__init__.py index d70d4380f..2f891cd14 100644 --- a/pyiceberg/table/__init__.py +++ b/pyiceberg/table/__init__.py @@ -84,11 +84,7 @@ TableMetadata, TableMetadataUtil, ) -from pyiceberg.table.name_mapping import ( - NameMapping, - create_mapping_from_schema, - parse_mapping_from_json, -) +from pyiceberg.table.name_mapping import NameMapping, parse_mapping_from_json, update_mapping from pyiceberg.table.refs import MAIN_BRANCH, SnapshotRef from pyiceberg.table.snapshots import ( Operation, @@ -994,12 +990,12 @@ def update_snapshot(self) -> UpdateSnapshot: """ return UpdateSnapshot(self) - def name_mapping(self) -> NameMapping: + def name_mapping(self) -> Optional[NameMapping]: """Return the table's field-id NameMapping.""" if name_mapping_json := self.properties.get(TableProperties.DEFAULT_NAME_MAPPING): return parse_mapping_from_json(name_mapping_json) else: - return create_mapping_from_schema(self.schema()) + return None def append(self, df: pa.Table) -> None: """ @@ -1950,6 +1946,12 @@ def commit(self) -> None: else: updates = (SetCurrentSchemaUpdate(schema_id=existing_schema_id),) # type: ignore + if name_mapping := self._table.name_mapping(): + updated_name_mapping = update_mapping(name_mapping, self._updates, self._adds) + updates += ( # type: ignore + SetPropertiesUpdate(updates={TableProperties.DEFAULT_NAME_MAPPING: updated_name_mapping.model_dump_json()}), 
+ ) + if self._transaction is not None: self._transaction._append_updates(*updates) # pylint: disable=W0212 self._transaction._append_requirements(*requirements) # pylint: disable=W0212 diff --git a/pyiceberg/table/name_mapping.py b/pyiceberg/table/name_mapping.py index ffe96359a..94974836c 100644 --- a/pyiceberg/table/name_mapping.py +++ b/pyiceberg/table/name_mapping.py @@ -26,7 +26,7 @@ from abc import ABC, abstractmethod from collections import ChainMap from functools import cached_property, singledispatch -from typing import Any, Dict, Generic, List, TypeVar, Union +from typing import Any, Dict, Generic, List, Optional, TypeVar, Union from pydantic import Field, conlist, field_validator, model_serializer @@ -45,6 +45,18 @@ class MappedField(IcebergBaseModel): def convert_null_to_empty_List(cls, v: Any) -> Any: return v or [] + @field_validator('names', mode='after') + @classmethod + def check_at_least_one(cls, v: List[str]) -> Any: + """ + Conlist constraint does not seem to be validating the class on instantiation. + + Adding a custom validator to enforce min_length=1 constraint. + """ + if len(v) < 1: + raise ValueError("At least one mapped name must be provided for the field") + return v + @model_serializer def ser_model(self) -> Dict[str, Any]: """Set custom serializer to leave out the field when it is empty.""" @@ -93,24 +105,25 @@ def __str__(self) -> str: return "[\n " + "\n ".join([str(e) for e in self.root]) + "\n]" +S = TypeVar('S') T = TypeVar("T") -class NameMappingVisitor(Generic[T], ABC): +class NameMappingVisitor(Generic[S, T], ABC): @abstractmethod - def mapping(self, nm: NameMapping, field_results: T) -> T: + def mapping(self, nm: NameMapping, field_results: S) -> S: """Visit a NameMapping.""" @abstractmethod - def fields(self, struct: List[MappedField], field_results: List[T]) -> T: + def fields(self, struct: List[MappedField], field_results: List[T]) -> S: """Visit a List[MappedField].""" @abstractmethod - def field(self, field: MappedField, field_result: T) -> T: + def field(self, field: MappedField, field_result: S) -> T: """Visit a MappedField.""" -class _IndexByName(NameMappingVisitor[Dict[str, MappedField]]): +class _IndexByName(NameMappingVisitor[Dict[str, MappedField], Dict[str, MappedField]]): def mapping(self, nm: NameMapping, field_results: Dict[str, MappedField]) -> Dict[str, MappedField]: return field_results @@ -129,18 +142,18 @@ def field(self, field: MappedField, field_result: Dict[str, MappedField]) -> Dic @singledispatch -def visit_name_mapping(obj: Union[NameMapping, List[MappedField], MappedField], visitor: NameMappingVisitor[T]) -> T: +def visit_name_mapping(obj: Union[NameMapping, List[MappedField], MappedField], visitor: NameMappingVisitor[S, T]) -> S: """Traverse the name mapping in post-order traversal.""" raise NotImplementedError(f"Cannot visit non-type: {obj}") @visit_name_mapping.register(NameMapping) -def _(obj: NameMapping, visitor: NameMappingVisitor[T]) -> T: +def _(obj: NameMapping, visitor: NameMappingVisitor[S, T]) -> S: return visitor.mapping(obj, visit_name_mapping(obj.root, visitor)) @visit_name_mapping.register(list) -def _(fields: List[MappedField], visitor: NameMappingVisitor[T]) -> T: +def _(fields: List[MappedField], visitor: NameMappingVisitor[S, T]) -> S: results = [visitor.field(field, visit_name_mapping(field.fields, visitor)) for field in fields] return visitor.fields(fields, results) @@ -175,5 +188,71 @@ def primitive(self, primitive: PrimitiveType) -> List[MappedField]: return [] +class 
_UpdateMapping(NameMappingVisitor[List[MappedField], MappedField]): + _updates: Dict[int, NestedField] + _adds: Dict[int, List[NestedField]] + + def __init__(self, updates: Dict[int, NestedField], adds: Dict[int, List[NestedField]]): + self._updates = updates + self._adds = adds + + @staticmethod + def _remove_reassigned_names(field: MappedField, assignments: Dict[str, int]) -> Optional[MappedField]: + removed_names = set() + for name in field.names: + if (assigned_id := assignments.get(name)) and assigned_id != field.field_id: + removed_names.add(name) + + remaining_names = [f for f in field.names if f not in removed_names] + if remaining_names: + return MappedField(field_id=field.field_id, names=remaining_names, fields=field.fields) + else: + return None + + def _add_new_fields(self, mapped_fields: List[MappedField], parent_id: int) -> List[MappedField]: + if fields_to_add := self._adds.get(parent_id): + fields: List[MappedField] = [] + new_fields: List[MappedField] = [] + + for add in fields_to_add: + new_fields.append( + MappedField(field_id=add.field_id, names=[add.name], fields=visit(add.field_type, _CreateMapping())) + ) + + reassignments = {f.name: f.field_id for f in fields_to_add} + fields = [ + updated_field + for field in mapped_fields + if (updated_field := self._remove_reassigned_names(field, reassignments)) is not None + ] + new_fields + return fields + else: + return mapped_fields + + def mapping(self, nm: NameMapping, field_results: List[MappedField]) -> List[MappedField]: + return self._add_new_fields(field_results, -1) + + def fields(self, struct: List[MappedField], field_results: List[MappedField]) -> List[MappedField]: + reassignments: Dict[str, int] = { + update.name: update.field_id for f in field_results if (update := self._updates.get(f.field_id)) + } + return [ + updated_field + for field in field_results + if (updated_field := self._remove_reassigned_names(field, reassignments)) is not None + ] + + def field(self, field: MappedField, field_result: List[MappedField]) -> MappedField: + field_names = field.names + if (update := self._updates.get(field.field_id)) is not None and update.name not in field_names: + field_names.append(update.name) + + return MappedField(field_id=field.field_id, names=field_names, fields=self._add_new_fields(field_result, field.field_id)) + + def create_mapping_from_schema(schema: Schema) -> NameMapping: return NameMapping(visit(schema, _CreateMapping())) + + +def update_mapping(mapping: NameMapping, updates: Dict[int, NestedField], adds: Dict[int, List[NestedField]]) -> NameMapping: + return NameMapping(visit_name_mapping(mapping, _UpdateMapping(updates, adds))) diff --git a/tests/integration/test_rest_schema.py b/tests/integration/test_rest_schema.py index a3320e4d3..aae07caba 100644 --- a/tests/integration/test_rest_schema.py +++ b/tests/integration/test_rest_schema.py @@ -22,7 +22,8 @@ from pyiceberg.exceptions import CommitFailedException, NoSuchTableError, ValidationError from pyiceberg.partitioning import PartitionField, PartitionSpec from pyiceberg.schema import Schema, prune_columns -from pyiceberg.table import Table, UpdateSchema +from pyiceberg.table import Table, TableProperties, UpdateSchema +from pyiceberg.table.name_mapping import MappedField, NameMapping, create_mapping_from_schema from pyiceberg.table.sorting import SortField, SortOrder from pyiceberg.transforms import IdentityTransform from pyiceberg.types import ( @@ -73,7 +74,11 @@ def _create_table_with_schema(catalog: Catalog, schema: Schema) -> Table: 
catalog.drop_table(tbl_name) except NoSuchTableError: pass - return catalog.create_table(identifier=tbl_name, schema=schema) + return catalog.create_table( + identifier=tbl_name, + schema=schema, + properties={TableProperties.DEFAULT_NAME_MAPPING: create_mapping_from_schema(schema).model_dump_json()}, + ) @pytest.mark.integration @@ -674,6 +679,13 @@ def test_rename_simple(simple_table: Table) -> None: identifier_field_ids=[2], ) + # Check that the name mapping gets updated + assert simple_table.name_mapping() == NameMapping([ + MappedField(field_id=1, names=['foo', 'vo']), + MappedField(field_id=2, names=['bar']), + MappedField(field_id=3, names=['baz']), + ]) + @pytest.mark.integration def test_rename_simple_nested(catalog: Catalog) -> None: @@ -701,6 +713,11 @@ def test_rename_simple_nested(catalog: Catalog) -> None: ), ) + # Check that the name mapping gets updated + assert tbl.name_mapping() == NameMapping([ + MappedField(field_id=1, names=['foo'], fields=[MappedField(field_id=2, names=['bar', 'vo'])]), + ]) + @pytest.mark.integration def test_rename_simple_nested_with_dots(catalog: Catalog) -> None: diff --git a/tests/table/test_name_mapping.py b/tests/table/test_name_mapping.py index d74aa3234..e039415ce 100644 --- a/tests/table/test_name_mapping.py +++ b/tests/table/test_name_mapping.py @@ -17,7 +17,14 @@ import pytest from pyiceberg.schema import Schema -from pyiceberg.table.name_mapping import MappedField, NameMapping, create_mapping_from_schema, parse_mapping_from_json +from pyiceberg.table.name_mapping import ( + MappedField, + NameMapping, + create_mapping_from_schema, + parse_mapping_from_json, + update_mapping, +) +from pyiceberg.types import NestedField, StringType @pytest.fixture(scope="session") @@ -238,3 +245,67 @@ def test_mapping_lookup_by_name(table_name_mapping_nested: NameMapping) -> None: with pytest.raises(ValueError, match="Could not find field with name: boom"): table_name_mapping_nested.find("boom") + + +def test_invalid_mapped_field() -> None: + with pytest.raises(ValueError): + MappedField(field_id=1, names=[]) + + +def test_update_mapping_no_updates_or_adds(table_name_mapping_nested: NameMapping) -> None: + assert update_mapping(table_name_mapping_nested, {}, {}) == table_name_mapping_nested + + +def test_update_mapping(table_name_mapping_nested: NameMapping) -> None: + updates = {1: NestedField(1, "foo_update", StringType(), True)} + adds = { + -1: [NestedField(18, "add_18", StringType(), True)], + 15: [NestedField(19, "name", StringType(), True), NestedField(20, "add_20", StringType(), True)], + } + + expected = NameMapping([ + MappedField(field_id=1, names=['foo', 'foo_update']), + MappedField(field_id=2, names=['bar']), + MappedField(field_id=3, names=['baz']), + MappedField(field_id=4, names=['qux'], fields=[MappedField(field_id=5, names=['element'])]), + MappedField( + field_id=6, + names=['quux'], + fields=[ + MappedField(field_id=7, names=['key']), + MappedField( + field_id=8, + names=['value'], + fields=[ + MappedField(field_id=9, names=['key']), + MappedField(field_id=10, names=['value']), + ], + ), + ], + ), + MappedField( + field_id=11, + names=['location'], + fields=[ + MappedField( + field_id=12, + names=['element'], + fields=[ + MappedField(field_id=13, names=['latitude']), + MappedField(field_id=14, names=['longitude']), + ], + ) + ], + ), + MappedField( + field_id=15, + names=['person'], + fields=[ + MappedField(field_id=17, names=['age']), + MappedField(field_id=19, names=['name']), + MappedField(field_id=20, names=['add_20']), + ], + ), + 
MappedField(field_id=18, names=['add_18']), + ]) + assert update_mapping(table_name_mapping_nested, updates, adds) == expected From f4de1d179b14265fc0e37ad17408cb36c93e01bd Mon Sep 17 00:00:00 2001 From: Hussein Awala Date: Tue, 20 Feb 2024 23:03:47 +0100 Subject: [PATCH 18/25] Implement `create_table_if_not_exists` (#415) * Feat: Add fail_if_exists param to create_table * create create_table_if_not_exists method * fix reset test * fix mypy check --- pyiceberg/catalog/__init__.py | 30 ++++++++++- tests/catalog/integration_test_dynamodb.py | 9 ++++ tests/catalog/integration_test_glue.py | 9 ++++ tests/catalog/test_dynamodb.py | 12 +++++ tests/catalog/test_rest.py | 60 +++++++++++++++++++++- tests/catalog/test_sql.py | 17 ++++++ 6 files changed, 135 insertions(+), 2 deletions(-) diff --git a/pyiceberg/catalog/__init__.py b/pyiceberg/catalog/__init__.py index 6e5dc2748..f7e34ab23 100644 --- a/pyiceberg/catalog/__init__.py +++ b/pyiceberg/catalog/__init__.py @@ -36,7 +36,7 @@ cast, ) -from pyiceberg.exceptions import NoSuchNamespaceError, NoSuchTableError, NotInstalledError +from pyiceberg.exceptions import NoSuchNamespaceError, NoSuchTableError, NotInstalledError, TableAlreadyExistsError from pyiceberg.io import FileIO, load_file_io from pyiceberg.manifest import ManifestFile from pyiceberg.partitioning import UNPARTITIONED_PARTITION_SPEC, PartitionSpec @@ -315,6 +315,34 @@ def create_table( TableAlreadyExistsError: If a table with the name already exists. """ + def create_table_if_not_exists( + self, + identifier: Union[str, Identifier], + schema: Union[Schema, "pa.Schema"], + location: Optional[str] = None, + partition_spec: PartitionSpec = UNPARTITIONED_PARTITION_SPEC, + sort_order: SortOrder = UNSORTED_SORT_ORDER, + properties: Properties = EMPTY_DICT, + ) -> Table: + """Create a table if it does not exist. + + Args: + identifier (str | Identifier): Table identifier. + schema (Schema): Table's schema. + location (str | None): Location for the table. Optional Argument. + partition_spec (PartitionSpec): PartitionSpec for the table. + sort_order (SortOrder): SortOrder for the table. + properties (Properties): Table properties that can be a string based dictionary. + + Returns: + Table: the created table instance if the table does not exist, else the existing + table instance. + """ + try: + return self.create_table(identifier, schema, location, partition_spec, sort_order, properties) + except TableAlreadyExistsError: + return self.load_table(identifier) + @abstractmethod def load_table(self, identifier: Union[str, Identifier]) -> Table: """Load the table's metadata and returns the table instance. 
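A minimal usage sketch of the new helper (the catalog name, namespace, and schema are hypothetical and not taken from this patch; it assumes a catalog named "default" is configured):

```python
from pyiceberg.catalog import load_catalog
from pyiceberg.schema import Schema
from pyiceberg.types import NestedField, StringType

catalog = load_catalog("default")  # any configured catalog works here
schema = Schema(NestedField(field_id=1, name="foo", field_type=StringType(), required=False))

# The first call creates the table; later calls return the existing table
# instead of raising TableAlreadyExistsError.
tbl = catalog.create_table_if_not_exists("default.example_table", schema=schema)
```

Under the hood the default implementation simply catches `TableAlreadyExistsError` and falls back to `load_table`, as the hunk above shows.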
diff --git a/tests/catalog/integration_test_dynamodb.py b/tests/catalog/integration_test_dynamodb.py index 5ca8767d6..591e489b8 100644 --- a/tests/catalog/integration_test_dynamodb.py +++ b/tests/catalog/integration_test_dynamodb.py @@ -96,6 +96,15 @@ def test_create_duplicated_table(test_catalog: Catalog, table_schema_nested: Sch test_catalog.create_table((database_name, table_name), table_schema_nested) +def test_create_table_if_not_exists_duplicated_table( + test_catalog: Catalog, table_schema_nested: Schema, database_name: str, table_name: str +) -> None: + test_catalog.create_namespace(database_name) + table1 = test_catalog.create_table((database_name, table_name), table_schema_nested) + table2 = test_catalog.create_table_if_not_exists((database_name, table_name), table_schema_nested) + assert table1.identifier == table2.identifier + + def test_load_table(test_catalog: Catalog, table_schema_nested: Schema, database_name: str, table_name: str) -> None: identifier = (database_name, table_name) test_catalog.create_namespace(database_name) diff --git a/tests/catalog/integration_test_glue.py b/tests/catalog/integration_test_glue.py index a56e4c6aa..a685b7da7 100644 --- a/tests/catalog/integration_test_glue.py +++ b/tests/catalog/integration_test_glue.py @@ -200,6 +200,15 @@ def test_create_duplicated_table(test_catalog: Catalog, table_schema_nested: Sch test_catalog.create_table((database_name, table_name), table_schema_nested) +def test_create_table_if_not_exists_duplicated_table( + test_catalog: Catalog, table_schema_nested: Schema, table_name: str, database_name: str +) -> None: + test_catalog.create_namespace(database_name) + table1 = test_catalog.create_table((database_name, table_name), table_schema_nested) + table2 = test_catalog.create_table_if_not_exists((database_name, table_name), table_schema_nested) + assert table1.identifier == table2.identifier + + def test_load_table(test_catalog: Catalog, table_schema_nested: Schema, table_name: str, database_name: str) -> None: identifier = (database_name, table_name) test_catalog.create_namespace(database_name) diff --git a/tests/catalog/test_dynamodb.py b/tests/catalog/test_dynamodb.py index 2cb7c46a1..218b0e8be 100644 --- a/tests/catalog/test_dynamodb.py +++ b/tests/catalog/test_dynamodb.py @@ -176,6 +176,18 @@ def test_create_duplicated_table( test_catalog.create_table(identifier, table_schema_nested) +@mock_aws +def test_create_table_if_not_exists_duplicated_table( + _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str +) -> None: + identifier = (database_name, table_name) + test_catalog = DynamoDbCatalog("test_ddb_catalog", **{"warehouse": f"s3://{BUCKET_NAME}", "s3.endpoint": moto_endpoint_url}) + test_catalog.create_namespace(namespace=database_name) + table1 = test_catalog.create_table(identifier, table_schema_nested) + table2 = test_catalog.create_table_if_not_exists(identifier, table_schema_nested) + assert table1.identifier == table2.identifier + + @mock_aws def test_load_table( _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str diff --git a/tests/catalog/test_rest.py b/tests/catalog/test_rest.py index 7ae0d1955..2b698d9c1 100644 --- a/tests/catalog/test_rest.py +++ b/tests/catalog/test_rest.py @@ -16,7 +16,7 @@ # under the License. 
# pylint: disable=redefined-outer-name,unused-argument import os -from typing import Any, Dict, cast +from typing import Any, Callable, Dict, cast from unittest import mock import pytest @@ -560,6 +560,64 @@ def test_create_table_409(rest_mock: Mocker, table_schema_simple: Schema) -> Non assert "Table already exists" in str(e.value) +def test_create_table_if_not_exists_200( + rest_mock: Mocker, table_schema_simple: Schema, example_table_metadata_no_snapshot_v1_rest_json: Dict[str, Any] +) -> None: + def json_callback() -> Callable[[Any, Any], Dict[str, Any]]: + call_count = 0 + + def callback(request: Any, context: Any) -> Dict[str, Any]: + nonlocal call_count + call_count += 1 + + if call_count == 1: + context.status_code = 200 + return example_table_metadata_no_snapshot_v1_rest_json + else: + context.status_code = 409 + return { + "error": { + "message": "Table already exists: fokko.already_exists in warehouse 8bcb0838-50fc-472d-9ddb-8feb89ef5f1e", + "type": "AlreadyExistsException", + "code": 409, + } + } + + return callback + + rest_mock.post( + f"{TEST_URI}v1/namespaces/fokko/tables", + json=json_callback(), + request_headers=TEST_HEADERS, + ) + rest_mock.get( + f"{TEST_URI}v1/namespaces/fokko/tables/fokko2", + json=example_table_metadata_no_snapshot_v1_rest_json, + status_code=200, + request_headers=TEST_HEADERS, + ) + catalog = RestCatalog("rest", uri=TEST_URI, token=TEST_TOKEN) + table1 = catalog.create_table( + identifier=("fokko", "fokko2"), + schema=table_schema_simple, + location=None, + partition_spec=PartitionSpec( + PartitionField(source_id=1, field_id=1000, transform=TruncateTransform(width=3), name="id"), spec_id=1 + ), + sort_order=SortOrder(SortField(source_id=2, transform=IdentityTransform())), + properties={"owner": "fokko"}, + ) + table2 = catalog.create_table_if_not_exists( + identifier=("fokko", "fokko2"), + schema=table_schema_simple, + location=None, + partition_spec=PartitionSpec(PartitionField(source_id=1, field_id=1000, transform=TruncateTransform(width=3), name="id")), + sort_order=SortOrder(SortField(source_id=2, transform=IdentityTransform())), + properties={"owner": "fokko"}, + ) + assert table1 == table2 + + def test_create_table_419(rest_mock: Mocker, table_schema_simple: Schema) -> None: rest_mock.post( f"{TEST_URI}v1/namespaces/fokko/tables", diff --git a/tests/catalog/test_sql.py b/tests/catalog/test_sql.py index f6ff78283..bc75eb630 100644 --- a/tests/catalog/test_sql.py +++ b/tests/catalog/test_sql.py @@ -247,6 +247,23 @@ def test_create_duplicated_table(catalog: SqlCatalog, table_schema_nested: Schem catalog.create_table(random_identifier, table_schema_nested) +@pytest.mark.parametrize( + 'catalog', + [ + lazy_fixture('catalog_memory'), + lazy_fixture('catalog_sqlite'), + ], +) +def test_create_table_if_not_exists_duplicated_table( + catalog: SqlCatalog, table_schema_nested: Schema, random_identifier: Identifier +) -> None: + database_name, _table_name = random_identifier + catalog.create_namespace(database_name) + table1 = catalog.create_table(random_identifier, table_schema_nested) + table2 = catalog.create_table_if_not_exists(random_identifier, table_schema_nested) + assert table1.identifier == table2.identifier + + @pytest.mark.parametrize( 'catalog', [ From 088d504e6e7745f6c56f1864b3270885722d77bc Mon Sep 17 00:00:00 2001 From: Sung Yun <107272191+syun64@users.noreply.github.com> Date: Thu, 22 Feb 2024 13:59:09 +0000 Subject: [PATCH 19/25] fix --- Makefile | 2 +- poetry.lock | 2 +- pyiceberg/catalog/rest.py | 13 ------------- 3 files changed, 2 
insertions(+), 15 deletions(-) diff --git a/Makefile b/Makefile index 6af6ab793..2153d20ad 100644 --- a/Makefile +++ b/Makefile @@ -37,7 +37,7 @@ test-s3: poetry run pytest tests/ -m s3 ${PYTEST_ARGS} test-api: - poetry run pytest tests/ -m api ${PYTEST_ARGS} + poetry run pytest tests/ -m api -vv ${PYTEST_ARGS} test-integration: docker-compose -f dev/docker-compose-integration.yml kill diff --git a/poetry.lock b/poetry.lock index 54f283d86..e0d5c37b3 100644 --- a/poetry.lock +++ b/poetry.lock @@ -4373,4 +4373,4 @@ zstandard = ["zstandard"] [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "5553acdf7ad32ec9dbc74523b9f7ff241907b0d8129a40e3052750c3182e7539" \ No newline at end of file +content-hash = "5553acdf7ad32ec9dbc74523b9f7ff241907b0d8129a40e3052750c3182e7539" diff --git a/pyiceberg/catalog/rest.py b/pyiceberg/catalog/rest.py index 72e006a65..c824e6913 100644 --- a/pyiceberg/catalog/rest.py +++ b/pyiceberg/catalog/rest.py @@ -132,19 +132,6 @@ def _retry_hook(retry_state: RetryCallState) -> None: } -def _retry_hook(retry_state: RetryCallState) -> None: - rest_catalog: RestCatalog = retry_state.args[0] - rest_catalog._refresh_token() # pylint: disable=protected-access - - -_RETRY_ARGS = { - "retry": retry_if_exception_type(AuthorizationExpiredError), - "stop": stop_after_attempt(2), - "before": _retry_hook, - "reraise": True, -} - - class TableResponse(IcebergBaseModel): metadata_location: str = Field(alias="metadata-location") metadata: TableMetadata From 4d6cde0497ac3adebf7e57ffa20a7a71ba6c8a67 Mon Sep 17 00:00:00 2001 From: Sung Yun <107272191+syun64@users.noreply.github.com> Date: Thu, 22 Feb 2024 15:15:32 +0000 Subject: [PATCH 20/25] exclusion-list --- poetry.lock | 4 +- pyproject.toml | 469 +++++++++++++++++++++++++ tests/api/exclude/pyiceberg-0.6.0.yaml | 5 + tests/api/test_api.py | 45 ++- 4 files changed, 515 insertions(+), 8 deletions(-) create mode 100644 tests/api/exclude/pyiceberg-0.6.0.yaml diff --git a/poetry.lock b/poetry.lock index e0d5c37b3..7fbd07658 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2521,8 +2521,8 @@ files = [ [package.dependencies] numpy = [ {version = ">=1.20.3", markers = "python_version < \"3.10\""}, - {version = ">=1.21.0", markers = "python_version >= \"3.10\" and python_version < \"3.11\""}, {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, + {version = ">=1.21.0", markers = "python_version >= \"3.10\" and python_version < \"3.11\""}, ] python-dateutil = ">=2.8.2" pytz = ">=2020.1" @@ -4373,4 +4373,4 @@ zstandard = ["zstandard"] [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "5553acdf7ad32ec9dbc74523b9f7ff241907b0d8129a40e3052750c3182e7539" +content-hash = "ae2bc43b1789ecb1b25661e80d9e127fff7e80019ce67206cfa253c8a4dbb397" diff --git a/pyproject.toml b/pyproject.toml index 090997356..dbee45a22 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -87,6 +87,7 @@ pytest-mock = "3.12.0" pyspark = "3.5.0" cython = "3.0.8" griffe = "^0.40.0" +pyyaml = "^6.0.1" [[tool.mypy.overrides]] module = "pytest_mock.*" @@ -96,6 +97,474 @@ ignore_missing_imports = true module = "griffe.*" ignore_missing_imports = true +[[tool.mypy.overrides]] +module = "pyarrow.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "pandas.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "snappy.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "zstandard.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "pydantic.*" +ignore_missing_imports = 
true + +[[tool.mypy.overrides]] +module = "pydantic_core.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "pytest.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "fastavro.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "mmh3.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "hive_metastore.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "thrift.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "requests_mock.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "click.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "rich.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "fsspec.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "s3fs.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "azure.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "adlfs.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "gcsfs.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "packaging.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "tests.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "boto3" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "botocore.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "mypy_boto3_glue.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "moto" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "aiobotocore.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "aiohttp.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "duckdb.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "ray.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "daft.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "pyparsing.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "pyspark.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "strictyaml.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "sortedcontainers.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "numpy.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "sqlalchemy.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "Cython.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "setuptools.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "tenacity.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "pyarrow.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "pandas.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "snappy.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "zstandard.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "pydantic.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "pydantic_core.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "pytest.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "fastavro.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "mmh3.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "hive_metastore.*" +ignore_missing_imports = true + 
+[[tool.mypy.overrides]] +module = "thrift.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "requests_mock.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "click.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "rich.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "fsspec.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "s3fs.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "azure.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "adlfs.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "gcsfs.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "packaging.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "tests.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "boto3" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "botocore.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "mypy_boto3_glue.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "moto" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "aiobotocore.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "aiohttp.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "duckdb.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "ray.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "daft.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "pyparsing.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "pyspark.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "strictyaml.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "sortedcontainers.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "numpy.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "sqlalchemy.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "Cython.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "setuptools.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "tenacity.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "pyarrow.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "pandas.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "snappy.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "zstandard.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "pydantic.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "pydantic_core.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "pytest.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "fastavro.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "mmh3.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "hive_metastore.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "thrift.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "requests_mock.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "click.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "rich.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "fsspec.*" +ignore_missing_imports = true + 
+[[tool.mypy.overrides]] +module = "s3fs.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "azure.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "adlfs.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "gcsfs.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "packaging.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "tests.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "boto3" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "botocore.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "mypy_boto3_glue.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "moto" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "aiobotocore.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "aiohttp.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "duckdb.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "ray.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "daft.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "pyparsing.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "pyspark.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "strictyaml.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "sortedcontainers.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "numpy.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "sqlalchemy.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "Cython.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "setuptools.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "tenacity.*" +ignore_missing_imports = true + [tool.poetry.scripts] pyiceberg = "pyiceberg.cli.console:run" diff --git a/tests/api/exclude/pyiceberg-0.6.0.yaml b/tests/api/exclude/pyiceberg-0.6.0.yaml new file mode 100644 index 000000000..31c590a30 --- /dev/null +++ b/tests/api/exclude/pyiceberg-0.6.0.yaml @@ -0,0 +1,5 @@ +exclude: +- obj_path: pyiceberg.avro.decoder_fast.CythonBinaryDecoder + kind: CLASS_REMOVED_BASE +- obj_path: pyiceberg.table.create_mapping_from_schema + kind: OBJECT_REMOVED \ No newline at end of file diff --git a/tests/api/test_api.py b/tests/api/test_api.py index b0c9917f0..c58a5b150 100644 --- a/tests/api/test_api.py +++ b/tests/api/test_api.py @@ -17,14 +17,18 @@ import re import subprocess +from collections import namedtuple +from importlib import resources import griffe import pytest +import yaml from griffe.exceptions import GitError +Exclusion = namedtuple('Exclusion', 'obj_path, kind') -@pytest.mark.api -def test_breaking_change() -> None: + +def fetch_tags() -> None: check_if_upstream = subprocess.run( ["git", "remote", "get-url", "origin"], text=True, @@ -48,6 +52,8 @@ def test_breaking_change() -> None: if fetch_tags.returncode != 0: raise GitError(f"Cannot fetch Git tags with command: {fetch_cmd} from origin: {origin}") + +def get_latest_release() -> str: list_tags = subprocess.run( ["git", "tag", "-l", "--sort=-version:refname"], text=True, @@ -64,13 +70,40 @@ def test_breaking_change() -> None: p = re.compile(r"^pyiceberg-\d+.\d+.\d+$") releases = [t for t in tags if p.match(t)] + return releases[0] + + +@pytest.mark.api +def test_breaking_change() -> None: + """ + Check for breaking changes since the latest 
release. + + We first fetch the tags from the parent repository, to get + the commit reference of the latest release. Then, we check for + breaking changes that aren't specified in the exclusion list. + + If a breaking change is intended, the Exclusion can be added as: + ``` + - obj_path: pyiceberg.path_to_object + kind: griffe.enumerations.BreakageKind.name + ``` + to the corresponding "tests.api.exclude..yaml" file. + """ + fetch_tags() + latest_release = get_latest_release() + + previous = griffe.load_git("pyiceberg", ref=latest_release) + current = griffe.load("pyiceberg") + + with resources.files("tests.api.exclude").joinpath(f"{latest_release}.yaml").open("r") as exclude_file: + exclusion_list = yaml.safe_load(exclude_file)["exclude"] - previous = griffe.load_git("pyiceberg", ref=releases[0]) - current = griffe.load_git("pyiceberg") + exclusion_set = {Exclusion(**x) for x in exclusion_list} breaking_changes = [] for breakage in griffe.find_breaking_changes(previous, current): - breaking_changes.append(breakage.as_dict()) + if Exclusion(obj_path=breakage.obj.path, kind=breakage.kind.name) not in exclusion_set: + breaking_changes.append(breakage.as_dict()) - assert not breaking_changes, f"Total of {len(breaking_changes)} breaking API changes since {releases[0]}" + assert not breaking_changes, f"Total of {len(breaking_changes)} breaking API changes since {latest_release}" From b0460d6a07151d32630a4e7a77043a4eb37e63ed Mon Sep 17 00:00:00 2001 From: Sung Yun <107272191+syun64@users.noreply.github.com> Date: Thu, 22 Feb 2024 15:18:36 +0000 Subject: [PATCH 21/25] license --- tests/api/exclude/pyiceberg-0.6.0.yaml | 24 +++++++++++++++++++++++- 1 file changed, 23 insertions(+), 1 deletion(-) diff --git a/tests/api/exclude/pyiceberg-0.6.0.yaml b/tests/api/exclude/pyiceberg-0.6.0.yaml index 31c590a30..0f3b7cbc0 100644 --- a/tests/api/exclude/pyiceberg-0.6.0.yaml +++ b/tests/api/exclude/pyiceberg-0.6.0.yaml @@ -1,5 +1,27 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# + +# The format of this file is documented at +# https://cwiki.apache.org/confluence/display/INFRA/Git+-+.asf.yaml+features + exclude: - obj_path: pyiceberg.avro.decoder_fast.CythonBinaryDecoder kind: CLASS_REMOVED_BASE - obj_path: pyiceberg.table.create_mapping_from_schema - kind: OBJECT_REMOVED \ No newline at end of file + kind: OBJECT_REMOVED From 3dc603105a2958d7ff3288a26646e087fc36f47a Mon Sep 17 00:00:00 2001 From: Sung Yun <107272191+syun64@users.noreply.github.com> Date: Thu, 22 Feb 2024 16:30:22 +0000 Subject: [PATCH 22/25] fix --- poetry.lock | 62 ++-- pyproject.toml | 473 +++++++++++++++++++++++++ tests/api/exclude/pyiceberg-0.6.0.yaml | 20 ++ tests/api/test_api.py | 2 +- 4 files changed, 524 insertions(+), 33 deletions(-) diff --git a/poetry.lock b/poetry.lock index 7fbd07658..8a67e1342 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1591,21 +1591,21 @@ testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs [[package]] name = "importlib-resources" -version = "5.13.0" +version = "6.1.1" description = "Read resources from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_resources-5.13.0-py3-none-any.whl", hash = "sha256:9f7bd0c97b79972a6cce36a366356d16d5e13b09679c11a58f1014bfdf8e64b2"}, - {file = "importlib_resources-5.13.0.tar.gz", hash = "sha256:82d5c6cca930697dbbd86c93333bb2c2e72861d4789a11c2662b933e5ad2b528"}, + {file = "importlib_resources-6.1.1-py3-none-any.whl", hash = "sha256:e8bf90d8213b486f428c9c39714b920041cb02c184686a3dee24905aaa8105d6"}, + {file = "importlib_resources-6.1.1.tar.gz", hash = "sha256:3893a00122eafde6894c59914446a512f728a0c1a45f9bb9b63721b6bacf0b4a"}, ] [package.dependencies] zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff", "zipp (>=3.17)"] [[package]] name = "iniconfig" @@ -1780,21 +1780,21 @@ format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validat format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] [[package]] -name = "jsonschema-spec" -version = "0.1.3" +name = "jsonschema-path" +version = "0.3.2" description = "JSONSchema Spec with object-oriented paths" optional = false -python-versions = ">=3.7.0,<4.0.0" +python-versions = ">=3.8.0,<4.0.0" files = [ - {file = "jsonschema_spec-0.1.3-py3-none-any.whl", hash = "sha256:b3cde007ad65c2e631e2f8653cf187124a2c714d02d9fafbab68ad64bf5745d6"}, - {file = "jsonschema_spec-0.1.3.tar.gz", hash = "sha256:8d8db7c255e524fab1016a952a9143e5b6e3c074f4ed25d1878f8e97806caec0"}, + {file = "jsonschema_path-0.3.2-py3-none-any.whl", hash = "sha256:271aedfefcd161a0f467bdf23e1d9183691a61eaabf4b761046a914e369336c7"}, + {file = "jsonschema_path-0.3.2.tar.gz", hash = "sha256:4d0dababf341e36e9b91a5fb2a3e3fd300b0150e7fe88df4e55cc8253c5a3989"}, ] [package.dependencies] -jsonschema = ">=4.0.0,<5.0.0" pathable = 
">=0.4.1,<0.5.0" PyYAML = ">=5.1" -typing-extensions = ">=4.3.0,<5.0.0" +referencing = ">=0.28.0,<0.32.0" +requests = ">=2.31.0,<3.0.0" [[package]] name = "jsonschema-specifications" @@ -2439,39 +2439,37 @@ signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] [[package]] name = "openapi-schema-validator" -version = "0.4.3" +version = "0.6.2" description = "OpenAPI schema validation for Python" optional = false -python-versions = ">=3.7.0,<4.0.0" +python-versions = ">=3.8.0,<4.0.0" files = [ - {file = "openapi_schema_validator-0.4.3-py3-none-any.whl", hash = "sha256:f1eff2a7936546a3ce62b88a17d09de93c9bd229cbc43cb696c988a61a382548"}, - {file = "openapi_schema_validator-0.4.3.tar.gz", hash = "sha256:6940dba9f4906c97078fea6fd9d5a3a3384207db368c4e32f6af6abd7c5c560b"}, + {file = "openapi_schema_validator-0.6.2-py3-none-any.whl", hash = "sha256:c4887c1347c669eb7cded9090f4438b710845cd0f90d1fb9e1b3303fb37339f8"}, + {file = "openapi_schema_validator-0.6.2.tar.gz", hash = "sha256:11a95c9c9017912964e3e5f2545a5b11c3814880681fcacfb73b1759bb4f2804"}, ] [package.dependencies] -jsonschema = ">=4.0.0,<5.0.0" +jsonschema = ">=4.19.1,<5.0.0" +jsonschema-specifications = ">=2023.5.2,<2024.0.0" rfc3339-validator = "*" [[package]] name = "openapi-spec-validator" -version = "0.5.5" +version = "0.7.1" description = "OpenAPI 2.0 (aka Swagger) and OpenAPI 3 spec validator" optional = false -python-versions = ">=3.7.0,<4.0.0" +python-versions = ">=3.8.0,<4.0.0" files = [ - {file = "openapi_spec_validator-0.5.5-py3-none-any.whl", hash = "sha256:93ba247f585e1447214b4207728a7cce3726d148238217be69e6b8725c118fbe"}, - {file = "openapi_spec_validator-0.5.5.tar.gz", hash = "sha256:3010df5237748e25d7fac2b2aaf13457c1afd02735b2bd6f008a10079c8f443a"}, + {file = "openapi_spec_validator-0.7.1-py3-none-any.whl", hash = "sha256:3c81825043f24ccbcd2f4b149b11e8231abce5ba84f37065e14ec947d8f4e959"}, + {file = "openapi_spec_validator-0.7.1.tar.gz", hash = "sha256:8577b85a8268685da6f8aa30990b83b7960d4d1117e901d451b5d572605e5ec7"}, ] [package.dependencies] -importlib-resources = {version = ">=5.8.0,<6.0.0", markers = "python_version < \"3.9\""} -jsonschema = ">=4.0.0,<5.0.0" -jsonschema-spec = ">=0.1.1,<0.2.0" +importlib-resources = {version = ">=5.8,<7.0", markers = "python_version < \"3.9\""} +jsonschema = ">=4.18.0,<5.0.0" +jsonschema-path = ">=0.3.1,<0.4.0" lazy-object-proxy = ">=1.7.1,<2.0.0" -openapi-schema-validator = ">=0.4.2,<0.5.0" - -[package.extras] -requests = ["requests"] +openapi-schema-validator = ">=0.6.0,<0.7.0" [[package]] name = "packaging" @@ -3377,13 +3375,13 @@ tune = ["fsspec", "pandas", "pyarrow (>=6.0.1)", "requests", "tensorboardX (>=1. 
[[package]] name = "referencing" -version = "0.33.0" +version = "0.31.1" description = "JSON Referencing + Python" optional = false python-versions = ">=3.8" files = [ - {file = "referencing-0.33.0-py3-none-any.whl", hash = "sha256:39240f2ecc770258f28b642dd47fd74bc8b02484de54e1882b74b35ebd779bd5"}, - {file = "referencing-0.33.0.tar.gz", hash = "sha256:c775fedf74bc0f9189c2a3be1c12fd03e8c23f4d371dce795df44e06c5b412f7"}, + {file = "referencing-0.31.1-py3-none-any.whl", hash = "sha256:c19c4d006f1757e3dd75c4f784d38f8698d87b649c54f9ace14e5e8c9667c01d"}, + {file = "referencing-0.31.1.tar.gz", hash = "sha256:81a1471c68c9d5e3831c30ad1dd9815c45b558e596653db751a2bfdd17b3b9ec"}, ] [package.dependencies] @@ -4373,4 +4371,4 @@ zstandard = ["zstandard"] [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "ae2bc43b1789ecb1b25661e80d9e127fff7e80019ce67206cfa253c8a4dbb397" +content-hash = "b89fff652fddccff644fab9f6524227187866bbfa05183101c68d1121f4e0fa0" diff --git a/pyproject.toml b/pyproject.toml index dbee45a22..3e4f79e9b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -88,6 +88,7 @@ pyspark = "3.5.0" cython = "3.0.8" griffe = "^0.40.0" pyyaml = "^6.0.1" +importlib-resources = "^6.1.1" [[tool.mypy.overrides]] module = "pytest_mock.*" @@ -97,6 +98,478 @@ ignore_missing_imports = true module = "griffe.*" ignore_missing_imports = true +[[tool.mypy.overrides]] +module = "importlib_resources.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "pyarrow.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "pandas.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "snappy.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "zstandard.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "pydantic.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "pydantic_core.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "pytest.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "fastavro.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "mmh3.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "hive_metastore.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "thrift.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "requests_mock.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "click.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "rich.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "fsspec.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "s3fs.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "azure.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "adlfs.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "gcsfs.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "packaging.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "tests.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "boto3" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "botocore.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "mypy_boto3_glue.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "moto" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "aiobotocore.*" +ignore_missing_imports = true + 
+[[tool.mypy.overrides]] +module = "aiohttp.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "duckdb.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "ray.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "daft.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "pyparsing.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "pyspark.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "strictyaml.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "sortedcontainers.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "numpy.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "sqlalchemy.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "Cython.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "setuptools.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "tenacity.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "pyarrow.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "pandas.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "snappy.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "zstandard.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "pydantic.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "pydantic_core.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "pytest.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "fastavro.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "mmh3.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "hive_metastore.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "thrift.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "requests_mock.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "click.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "rich.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "fsspec.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "s3fs.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "azure.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "adlfs.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "gcsfs.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "packaging.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "tests.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "boto3" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "botocore.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "mypy_boto3_glue.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "moto" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "aiobotocore.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "aiohttp.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "duckdb.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "ray.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "daft.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "pyparsing.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] 
+module = "pyspark.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "strictyaml.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "sortedcontainers.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "numpy.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "sqlalchemy.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "Cython.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "setuptools.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "tenacity.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "pyarrow.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "pandas.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "snappy.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "zstandard.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "pydantic.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "pydantic_core.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "pytest.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "fastavro.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "mmh3.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "hive_metastore.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "thrift.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "requests_mock.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "click.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "rich.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "fsspec.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "s3fs.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "azure.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "adlfs.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "gcsfs.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "packaging.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "tests.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "boto3" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "botocore.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "mypy_boto3_glue.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "moto" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "aiobotocore.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "aiohttp.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "duckdb.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "ray.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "daft.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "pyparsing.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "pyspark.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "strictyaml.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "sortedcontainers.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "numpy.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "sqlalchemy.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] 
+module = "Cython.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "setuptools.*" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "tenacity.*" +ignore_missing_imports = true + [[tool.mypy.overrides]] module = "pyarrow.*" ignore_missing_imports = true diff --git a/tests/api/exclude/pyiceberg-0.6.0.yaml b/tests/api/exclude/pyiceberg-0.6.0.yaml index 0f3b7cbc0..3fb32eb33 100644 --- a/tests/api/exclude/pyiceberg-0.6.0.yaml +++ b/tests/api/exclude/pyiceberg-0.6.0.yaml @@ -25,3 +25,23 @@ exclude: kind: CLASS_REMOVED_BASE - obj_path: pyiceberg.table.create_mapping_from_schema kind: OBJECT_REMOVED +- obj_path: pyiceberg.avro.decoder_fast.BinaryDecoder + kind: OBJECT_REMOVED +- obj_path: pyiceberg.avro.decoder_fast.CythonBinaryDecoder.tell + kind: RETURN_CHANGED_TYPE +- obj_path: pyiceberg.avro.decoder_fast.CythonBinaryDecoder.read + kind: RETURN_CHANGED_TYPE +- obj_path: pyiceberg.avro.decoder_fast.CythonBinaryDecoder.read_int + kind: RETURN_CHANGED_TYPE +- obj_path: pyiceberg.avro.decoder_fast.CythonBinaryDecoder.read_float + kind: RETURN_CHANGED_TYPE +- obj_path: pyiceberg.avro.decoder_fast.CythonBinaryDecoder.read_double + kind: RETURN_CHANGED_TYPE +- obj_path: pyiceberg.avro.decoder_fast.CythonBinaryDecoder.read_utf8 + kind: RETURN_CHANGED_TYPE +- obj_path: pyiceberg.avro.decoder_fast.CythonBinaryDecoder.read_int_bytes_dict + kind: RETURN_CHANGED_TYPE +- obj_path: CythonBinaryDecoder.read_int_bytes_dict + kind: RETURN_CHANGED_TYPE +- obj_path: pyiceberg.avro.decoder_fast.CythonBinaryDecoder.read_bytes + kind: RETURN_CHANGED_TYPE diff --git a/tests/api/test_api.py b/tests/api/test_api.py index c58a5b150..ed789151e 100644 --- a/tests/api/test_api.py +++ b/tests/api/test_api.py @@ -18,9 +18,9 @@ import re import subprocess from collections import namedtuple -from importlib import resources import griffe +import importlib_resources as resources import pytest import yaml from griffe.exceptions import GitError From bd152bd62a0627c53f6320503346b2f36019e779 Mon Sep 17 00:00:00 2001 From: Sung Yun <107272191+syun64@users.noreply.github.com> Date: Thu, 29 Feb 2024 03:57:32 +0000 Subject: [PATCH 23/25] all exclusion list --- tests/api/exclude/all.yaml | 29 ++++++++++++++++++++++++++ tests/api/exclude/pyiceberg-0.6.0.yaml | 26 ++--------------------- tests/api/test_api.py | 14 ++++++------- 3 files changed, 38 insertions(+), 31 deletions(-) create mode 100644 tests/api/exclude/all.yaml diff --git a/tests/api/exclude/all.yaml b/tests/api/exclude/all.yaml new file mode 100644 index 000000000..f6eb4f966 --- /dev/null +++ b/tests/api/exclude/all.yaml @@ -0,0 +1,29 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# + +# The format of this file is documented at +# https://cwiki.apache.org/confluence/display/INFRA/Git+-+.asf.yaml+features + + +# `griffe.load_git` and `griffe.load` functions seem to read .pyi vs .so +# when interpreting the members of a Cython class. Until this issue is +# resolved, we will ignore the Cython classes from the API compatibility check + +exclude: +- pyiceberg.avro.decoder_fast.* \ No newline at end of file diff --git a/tests/api/exclude/pyiceberg-0.6.0.yaml b/tests/api/exclude/pyiceberg-0.6.0.yaml index 3fb32eb33..160849cbf 100644 --- a/tests/api/exclude/pyiceberg-0.6.0.yaml +++ b/tests/api/exclude/pyiceberg-0.6.0.yaml @@ -21,27 +21,5 @@ # https://cwiki.apache.org/confluence/display/INFRA/Git+-+.asf.yaml+features exclude: -- obj_path: pyiceberg.avro.decoder_fast.CythonBinaryDecoder - kind: CLASS_REMOVED_BASE -- obj_path: pyiceberg.table.create_mapping_from_schema - kind: OBJECT_REMOVED -- obj_path: pyiceberg.avro.decoder_fast.BinaryDecoder - kind: OBJECT_REMOVED -- obj_path: pyiceberg.avro.decoder_fast.CythonBinaryDecoder.tell - kind: RETURN_CHANGED_TYPE -- obj_path: pyiceberg.avro.decoder_fast.CythonBinaryDecoder.read - kind: RETURN_CHANGED_TYPE -- obj_path: pyiceberg.avro.decoder_fast.CythonBinaryDecoder.read_int - kind: RETURN_CHANGED_TYPE -- obj_path: pyiceberg.avro.decoder_fast.CythonBinaryDecoder.read_float - kind: RETURN_CHANGED_TYPE -- obj_path: pyiceberg.avro.decoder_fast.CythonBinaryDecoder.read_double - kind: RETURN_CHANGED_TYPE -- obj_path: pyiceberg.avro.decoder_fast.CythonBinaryDecoder.read_utf8 - kind: RETURN_CHANGED_TYPE -- obj_path: pyiceberg.avro.decoder_fast.CythonBinaryDecoder.read_int_bytes_dict - kind: RETURN_CHANGED_TYPE -- obj_path: CythonBinaryDecoder.read_int_bytes_dict - kind: RETURN_CHANGED_TYPE -- obj_path: pyiceberg.avro.decoder_fast.CythonBinaryDecoder.read_bytes - kind: RETURN_CHANGED_TYPE +- pyiceberg.table.create_mapping_from_schema +- pyiceberg.cli.console.run diff --git a/tests/api/test_api.py b/tests/api/test_api.py index ed789151e..7a81650b5 100644 --- a/tests/api/test_api.py +++ b/tests/api/test_api.py @@ -15,9 +15,9 @@ # specific language governing permissions and limitations # under the License. 
+import fnmatch import re import subprocess -from collections import namedtuple import griffe import importlib_resources as resources @@ -25,8 +25,6 @@ import yaml from griffe.exceptions import GitError -Exclusion = namedtuple('Exclusion', 'obj_path, kind') - def fetch_tags() -> None: check_if_upstream = subprocess.run( @@ -95,15 +93,17 @@ def test_breaking_change() -> None: previous = griffe.load_git("pyiceberg", ref=latest_release) current = griffe.load("pyiceberg") - with resources.files("tests.api.exclude").joinpath(f"{latest_release}.yaml").open("r") as exclude_file: - exclusion_list = yaml.safe_load(exclude_file)["exclude"] + exclusion_list = [] - exclusion_set = {Exclusion(**x) for x in exclusion_list} + for file_name in [latest_release, "all"]: + with resources.files("tests.api.exclude").joinpath(f"{file_name}.yaml").open("r") as exclude_file: + if (exclude := yaml.safe_load(exclude_file)["exclude"]) is not None: + exclusion_list += exclude breaking_changes = [] for breakage in griffe.find_breaking_changes(previous, current): - if Exclusion(obj_path=breakage.obj.path, kind=breakage.kind.name) not in exclusion_set: + if not any(fnmatch.fnmatch(breakage.obj.path, exclude_path) for exclude_path in exclusion_list): breaking_changes.append(breakage.as_dict()) assert not breaking_changes, f"Total of {len(breaking_changes)} breaking API changes since {latest_release}" From 4d5b338096817fcb5510fd70b148d91f817937e8 Mon Sep 17 00:00:00 2001 From: Sung Yun <107272191+syun64@users.noreply.github.com> Date: Thu, 29 Feb 2024 13:48:26 +0000 Subject: [PATCH 24/25] lint --- tests/api/exclude/all.yaml | 5 +---- tests/api/exclude/pyiceberg-0.6.0.yaml | 2 -- 2 files changed, 1 insertion(+), 6 deletions(-) diff --git a/tests/api/exclude/all.yaml b/tests/api/exclude/all.yaml index f6eb4f966..ab388185d 100644 --- a/tests/api/exclude/all.yaml +++ b/tests/api/exclude/all.yaml @@ -17,13 +17,10 @@ # under the License. # -# The format of this file is documented at -# https://cwiki.apache.org/confluence/display/INFRA/Git+-+.asf.yaml+features - # `griffe.load_git` and `griffe.load` functions seem to read .pyi vs .so # when interpreting the members of a Cython class. Until this issue is # resolved, we will ignore the Cython classes from the API compatibility check exclude: -- pyiceberg.avro.decoder_fast.* \ No newline at end of file +- pyiceberg.avro.decoder_fast.* diff --git a/tests/api/exclude/pyiceberg-0.6.0.yaml b/tests/api/exclude/pyiceberg-0.6.0.yaml index 160849cbf..86e09e96b 100644 --- a/tests/api/exclude/pyiceberg-0.6.0.yaml +++ b/tests/api/exclude/pyiceberg-0.6.0.yaml @@ -17,8 +17,6 @@ # under the License. 
# -# The format of this file is documented at -# https://cwiki.apache.org/confluence/display/INFRA/Git+-+.asf.yaml+features exclude: - pyiceberg.table.create_mapping_from_schema From 40986faaacc81c0a82dd7a6cbd38be867452a86f Mon Sep 17 00:00:00 2001 From: Sung Yun <107272191+syun64@users.noreply.github.com> Date: Thu, 29 Feb 2024 15:05:08 +0000 Subject: [PATCH 25/25] more exclusions --- tests/api/exclude/pyiceberg-0.6.0.yaml | 12 ++++++++++-- tests/api/test_api.py | 4 ++-- 2 files changed, 12 insertions(+), 4 deletions(-) diff --git a/tests/api/exclude/pyiceberg-0.6.0.yaml b/tests/api/exclude/pyiceberg-0.6.0.yaml index 86e09e96b..285cdc278 100644 --- a/tests/api/exclude/pyiceberg-0.6.0.yaml +++ b/tests/api/exclude/pyiceberg-0.6.0.yaml @@ -19,5 +19,13 @@ exclude: -- pyiceberg.table.create_mapping_from_schema -- pyiceberg.cli.console.run +- pyiceberg.table.create_mapping_from_schema # Removed import (Alias) #441 +- pyiceberg.table.Table.next_sequence_number # Table -> TableMetadata #471 +- pyiceberg.table.Table.new_snapshot_id # Table -> TableMetadata #471 +- pyiceberg.table.StaticTable.next_sequence_number # Table -> TableMetadata #471 +- pyiceberg.table.StaticTable.new_snapshot_id # Table -> TableMetadata #471 +- pyiceberg.table.Transaction.add_snapshot # removed in favor of directly using TableUpdate and TableRequirement #471 +- pyiceberg.table.Transaction.set_ref_snapshot # removed in favor of directly using TableUpdate and TableRequirement #471 +- pyiceberg.catalog.rest.TokenResponse.* # nullability change #466 +- pyiceberg.io.pyarrow.write_file # Parameter change #471 +- pyiceberg.cli.console.run # Positional parameter change #472 diff --git a/tests/api/test_api.py b/tests/api/test_api.py index 7a81650b5..5feede145 100644 --- a/tests/api/test_api.py +++ b/tests/api/test_api.py @@ -82,8 +82,8 @@ def test_breaking_change() -> None: If a breaking change is intended, the Exclusion can be added as: ``` - - obj_path: pyiceberg.path_to_object - kind: griffe.enumerations.BreakageKind.name + exclude + - pyiceberg.path_to_object # justification as comment #PRNUM ``` to the corresponding "tests.api.exclude..yaml" file. """
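With these last two patches the check is fully glob-based: every entry under `exclude:` is treated as an `fnmatch` pattern against the breakage's object path, which is how the single `pyiceberg.avro.decoder_fast.*` line in `all.yaml` silences the Cython-related false positives. A self-contained sketch of that filtering step, with made-up breakage paths for illustration:

```python
import fnmatch

# Patterns as they appear under `exclude:` in the YAML files.
exclusion_list = [
    "pyiceberg.avro.decoder_fast.*",
    "pyiceberg.table.create_mapping_from_schema",
]

# Hypothetical object paths as reported by griffe.find_breaking_changes().
reported = [
    "pyiceberg.avro.decoder_fast.CythonBinaryDecoder.read",  # matched by the wildcard
    "pyiceberg.table.Table.append",                          # not excluded
]

remaining = [
    path
    for path in reported
    if not any(fnmatch.fnmatch(path, pattern) for pattern in exclusion_list)
]
assert remaining == ["pyiceberg.table.Table.append"]
```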