From 318600b5598be80d8513ed081faea13483a249a1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 31 Dec 2023 07:53:10 +0100 Subject: [PATCH 001/169] Build: Bump sqlalchemy from 2.0.23 to 2.0.24 (#244) --- poetry.lock | 106 ++++++++++++++++++++++++++-------------------------- 1 file changed, 53 insertions(+), 53 deletions(-) diff --git a/poetry.lock b/poetry.lock index f6aa09cb01..65a2c093b2 100644 --- a/poetry.lock +++ b/poetry.lock @@ -3660,60 +3660,60 @@ files = [ [[package]] name = "sqlalchemy" -version = "2.0.23" +version = "2.0.24" description = "Database Abstraction Library" optional = true python-versions = ">=3.7" files = [ - {file = "SQLAlchemy-2.0.23-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:638c2c0b6b4661a4fd264f6fb804eccd392745c5887f9317feb64bb7cb03b3ea"}, - {file = "SQLAlchemy-2.0.23-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e3b5036aa326dc2df50cba3c958e29b291a80f604b1afa4c8ce73e78e1c9f01d"}, - {file = "SQLAlchemy-2.0.23-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:787af80107fb691934a01889ca8f82a44adedbf5ef3d6ad7d0f0b9ac557e0c34"}, - {file = "SQLAlchemy-2.0.23-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c14eba45983d2f48f7546bb32b47937ee2cafae353646295f0e99f35b14286ab"}, - {file = "SQLAlchemy-2.0.23-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0666031df46b9badba9bed00092a1ffa3aa063a5e68fa244acd9f08070e936d3"}, - {file = "SQLAlchemy-2.0.23-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:89a01238fcb9a8af118eaad3ffcc5dedaacbd429dc6fdc43fe430d3a941ff965"}, - {file = "SQLAlchemy-2.0.23-cp310-cp310-win32.whl", hash = "sha256:cabafc7837b6cec61c0e1e5c6d14ef250b675fa9c3060ed8a7e38653bd732ff8"}, - {file = "SQLAlchemy-2.0.23-cp310-cp310-win_amd64.whl", hash = "sha256:87a3d6b53c39cd173990de2f5f4b83431d534a74f0e2f88bd16eabb5667e65c6"}, - {file = "SQLAlchemy-2.0.23-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d5578e6863eeb998980c212a39106ea139bdc0b3f73291b96e27c929c90cd8e1"}, - {file = "SQLAlchemy-2.0.23-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:62d9e964870ea5ade4bc870ac4004c456efe75fb50404c03c5fd61f8bc669a72"}, - {file = "SQLAlchemy-2.0.23-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c80c38bd2ea35b97cbf7c21aeb129dcbebbf344ee01a7141016ab7b851464f8e"}, - {file = "SQLAlchemy-2.0.23-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75eefe09e98043cff2fb8af9796e20747ae870c903dc61d41b0c2e55128f958d"}, - {file = "SQLAlchemy-2.0.23-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd45a5b6c68357578263d74daab6ff9439517f87da63442d244f9f23df56138d"}, - {file = "SQLAlchemy-2.0.23-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a86cb7063e2c9fb8e774f77fbf8475516d270a3e989da55fa05d08089d77f8c4"}, - {file = "SQLAlchemy-2.0.23-cp311-cp311-win32.whl", hash = "sha256:b41f5d65b54cdf4934ecede2f41b9c60c9f785620416e8e6c48349ab18643855"}, - {file = "SQLAlchemy-2.0.23-cp311-cp311-win_amd64.whl", hash = "sha256:9ca922f305d67605668e93991aaf2c12239c78207bca3b891cd51a4515c72e22"}, - {file = "SQLAlchemy-2.0.23-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d0f7fb0c7527c41fa6fcae2be537ac137f636a41b4c5a4c58914541e2f436b45"}, - {file = "SQLAlchemy-2.0.23-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7c424983ab447dab126c39d3ce3be5bee95700783204a72549c3dceffe0fc8f4"}, - {file = "SQLAlchemy-2.0.23-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:f508ba8f89e0a5ecdfd3761f82dda2a3d7b678a626967608f4273e0dba8f07ac"}, - {file = "SQLAlchemy-2.0.23-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6463aa765cf02b9247e38b35853923edbf2f6fd1963df88706bc1d02410a5577"}, - {file = "SQLAlchemy-2.0.23-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e599a51acf3cc4d31d1a0cf248d8f8d863b6386d2b6782c5074427ebb7803bda"}, - {file = "SQLAlchemy-2.0.23-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fd54601ef9cc455a0c61e5245f690c8a3ad67ddb03d3b91c361d076def0b4c60"}, - {file = "SQLAlchemy-2.0.23-cp312-cp312-win32.whl", hash = "sha256:42d0b0290a8fb0165ea2c2781ae66e95cca6e27a2fbe1016ff8db3112ac1e846"}, - {file = "SQLAlchemy-2.0.23-cp312-cp312-win_amd64.whl", hash = "sha256:227135ef1e48165f37590b8bfc44ed7ff4c074bf04dc8d6f8e7f1c14a94aa6ca"}, - {file = "SQLAlchemy-2.0.23-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:14aebfe28b99f24f8a4c1346c48bc3d63705b1f919a24c27471136d2f219f02d"}, - {file = "SQLAlchemy-2.0.23-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e983fa42164577d073778d06d2cc5d020322425a509a08119bdcee70ad856bf"}, - {file = "SQLAlchemy-2.0.23-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e0dc9031baa46ad0dd5a269cb7a92a73284d1309228be1d5935dac8fb3cae24"}, - {file = "SQLAlchemy-2.0.23-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5f94aeb99f43729960638e7468d4688f6efccb837a858b34574e01143cf11f89"}, - {file = "SQLAlchemy-2.0.23-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:63bfc3acc970776036f6d1d0e65faa7473be9f3135d37a463c5eba5efcdb24c8"}, - {file = "SQLAlchemy-2.0.23-cp37-cp37m-win32.whl", hash = "sha256:f48ed89dd11c3c586f45e9eec1e437b355b3b6f6884ea4a4c3111a3358fd0c18"}, - {file = "SQLAlchemy-2.0.23-cp37-cp37m-win_amd64.whl", hash = "sha256:1e018aba8363adb0599e745af245306cb8c46b9ad0a6fc0a86745b6ff7d940fc"}, - {file = "SQLAlchemy-2.0.23-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:64ac935a90bc479fee77f9463f298943b0e60005fe5de2aa654d9cdef46c54df"}, - {file = "SQLAlchemy-2.0.23-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c4722f3bc3c1c2fcc3702dbe0016ba31148dd6efcd2a2fd33c1b4897c6a19693"}, - {file = "SQLAlchemy-2.0.23-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4af79c06825e2836de21439cb2a6ce22b2ca129bad74f359bddd173f39582bf5"}, - {file = "SQLAlchemy-2.0.23-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:683ef58ca8eea4747737a1c35c11372ffeb84578d3aab8f3e10b1d13d66f2bc4"}, - {file = "SQLAlchemy-2.0.23-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d4041ad05b35f1f4da481f6b811b4af2f29e83af253bf37c3c4582b2c68934ab"}, - {file = "SQLAlchemy-2.0.23-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aeb397de65a0a62f14c257f36a726945a7f7bb60253462e8602d9b97b5cbe204"}, - {file = "SQLAlchemy-2.0.23-cp38-cp38-win32.whl", hash = "sha256:42ede90148b73fe4ab4a089f3126b2cfae8cfefc955c8174d697bb46210c8306"}, - {file = "SQLAlchemy-2.0.23-cp38-cp38-win_amd64.whl", hash = "sha256:964971b52daab357d2c0875825e36584d58f536e920f2968df8d581054eada4b"}, - {file = "SQLAlchemy-2.0.23-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:616fe7bcff0a05098f64b4478b78ec2dfa03225c23734d83d6c169eb41a93e55"}, - {file = "SQLAlchemy-2.0.23-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0e680527245895aba86afbd5bef6c316831c02aa988d1aad83c47ffe92655e74"}, - {file = "SQLAlchemy-2.0.23-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:9585b646ffb048c0250acc7dad92536591ffe35dba624bb8fd9b471e25212a35"}, - {file = "SQLAlchemy-2.0.23-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4895a63e2c271ffc7a81ea424b94060f7b3b03b4ea0cd58ab5bb676ed02f4221"}, - {file = "SQLAlchemy-2.0.23-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:cc1d21576f958c42d9aec68eba5c1a7d715e5fc07825a629015fe8e3b0657fb0"}, - {file = "SQLAlchemy-2.0.23-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:967c0b71156f793e6662dd839da54f884631755275ed71f1539c95bbada9aaab"}, - {file = "SQLAlchemy-2.0.23-cp39-cp39-win32.whl", hash = "sha256:0a8c6aa506893e25a04233bc721c6b6cf844bafd7250535abb56cb6cc1368884"}, - {file = "SQLAlchemy-2.0.23-cp39-cp39-win_amd64.whl", hash = "sha256:f3420d00d2cb42432c1d0e44540ae83185ccbbc67a6054dcc8ab5387add6620b"}, - {file = "SQLAlchemy-2.0.23-py3-none-any.whl", hash = "sha256:31952bbc527d633b9479f5f81e8b9dfada00b91d6baba021a869095f1a97006d"}, - {file = "SQLAlchemy-2.0.23.tar.gz", hash = "sha256:c1bda93cbbe4aa2aa0aa8655c5aeda505cd219ff3e8da91d1d329e143e4aff69"}, + {file = "SQLAlchemy-2.0.24-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5f801d85ba4753d4ed97181d003e5d3fa330ac7c4587d131f61d7f968f416862"}, + {file = "SQLAlchemy-2.0.24-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b35c35e3923ade1e7ac44e150dec29f5863513246c8bf85e2d7d313e3832bcfb"}, + {file = "SQLAlchemy-2.0.24-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d9b3fd5eca3c0b137a5e0e468e24ca544ed8ca4783e0e55341b7ed2807518ee"}, + {file = "SQLAlchemy-2.0.24-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a6209e689d0ff206c40032b6418e3cfcfc5af044b3f66e381d7f1ae301544b4"}, + {file = "SQLAlchemy-2.0.24-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:37e89d965b52e8b20571b5d44f26e2124b26ab63758bf1b7598a0e38fb2c4005"}, + {file = "SQLAlchemy-2.0.24-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c6910eb4ea90c0889f363965cd3c8c45a620ad27b526a7899f0054f6c1b9219e"}, + {file = "SQLAlchemy-2.0.24-cp310-cp310-win32.whl", hash = "sha256:d8e7e8a150e7b548e7ecd6ebb9211c37265991bf2504297d9454e01b58530fc6"}, + {file = "SQLAlchemy-2.0.24-cp310-cp310-win_amd64.whl", hash = "sha256:396f05c552f7fa30a129497c41bef5b4d1423f9af8fe4df0c3dcd38f3e3b9a14"}, + {file = "SQLAlchemy-2.0.24-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:adbd67dac4ebf54587198b63cd30c29fd7eafa8c0cab58893d9419414f8efe4b"}, + {file = "SQLAlchemy-2.0.24-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a0f611b431b84f55779cbb7157257d87b4a2876b067c77c4f36b15e44ced65e2"}, + {file = "SQLAlchemy-2.0.24-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56a0e90a959e18ac5f18c80d0cad9e90cb09322764f536e8a637426afb1cae2f"}, + {file = "SQLAlchemy-2.0.24-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6db686a1d9f183c639f7e06a2656af25d4ed438eda581de135d15569f16ace33"}, + {file = "SQLAlchemy-2.0.24-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f0cc0b486a56dff72dddae6b6bfa7ff201b0eeac29d4bc6f0e9725dc3c360d71"}, + {file = "SQLAlchemy-2.0.24-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4a1d4856861ba9e73bac05030cec5852eabfa9ef4af8e56c19d92de80d46fc34"}, + {file = "SQLAlchemy-2.0.24-cp311-cp311-win32.whl", hash = "sha256:a3c2753bf4f48b7a6024e5e8a394af49b1b12c817d75d06942cae03d14ff87b3"}, + {file = "SQLAlchemy-2.0.24-cp311-cp311-win_amd64.whl", hash = "sha256:38732884eabc64982a09a846bacf085596ff2371e4e41d20c0734f7e50525d01"}, + {file = 
"SQLAlchemy-2.0.24-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9f992e0f916201731993eab8502912878f02287d9f765ef843677ff118d0e0b1"}, + {file = "SQLAlchemy-2.0.24-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2587e108463cc2e5b45a896b2e7cc8659a517038026922a758bde009271aed11"}, + {file = "SQLAlchemy-2.0.24-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bb7cedcddffca98c40bb0becd3423e293d1fef442b869da40843d751785beb3"}, + {file = "SQLAlchemy-2.0.24-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83fa6df0e035689df89ff77a46bf8738696785d3156c2c61494acdcddc75c69d"}, + {file = "SQLAlchemy-2.0.24-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:cc889fda484d54d0b31feec409406267616536d048a450fc46943e152700bb79"}, + {file = "SQLAlchemy-2.0.24-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:57ef6f2cb8b09a042d0dbeaa46a30f2df5dd1e1eb889ba258b0d5d7d6011b81c"}, + {file = "SQLAlchemy-2.0.24-cp312-cp312-win32.whl", hash = "sha256:ea490564435b5b204d8154f0e18387b499ea3cedc1e6af3b3a2ab18291d85aa7"}, + {file = "SQLAlchemy-2.0.24-cp312-cp312-win_amd64.whl", hash = "sha256:ccfd336f96d4c9bbab0309f2a565bf15c468c2d8b2d277a32f89c5940f71fcf9"}, + {file = "SQLAlchemy-2.0.24-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9aaaaa846b10dfbe1bda71079d0e31a7e2cebedda9409fa7dba3dfed1ae803e8"}, + {file = "SQLAlchemy-2.0.24-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95bae3d38f8808d79072da25d5e5a6095f36fe1f9d6c614dd72c59ca8397c7c0"}, + {file = "SQLAlchemy-2.0.24-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a04191a7c8d77e63f6fc1e8336d6c6e93176c0c010833e74410e647f0284f5a1"}, + {file = "SQLAlchemy-2.0.24-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:acc58b7c2e40235712d857fdfc8f2bda9608f4a850d8d9ac0dd1fc80939ca6ac"}, + {file = "SQLAlchemy-2.0.24-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:00d76fe5d7cdb5d84d625ce002ce29fefba0bfd98e212ae66793fed30af73931"}, + {file = "SQLAlchemy-2.0.24-cp37-cp37m-win32.whl", hash = "sha256:29e51f848f843bbd75d74ae64ab1ab06302cb1dccd4549d1f5afe6b4a946edb2"}, + {file = "SQLAlchemy-2.0.24-cp37-cp37m-win_amd64.whl", hash = "sha256:e9d036e343a604db3f5a6c33354018a84a1d3f6dcae3673358b404286204798c"}, + {file = "SQLAlchemy-2.0.24-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9bafaa05b19dc07fa191c1966c5e852af516840b0d7b46b7c3303faf1a349bc9"}, + {file = "SQLAlchemy-2.0.24-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e69290b921b7833c04206f233d6814c60bee1d135b09f5ae5d39229de9b46cd4"}, + {file = "SQLAlchemy-2.0.24-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8398593ccc4440ce6dffcc4f47d9b2d72b9fe7112ac12ea4a44e7d4de364db1"}, + {file = "SQLAlchemy-2.0.24-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f073321a79c81e1a009218a21089f61d87ee5fa3c9563f6be94f8b41ff181812"}, + {file = "SQLAlchemy-2.0.24-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9036ebfd934813990c5b9f71f297e77ed4963720db7d7ceec5a3fdb7cd2ef6ce"}, + {file = "SQLAlchemy-2.0.24-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fcf84fe93397a0f67733aa2a38ed4eab9fc6348189fc950e656e1ea198f45668"}, + {file = "SQLAlchemy-2.0.24-cp38-cp38-win32.whl", hash = "sha256:6f5e75de91c754365c098ac08c13fdb267577ce954fa239dd49228b573ca88d7"}, + {file = "SQLAlchemy-2.0.24-cp38-cp38-win_amd64.whl", hash = "sha256:9f29c7f0f4b42337ec5a779e166946a9f86d7d56d827e771b69ecbdf426124ac"}, + {file = "SQLAlchemy-2.0.24-cp39-cp39-macosx_10_9_x86_64.whl", 
hash = "sha256:07cc423892f2ceda9ae1daa28c0355757f362ecc7505b1ab1a3d5d8dc1c44ac6"}, + {file = "SQLAlchemy-2.0.24-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2a479aa1ab199178ff1956b09ca8a0693e70f9c762875d69292d37049ffd0d8f"}, + {file = "SQLAlchemy-2.0.24-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b8d0e8578e7f853f45f4512b5c920f6a546cd4bed44137460b2a56534644205"}, + {file = "SQLAlchemy-2.0.24-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17e7e27af178d31b436dda6a596703b02a89ba74a15e2980c35ecd9909eea3a"}, + {file = "SQLAlchemy-2.0.24-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1ca7903d5e7db791a355b579c690684fac6304478b68efdc7f2ebdcfe770d8d7"}, + {file = "SQLAlchemy-2.0.24-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db09e424d7bb89b6215a184ca93b4f29d7f00ea261b787918a1af74143b98c06"}, + {file = "SQLAlchemy-2.0.24-cp39-cp39-win32.whl", hash = "sha256:a5cd7d30e47f87b21362beeb3e86f1b5886e7d9b0294b230dde3d3f4a1591375"}, + {file = "SQLAlchemy-2.0.24-cp39-cp39-win_amd64.whl", hash = "sha256:7ae5d44517fe81079ce75cf10f96978284a6db2642c5932a69c82dbae09f009a"}, + {file = "SQLAlchemy-2.0.24-py3-none-any.whl", hash = "sha256:8f358f5cfce04417b6ff738748ca4806fe3d3ae8040fb4e6a0c9a6973ccf9b6e"}, + {file = "SQLAlchemy-2.0.24.tar.gz", hash = "sha256:6db97656fd3fe3f7e5b077f12fa6adb5feb6e0b567a3e99f47ecf5f7ea0a09e3"}, ] [package.dependencies] @@ -3723,7 +3723,7 @@ typing-extensions = ">=4.2.0" [package.extras] aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] -aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing-extensions (!=3.10.0.1)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] asyncio = ["greenlet (!=0.4.17)"] asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"] @@ -3733,7 +3733,7 @@ mssql-pyodbc = ["pyodbc"] mypy = ["mypy (>=0.910)"] mysql = ["mysqlclient (>=1.4.0)"] mysql-connector = ["mysql-connector-python"] -oracle = ["cx-oracle (>=8)"] +oracle = ["cx_oracle (>=8)"] oracle-oracledb = ["oracledb (>=1.0.1)"] postgresql = ["psycopg2 (>=2.7)"] postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] @@ -3743,7 +3743,7 @@ postgresql-psycopg2binary = ["psycopg2-binary"] postgresql-psycopg2cffi = ["psycopg2cffi"] postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] pymysql = ["pymysql"] -sqlcipher = ["sqlcipher3-binary"] +sqlcipher = ["sqlcipher3_binary"] [[package]] name = "sshpubkeys" From 90386762b9921bc3471cefd2db39171cfe290ee4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 31 Dec 2023 07:53:31 +0100 Subject: [PATCH 002/169] Build: Bump coverage from 7.3.4 to 7.4.0 (#243) --- poetry.lock | 108 ++++++++++++++++++++++++------------------------- pyproject.toml | 2 +- 2 files changed, 55 insertions(+), 55 deletions(-) diff --git a/poetry.lock b/poetry.lock index 65a2c093b2..592ff9db3c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -633,63 +633,63 @@ files = [ [[package]] name = "coverage" -version = "7.3.4" +version = "7.4.0" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.3.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:aff2bd3d585969cc4486bfc69655e862028b689404563e6b549e6a8244f226df"}, - {file = "coverage-7.3.4-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:e4353923f38d752ecfbd3f1f20bf7a3546993ae5ecd7c07fd2f25d40b4e54571"}, - {file = "coverage-7.3.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea473c37872f0159294f7073f3fa72f68b03a129799f3533b2bb44d5e9fa4f82"}, - {file = "coverage-7.3.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5214362abf26e254d749fc0c18af4c57b532a4bfde1a057565616dd3b8d7cc94"}, - {file = "coverage-7.3.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f99b7d3f7a7adfa3d11e3a48d1a91bb65739555dd6a0d3fa68aa5852d962e5b1"}, - {file = "coverage-7.3.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:74397a1263275bea9d736572d4cf338efaade2de9ff759f9c26bcdceb383bb49"}, - {file = "coverage-7.3.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:f154bd866318185ef5865ace5be3ac047b6d1cc0aeecf53bf83fe846f4384d5d"}, - {file = "coverage-7.3.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e0d84099ea7cba9ff467f9c6f747e3fc3906e2aadac1ce7b41add72e8d0a3712"}, - {file = "coverage-7.3.4-cp310-cp310-win32.whl", hash = "sha256:3f477fb8a56e0c603587b8278d9dbd32e54bcc2922d62405f65574bd76eba78a"}, - {file = "coverage-7.3.4-cp310-cp310-win_amd64.whl", hash = "sha256:c75738ce13d257efbb6633a049fb2ed8e87e2e6c2e906c52d1093a4d08d67c6b"}, - {file = "coverage-7.3.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:997aa14b3e014339d8101b9886063c5d06238848905d9ad6c6eabe533440a9a7"}, - {file = "coverage-7.3.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8a9c5bc5db3eb4cd55ecb8397d8e9b70247904f8eca718cc53c12dcc98e59fc8"}, - {file = "coverage-7.3.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27ee94f088397d1feea3cb524e4313ff0410ead7d968029ecc4bc5a7e1d34fbf"}, - {file = "coverage-7.3.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ce03e25e18dd9bf44723e83bc202114817f3367789052dc9e5b5c79f40cf59d"}, - {file = "coverage-7.3.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85072e99474d894e5df582faec04abe137b28972d5e466999bc64fc37f564a03"}, - {file = "coverage-7.3.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a877810ef918d0d345b783fc569608804f3ed2507bf32f14f652e4eaf5d8f8d0"}, - {file = "coverage-7.3.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:9ac17b94ab4ca66cf803f2b22d47e392f0977f9da838bf71d1f0db6c32893cb9"}, - {file = "coverage-7.3.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:36d75ef2acab74dc948d0b537ef021306796da551e8ac8b467810911000af66a"}, - {file = "coverage-7.3.4-cp311-cp311-win32.whl", hash = "sha256:47ee56c2cd445ea35a8cc3ad5c8134cb9bece3a5cb50bb8265514208d0a65928"}, - {file = "coverage-7.3.4-cp311-cp311-win_amd64.whl", hash = "sha256:11ab62d0ce5d9324915726f611f511a761efcca970bd49d876cf831b4de65be5"}, - {file = "coverage-7.3.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:33e63c578f4acce1b6cd292a66bc30164495010f1091d4b7529d014845cd9bee"}, - {file = "coverage-7.3.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:782693b817218169bfeb9b9ba7f4a9f242764e180ac9589b45112571f32a0ba6"}, - {file = "coverage-7.3.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c4277ddaad9293454da19121c59f2d850f16bcb27f71f89a5c4836906eb35ef"}, - {file = "coverage-7.3.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:3d892a19ae24b9801771a5a989fb3e850bd1ad2e2b6e83e949c65e8f37bc67a1"}, - {file = "coverage-7.3.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3024ec1b3a221bd10b5d87337d0373c2bcaf7afd86d42081afe39b3e1820323b"}, - {file = "coverage-7.3.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a1c3e9d2bbd6f3f79cfecd6f20854f4dc0c6e0ec317df2b265266d0dc06535f1"}, - {file = "coverage-7.3.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e91029d7f151d8bf5ab7d8bfe2c3dbefd239759d642b211a677bc0709c9fdb96"}, - {file = "coverage-7.3.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:6879fe41c60080aa4bb59703a526c54e0412b77e649a0d06a61782ecf0853ee1"}, - {file = "coverage-7.3.4-cp312-cp312-win32.whl", hash = "sha256:fd2f8a641f8f193968afdc8fd1697e602e199931012b574194052d132a79be13"}, - {file = "coverage-7.3.4-cp312-cp312-win_amd64.whl", hash = "sha256:d1d0ce6c6947a3a4aa5479bebceff2c807b9f3b529b637e2b33dea4468d75fc7"}, - {file = "coverage-7.3.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:36797b3625d1da885b369bdaaa3b0d9fb8865caed3c2b8230afaa6005434aa2f"}, - {file = "coverage-7.3.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bfed0ec4b419fbc807dec417c401499ea869436910e1ca524cfb4f81cf3f60e7"}, - {file = "coverage-7.3.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f97ff5a9fc2ca47f3383482858dd2cb8ddbf7514427eecf5aa5f7992d0571429"}, - {file = "coverage-7.3.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:607b6c6b35aa49defaebf4526729bd5238bc36fe3ef1a417d9839e1d96ee1e4c"}, - {file = "coverage-7.3.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8e258dcc335055ab59fe79f1dec217d9fb0cdace103d6b5c6df6b75915e7959"}, - {file = "coverage-7.3.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a02ac7c51819702b384fea5ee033a7c202f732a2a2f1fe6c41e3d4019828c8d3"}, - {file = "coverage-7.3.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b710869a15b8caf02e31d16487a931dbe78335462a122c8603bb9bd401ff6fb2"}, - {file = "coverage-7.3.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c6a23ae9348a7a92e7f750f9b7e828448e428e99c24616dec93a0720342f241d"}, - {file = "coverage-7.3.4-cp38-cp38-win32.whl", hash = "sha256:758ebaf74578b73f727acc4e8ab4b16ab6f22a5ffd7dd254e5946aba42a4ce76"}, - {file = "coverage-7.3.4-cp38-cp38-win_amd64.whl", hash = "sha256:309ed6a559bc942b7cc721f2976326efbfe81fc2b8f601c722bff927328507dc"}, - {file = "coverage-7.3.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:aefbb29dc56317a4fcb2f3857d5bce9b881038ed7e5aa5d3bcab25bd23f57328"}, - {file = "coverage-7.3.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:183c16173a70caf92e2dfcfe7c7a576de6fa9edc4119b8e13f91db7ca33a7923"}, - {file = "coverage-7.3.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a4184dcbe4f98d86470273e758f1d24191ca095412e4335ff27b417291f5964"}, - {file = "coverage-7.3.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93698ac0995516ccdca55342599a1463ed2e2d8942316da31686d4d614597ef9"}, - {file = "coverage-7.3.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb220b3596358a86361139edce40d97da7458412d412e1e10c8e1970ee8c09ab"}, - {file = "coverage-7.3.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:d5b14abde6f8d969e6b9dd8c7a013d9a2b52af1235fe7bebef25ad5c8f47fa18"}, - {file = "coverage-7.3.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:610afaf929dc0e09a5eef6981edb6a57a46b7eceff151947b836d869d6d567c1"}, - {file = "coverage-7.3.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d6ed790728fb71e6b8247bd28e77e99d0c276dff952389b5388169b8ca7b1c28"}, - {file = "coverage-7.3.4-cp39-cp39-win32.whl", hash = "sha256:c15fdfb141fcf6a900e68bfa35689e1256a670db32b96e7a931cab4a0e1600e5"}, - {file = "coverage-7.3.4-cp39-cp39-win_amd64.whl", hash = "sha256:38d0b307c4d99a7aca4e00cad4311b7c51b7ac38fb7dea2abe0d182dd4008e05"}, - {file = "coverage-7.3.4-pp38.pp39.pp310-none-any.whl", hash = "sha256:b1e0f25ae99cf247abfb3f0fac7ae25739e4cd96bf1afa3537827c576b4847e5"}, - {file = "coverage-7.3.4.tar.gz", hash = "sha256:020d56d2da5bc22a0e00a5b0d54597ee91ad72446fa4cf1b97c35022f6b6dbf0"}, + {file = "coverage-7.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36b0ea8ab20d6a7564e89cb6135920bc9188fb5f1f7152e94e8300b7b189441a"}, + {file = "coverage-7.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0676cd0ba581e514b7f726495ea75aba3eb20899d824636c6f59b0ed2f88c471"}, + {file = "coverage-7.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0ca5c71a5a1765a0f8f88022c52b6b8be740e512980362f7fdbb03725a0d6b9"}, + {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7c97726520f784239f6c62506bc70e48d01ae71e9da128259d61ca5e9788516"}, + {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:815ac2d0f3398a14286dc2cea223a6f338109f9ecf39a71160cd1628786bc6f5"}, + {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:80b5ee39b7f0131ebec7968baa9b2309eddb35b8403d1869e08f024efd883566"}, + {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5b2ccb7548a0b65974860a78c9ffe1173cfb5877460e5a229238d985565574ae"}, + {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:995ea5c48c4ebfd898eacb098164b3cc826ba273b3049e4a889658548e321b43"}, + {file = "coverage-7.4.0-cp310-cp310-win32.whl", hash = "sha256:79287fd95585ed36e83182794a57a46aeae0b64ca53929d1176db56aacc83451"}, + {file = "coverage-7.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:5b14b4f8760006bfdb6e08667af7bc2d8d9bfdb648351915315ea17645347137"}, + {file = "coverage-7.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:04387a4a6ecb330c1878907ce0dc04078ea72a869263e53c72a1ba5bbdf380ca"}, + {file = "coverage-7.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea81d8f9691bb53f4fb4db603203029643caffc82bf998ab5b59ca05560f4c06"}, + {file = "coverage-7.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74775198b702868ec2d058cb92720a3c5a9177296f75bd97317c787daf711505"}, + {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76f03940f9973bfaee8cfba70ac991825611b9aac047e5c80d499a44079ec0bc"}, + {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:485e9f897cf4856a65a57c7f6ea3dc0d4e6c076c87311d4bc003f82cfe199d25"}, + {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6ae8c9d301207e6856865867d762a4b6fd379c714fcc0607a84b92ee63feff70"}, + {file = 
"coverage-7.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bf477c355274a72435ceb140dc42de0dc1e1e0bf6e97195be30487d8eaaf1a09"}, + {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:83c2dda2666fe32332f8e87481eed056c8b4d163fe18ecc690b02802d36a4d26"}, + {file = "coverage-7.4.0-cp311-cp311-win32.whl", hash = "sha256:697d1317e5290a313ef0d369650cfee1a114abb6021fa239ca12b4849ebbd614"}, + {file = "coverage-7.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:26776ff6c711d9d835557ee453082025d871e30b3fd6c27fcef14733f67f0590"}, + {file = "coverage-7.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:13eaf476ec3e883fe3e5fe3707caeb88268a06284484a3daf8250259ef1ba143"}, + {file = "coverage-7.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846f52f46e212affb5bcf131c952fb4075b55aae6b61adc9856222df89cbe3e2"}, + {file = "coverage-7.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26f66da8695719ccf90e794ed567a1549bb2644a706b41e9f6eae6816b398c4a"}, + {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:164fdcc3246c69a6526a59b744b62e303039a81e42cfbbdc171c91a8cc2f9446"}, + {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:316543f71025a6565677d84bc4df2114e9b6a615aa39fb165d697dba06a54af9"}, + {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bb1de682da0b824411e00a0d4da5a784ec6496b6850fdf8c865c1d68c0e318dd"}, + {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:0e8d06778e8fbffccfe96331a3946237f87b1e1d359d7fbe8b06b96c95a5407a"}, + {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a56de34db7b7ff77056a37aedded01b2b98b508227d2d0979d373a9b5d353daa"}, + {file = "coverage-7.4.0-cp312-cp312-win32.whl", hash = "sha256:51456e6fa099a8d9d91497202d9563a320513fcf59f33991b0661a4a6f2ad450"}, + {file = "coverage-7.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:cd3c1e4cb2ff0083758f09be0f77402e1bdf704adb7f89108007300a6da587d0"}, + {file = "coverage-7.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e9d1bf53c4c8de58d22e0e956a79a5b37f754ed1ffdbf1a260d9dcfa2d8a325e"}, + {file = "coverage-7.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:109f5985182b6b81fe33323ab4707011875198c41964f014579cf82cebf2bb85"}, + {file = "coverage-7.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cc9d4bc55de8003663ec94c2f215d12d42ceea128da8f0f4036235a119c88ac"}, + {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc6d65b21c219ec2072c1293c505cf36e4e913a3f936d80028993dd73c7906b1"}, + {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a10a4920def78bbfff4eff8a05c51be03e42f1c3735be42d851f199144897ba"}, + {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b8e99f06160602bc64da35158bb76c73522a4010f0649be44a4e167ff8555952"}, + {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7d360587e64d006402b7116623cebf9d48893329ef035278969fa3bbf75b697e"}, + {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:29f3abe810930311c0b5d1a7140f6395369c3db1be68345638c33eec07535105"}, + {file = "coverage-7.4.0-cp38-cp38-win32.whl", hash = 
"sha256:5040148f4ec43644702e7b16ca864c5314ccb8ee0751ef617d49aa0e2d6bf4f2"}, + {file = "coverage-7.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:9864463c1c2f9cb3b5db2cf1ff475eed2f0b4285c2aaf4d357b69959941aa555"}, + {file = "coverage-7.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:936d38794044b26c99d3dd004d8af0035ac535b92090f7f2bb5aa9c8e2f5cd42"}, + {file = "coverage-7.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:799c8f873794a08cdf216aa5d0531c6a3747793b70c53f70e98259720a6fe2d7"}, + {file = "coverage-7.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7defbb9737274023e2d7af02cac77043c86ce88a907c58f42b580a97d5bcca9"}, + {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a1526d265743fb49363974b7aa8d5899ff64ee07df47dd8d3e37dcc0818f09ed"}, + {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf635a52fc1ea401baf88843ae8708591aa4adff875e5c23220de43b1ccf575c"}, + {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:756ded44f47f330666843b5781be126ab57bb57c22adbb07d83f6b519783b870"}, + {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0eb3c2f32dabe3a4aaf6441dde94f35687224dfd7eb2a7f47f3fd9428e421058"}, + {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bfd5db349d15c08311702611f3dccbef4b4e2ec148fcc636cf8739519b4a5c0f"}, + {file = "coverage-7.4.0-cp39-cp39-win32.whl", hash = "sha256:53d7d9158ee03956e0eadac38dfa1ec8068431ef8058fe6447043db1fb40d932"}, + {file = "coverage-7.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:cfd2a8b6b0d8e66e944d47cdec2f47c48fef2ba2f2dff5a9a75757f64172857e"}, + {file = "coverage-7.4.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:c530833afc4707fe48524a44844493f36d8727f04dcce91fb978c414a8556cc6"}, + {file = "coverage-7.4.0.tar.gz", hash = "sha256:707c0f58cb1712b8809ece32b68996ee1e609f71bd14615bd8f87a1293cb610e"}, ] [package.dependencies] @@ -4182,4 +4182,4 @@ zstandard = ["zstandard"] [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "0e2f758b34577e0d1294391c316cb2166108c2d27b24163ef54c7a36cd7fc542" +content-hash = "1bfb3c025f6f1097a481e2f7bb81e73852bd49f7560a882feba3bc716df9ec24" diff --git a/pyproject.toml b/pyproject.toml index 9b2d5d5b75..a06bd5ea65 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -77,7 +77,7 @@ pytest-checkdocs = "2.10.1" pytest-lazy-fixture = "0.6.3" pre-commit = "3.5.0" fastavro = "1.9.2" -coverage = { version = "^7.3.4", extras = ["toml"] } +coverage = { version = "^7.4.0", extras = ["toml"] } requests-mock = "1.11.0" moto = { version = "^4.2.12", extras = ["server"] } typing-extensions = "4.9.0" From 2272e207f5d790e665e1cce2272b0b2848c2a137 Mon Sep 17 00:00:00 2001 From: HonahX Date: Tue, 2 Jan 2024 02:27:11 +0800 Subject: [PATCH 003/169] Glue catalog commit table (#140) * Implement table metadata updater first draft * fix updater error and add tests * implement _commit_table for glue * implement apply_metadata_update which is simpler * remove old implementation * re-organize method place * fix nit * fix test * add another test * clear TODO * add a combined test * Fix merge conflict * update table metadata merged * implement requirements validation * change the exception to CommitFailedException * add docstring * use regex to parse the metadata version * fix lint issue * fix CI issue * make base_metadata optional and add null check * make base_metadata 
optional and add null check * add integration test * default skip-archive to true and comments * refactor tests * add doc and fix test after merge * make regex more robust, thanks Fokko! * Fix review comments, thanks Patrick! --- pyiceberg/catalog/__init__.py | 46 +++++++++- pyiceberg/catalog/glue.py | 113 +++++++++++++++++++++---- tests/catalog/integration_test_glue.py | 33 ++++++++ tests/catalog/test_glue.py | 41 +++++++++ tests/conftest.py | 4 +- 5 files changed, 218 insertions(+), 19 deletions(-) diff --git a/pyiceberg/catalog/__init__.py b/pyiceberg/catalog/__init__.py index 993be87ddd..4cb6c2ff4a 100644 --- a/pyiceberg/catalog/__init__.py +++ b/pyiceberg/catalog/__init__.py @@ -18,6 +18,7 @@ from __future__ import annotations import logging +import re import uuid from abc import ABC, abstractmethod from dataclasses import dataclass @@ -74,6 +75,17 @@ LOCATION = "location" EXTERNAL_TABLE = "EXTERNAL_TABLE" +TABLE_METADATA_FILE_NAME_REGEX = re.compile( + r""" + (\d+) # version number + - # separator + ([\w-]{36}) # UUID (36 characters, including hyphens) + (?:\.\w+)? # optional codec name + \.metadata\.json # file extension + """, + re.X, +) + class CatalogType(Enum): REST = "rest" @@ -587,8 +599,38 @@ def _write_metadata(metadata: TableMetadata, io: FileIO, metadata_path: str) -> ToOutputFile.table_metadata(metadata, io.new_output(metadata_path)) @staticmethod - def _get_metadata_location(location: str) -> str: - return f"{location}/metadata/00000-{uuid.uuid4()}.metadata.json" + def _get_metadata_location(location: str, new_version: int = 0) -> str: + if new_version < 0: + raise ValueError(f"Table metadata version: `{new_version}` must be a non-negative integer") + version_str = f"{new_version:05d}" + return f"{location}/metadata/{version_str}-{uuid.uuid4()}.metadata.json" + + @staticmethod + def _parse_metadata_version(metadata_location: str) -> int: + """Parse the version from the metadata location. + + The version is the first part of the file name, before the first dash. + For example, the version of the metadata file + `s3://bucket/db/tb/metadata/00001-6c97e413-d51b-4538-ac70-12fe2a85cb83.metadata.json` + is 1. + If the path does not comply with the pattern, the version is defaulted to be -1, ensuring + that the next metadata file is treated as having version 0. + + Args: + metadata_location (str): The location of the metadata file. + + Returns: + int: The version of the metadata file. 
-1 if the file name does not have a valid version string.
+        """
+        file_name = metadata_location.split("/")[-1]
+        if file_name_match := TABLE_METADATA_FILE_NAME_REGEX.fullmatch(file_name):
+            try:
+                uuid.UUID(file_name_match.group(2))
+            except ValueError:
+                return -1
+            return int(file_name_match.group(1))
+        else:
+            return -1
 
     def _get_updated_props_and_update_summary(
         self, current_properties: Properties, removals: Optional[Set[str]], updates: Properties
diff --git a/pyiceberg/catalog/glue.py b/pyiceberg/catalog/glue.py
index 723de2f335..6cf9462b71 100644
--- a/pyiceberg/catalog/glue.py
+++ b/pyiceberg/catalog/glue.py
@@ -40,6 +40,7 @@
     ICEBERG,
     LOCATION,
     METADATA_LOCATION,
+    PREVIOUS_METADATA_LOCATION,
     TABLE_TYPE,
     Catalog,
     Identifier,
@@ -47,6 +48,7 @@
     PropertiesUpdateSummary,
 )
 from pyiceberg.exceptions import (
+    CommitFailedException,
     NamespaceAlreadyExistsError,
     NamespaceNotEmptyError,
     NoSuchIcebergTableError,
@@ -59,21 +61,40 @@
 from pyiceberg.partitioning import UNPARTITIONED_PARTITION_SPEC, PartitionSpec
 from pyiceberg.schema import Schema
 from pyiceberg.serializers import FromInputFile
-from pyiceberg.table import CommitTableRequest, CommitTableResponse, Table
+from pyiceberg.table import CommitTableRequest, CommitTableResponse, Table, update_table_metadata
 from pyiceberg.table.metadata import new_table_metadata
 from pyiceberg.table.sorting import UNSORTED_SORT_ORDER, SortOrder
 from pyiceberg.typedef import EMPTY_DICT
 
-
-def _construct_parameters(metadata_location: str) -> Properties:
-    return {TABLE_TYPE: ICEBERG.upper(), METADATA_LOCATION: metadata_location}
-
-
-def _construct_create_table_input(table_name: str, metadata_location: str, properties: Properties) -> TableInputTypeDef:
+# If Glue should skip archiving an old table version when creating a new version in a commit. By
+# default, Glue archives all old table versions after an UpdateTable call, but Glue has a default
+# max number of archived table versions (can be increased). So for streaming use cases with lots
+# of commits, it is recommended to set this value to true.
+GLUE_SKIP_ARCHIVE = "glue.skip-archive" +GLUE_SKIP_ARCHIVE_DEFAULT = True + + +def _construct_parameters( + metadata_location: str, glue_table: Optional[TableTypeDef] = None, prev_metadata_location: Optional[str] = None +) -> Properties: + new_parameters = glue_table.get("Parameters", {}) if glue_table else {} + new_parameters.update({TABLE_TYPE: ICEBERG.upper(), METADATA_LOCATION: metadata_location}) + if prev_metadata_location: + new_parameters[PREVIOUS_METADATA_LOCATION] = prev_metadata_location + return new_parameters + + +def _construct_table_input( + table_name: str, + metadata_location: str, + properties: Properties, + glue_table: Optional[TableTypeDef] = None, + prev_metadata_location: Optional[str] = None, +) -> TableInputTypeDef: table_input: TableInputTypeDef = { "Name": table_name, "TableType": EXTERNAL_TABLE, - "Parameters": _construct_parameters(metadata_location), + "Parameters": _construct_parameters(metadata_location, glue_table, prev_metadata_location), } if "Description" in properties: @@ -177,6 +198,28 @@ def _create_glue_table(self, database_name: str, table_name: str, table_input: T except self.glue.exceptions.EntityNotFoundException as e: raise NoSuchNamespaceError(f"Database {database_name} does not exist") from e + def _update_glue_table(self, database_name: str, table_name: str, table_input: TableInputTypeDef, version_id: str) -> None: + try: + self.glue.update_table( + DatabaseName=database_name, + TableInput=table_input, + SkipArchive=self.properties.get(GLUE_SKIP_ARCHIVE, GLUE_SKIP_ARCHIVE_DEFAULT), + VersionId=version_id, + ) + except self.glue.exceptions.EntityNotFoundException as e: + raise NoSuchTableError(f"Table does not exist: {database_name}.{table_name} (Glue table version {version_id})") from e + except self.glue.exceptions.ConcurrentModificationException as e: + raise CommitFailedException( + f"Cannot commit {database_name}.{table_name} because Glue detected concurrent update to table version {version_id}" + ) from e + + def _get_glue_table(self, database_name: str, table_name: str) -> TableTypeDef: + try: + load_table_response = self.glue.get_table(DatabaseName=database_name, Name=table_name) + return load_table_response["Table"] + except self.glue.exceptions.EntityNotFoundException as e: + raise NoSuchTableError(f"Table does not exist: {database_name}.{table_name}") from e + def create_table( self, identifier: Union[str, Identifier], @@ -215,7 +258,7 @@ def create_table( io = load_file_io(properties=self.properties, location=metadata_location) self._write_metadata(metadata, io, metadata_location) - table_input = _construct_create_table_input(table_name, metadata_location, properties) + table_input = _construct_table_input(table_name, metadata_location, properties) database_name, table_name = self.identifier_to_database_and_table(identifier) self._create_glue_table(database_name=database_name, table_name=table_name, table_input=table_input) @@ -247,8 +290,52 @@ def _commit_table(self, table_request: CommitTableRequest) -> CommitTableRespons Raises: NoSuchTableError: If a table with the given identifier does not exist. + CommitFailedException: If the commit failed. 
""" - raise NotImplementedError + identifier_tuple = self.identifier_to_tuple_without_catalog( + tuple(table_request.identifier.namespace.root + [table_request.identifier.name]) + ) + database_name, table_name = self.identifier_to_database_and_table(identifier_tuple) + + current_glue_table = self._get_glue_table(database_name=database_name, table_name=table_name) + glue_table_version_id = current_glue_table.get("VersionId") + if not glue_table_version_id: + raise CommitFailedException(f"Cannot commit {database_name}.{table_name} because Glue table version id is missing") + current_table = self._convert_glue_to_iceberg(glue_table=current_glue_table) + base_metadata = current_table.metadata + + # Validate the update requirements + for requirement in table_request.requirements: + requirement.validate(base_metadata) + + updated_metadata = update_table_metadata(base_metadata, table_request.updates) + if updated_metadata == base_metadata: + # no changes, do nothing + return CommitTableResponse(metadata=base_metadata, metadata_location=current_table.metadata_location) + + # write new metadata + new_metadata_version = self._parse_metadata_version(current_table.metadata_location) + 1 + new_metadata_location = self._get_metadata_location(current_table.metadata.location, new_metadata_version) + self._write_metadata(updated_metadata, current_table.io, new_metadata_location) + + update_table_input = _construct_table_input( + table_name=table_name, + metadata_location=new_metadata_location, + properties=current_table.properties, + glue_table=current_glue_table, + prev_metadata_location=current_table.metadata_location, + ) + + # Pass `version_id` to implement optimistic locking: it ensures updates are rejected if concurrent + # modifications occur. See more details at https://iceberg.apache.org/docs/latest/aws/#optimistic-locking + self._update_glue_table( + database_name=database_name, + table_name=table_name, + table_input=update_table_input, + version_id=glue_table_version_id, + ) + + return CommitTableResponse(metadata=updated_metadata, metadata_location=new_metadata_location) def load_table(self, identifier: Union[str, Identifier]) -> Table: """Load the table's metadata and returns the table instance. @@ -267,12 +354,8 @@ def load_table(self, identifier: Union[str, Identifier]) -> Table: """ identifier_tuple = self.identifier_to_tuple_without_catalog(identifier) database_name, table_name = self.identifier_to_database_and_table(identifier_tuple, NoSuchTableError) - try: - load_table_response = self.glue.get_table(DatabaseName=database_name, Name=table_name) - except self.glue.exceptions.EntityNotFoundException as e: - raise NoSuchTableError(f"Table does not exist: {database_name}.{table_name}") from e - return self._convert_glue_to_iceberg(load_table_response["Table"]) + return self._convert_glue_to_iceberg(self._get_glue_table(database_name=database_name, table_name=table_name)) def drop_table(self, identifier: Union[str, Identifier]) -> None: """Drop a table. 
diff --git a/tests/catalog/integration_test_glue.py b/tests/catalog/integration_test_glue.py index 2689ef14d3..99f0adac40 100644 --- a/tests/catalog/integration_test_glue.py +++ b/tests/catalog/integration_test_glue.py @@ -31,6 +31,7 @@ TableAlreadyExistsError, ) from pyiceberg.schema import Schema +from pyiceberg.types import IntegerType from tests.conftest import clean_up, get_bucket_name, get_s3_path # The number of tables/databases used in list_table/namespace test @@ -61,6 +62,7 @@ def test_create_table( assert table.identifier == (CATALOG_NAME,) + identifier metadata_location = table.metadata_location.split(get_bucket_name())[1][1:] s3.head_object(Bucket=get_bucket_name(), Key=metadata_location) + assert test_catalog._parse_metadata_version(table.metadata_location) == 0 def test_create_table_with_invalid_location(table_schema_nested: Schema, table_name: str, database_name: str) -> None: @@ -82,6 +84,7 @@ def test_create_table_with_default_location( assert table.identifier == (CATALOG_NAME,) + identifier metadata_location = table.metadata_location.split(get_bucket_name())[1][1:] s3.head_object(Bucket=get_bucket_name(), Key=metadata_location) + assert test_catalog._parse_metadata_version(table.metadata_location) == 0 def test_create_table_with_invalid_database(test_catalog: Catalog, table_schema_nested: Schema, table_name: str) -> None: @@ -105,6 +108,7 @@ def test_load_table(test_catalog: Catalog, table_schema_nested: Schema, table_na assert table.identifier == loaded_table.identifier assert table.metadata_location == loaded_table.metadata_location assert table.metadata == loaded_table.metadata + assert test_catalog._parse_metadata_version(table.metadata_location) == 0 def test_list_tables(test_catalog: Catalog, table_schema_nested: Schema, database_name: str, table_list: List[str]) -> None: @@ -126,6 +130,7 @@ def test_rename_table( new_table_name = f"rename-{table_name}" identifier = (database_name, table_name) table = test_catalog.create_table(identifier, table_schema_nested) + assert test_catalog._parse_metadata_version(table.metadata_location) == 0 assert table.identifier == (CATALOG_NAME,) + identifier new_identifier = (new_database_name, new_table_name) test_catalog.rename_table(identifier, new_identifier) @@ -261,3 +266,31 @@ def test_update_namespace_properties(test_catalog: Catalog, database_name: str) else: assert k in update_report.removed assert "updated test description" == test_catalog.load_namespace_properties(database_name)["comment"] + + +def test_commit_table_update_schema( + test_catalog: Catalog, table_schema_nested: Schema, database_name: str, table_name: str +) -> None: + identifier = (database_name, table_name) + test_catalog.create_namespace(namespace=database_name) + table = test_catalog.create_table(identifier, table_schema_nested) + original_table_metadata = table.metadata + + assert test_catalog._parse_metadata_version(table.metadata_location) == 0 + assert original_table_metadata.current_schema_id == 0 + + transaction = table.transaction() + update = transaction.update_schema() + update.add_column(path="b", field_type=IntegerType()) + update.commit() + transaction.commit_transaction() + + updated_table_metadata = table.metadata + + assert test_catalog._parse_metadata_version(table.metadata_location) == 1 + assert updated_table_metadata.current_schema_id == 1 + assert len(updated_table_metadata.schemas) == 2 + new_schema = next(schema for schema in updated_table_metadata.schemas if schema.schema_id == 1) + assert new_schema + assert new_schema == 
update._apply() + assert new_schema.find_field("b").field_type == IntegerType() diff --git a/tests/catalog/test_glue.py b/tests/catalog/test_glue.py index 6aaeb2898c..2a33c8e23d 100644 --- a/tests/catalog/test_glue.py +++ b/tests/catalog/test_glue.py @@ -31,6 +31,7 @@ TableAlreadyExistsError, ) from pyiceberg.schema import Schema +from pyiceberg.types import IntegerType from tests.conftest import BUCKET_NAME, TABLE_METADATA_LOCATION_REGEX @@ -45,6 +46,7 @@ def test_create_table_with_database_location( table = test_catalog.create_table(identifier, table_schema_nested) assert table.identifier == (catalog_name,) + identifier assert TABLE_METADATA_LOCATION_REGEX.match(table.metadata_location) + assert test_catalog._parse_metadata_version(table.metadata_location) == 0 @mock_glue @@ -58,6 +60,7 @@ def test_create_table_with_default_warehouse( table = test_catalog.create_table(identifier, table_schema_nested) assert table.identifier == (catalog_name,) + identifier assert TABLE_METADATA_LOCATION_REGEX.match(table.metadata_location) + assert test_catalog._parse_metadata_version(table.metadata_location) == 0 @mock_glue @@ -73,6 +76,7 @@ def test_create_table_with_given_location( ) assert table.identifier == (catalog_name,) + identifier assert TABLE_METADATA_LOCATION_REGEX.match(table.metadata_location) + assert test_catalog._parse_metadata_version(table.metadata_location) == 0 @mock_glue @@ -98,6 +102,7 @@ def test_create_table_with_strips( table = test_catalog.create_table(identifier, table_schema_nested) assert table.identifier == (catalog_name,) + identifier assert TABLE_METADATA_LOCATION_REGEX.match(table.metadata_location) + assert test_catalog._parse_metadata_version(table.metadata_location) == 0 @mock_glue @@ -111,6 +116,7 @@ def test_create_table_with_strips_bucket_root( table_strip = test_catalog.create_table(identifier, table_schema_nested) assert table_strip.identifier == (catalog_name,) + identifier assert TABLE_METADATA_LOCATION_REGEX.match(table_strip.metadata_location) + assert test_catalog._parse_metadata_version(table_strip.metadata_location) == 0 @mock_glue @@ -147,6 +153,7 @@ def test_load_table( table = test_catalog.load_table(identifier) assert table.identifier == (catalog_name,) + identifier assert TABLE_METADATA_LOCATION_REGEX.match(table.metadata_location) + assert test_catalog._parse_metadata_version(table.metadata_location) == 0 @mock_glue @@ -229,6 +236,7 @@ def test_rename_table( table = test_catalog.create_table(identifier, table_schema_nested) assert table.identifier == (catalog_name,) + identifier assert TABLE_METADATA_LOCATION_REGEX.match(table.metadata_location) + assert test_catalog._parse_metadata_version(table.metadata_location) == 0 test_catalog.rename_table(identifier, new_identifier) new_table = test_catalog.load_table(new_identifier) assert new_table.identifier == (catalog_name,) + new_identifier @@ -507,3 +515,36 @@ def test_passing_profile_name() -> None: mock_session.assert_called_with(**session_properties) assert test_catalog.glue is mock_session().client() + + +@mock_glue +def test_commit_table_update_schema( + _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str +) -> None: + catalog_name = "glue" + identifier = (database_name, table_name) + test_catalog = GlueCatalog(catalog_name, **{"s3.endpoint": moto_endpoint_url, "warehouse": f"s3://{BUCKET_NAME}"}) + test_catalog.create_namespace(namespace=database_name) + table = test_catalog.create_table(identifier, table_schema_nested) + 
original_table_metadata = table.metadata + + assert TABLE_METADATA_LOCATION_REGEX.match(table.metadata_location) + assert test_catalog._parse_metadata_version(table.metadata_location) == 0 + assert original_table_metadata.current_schema_id == 0 + + transaction = table.transaction() + update = transaction.update_schema() + update.add_column(path="b", field_type=IntegerType()) + update.commit() + transaction.commit_transaction() + + updated_table_metadata = table.metadata + + assert TABLE_METADATA_LOCATION_REGEX.match(table.metadata_location) + assert test_catalog._parse_metadata_version(table.metadata_location) == 1 + assert updated_table_metadata.current_schema_id == 1 + assert len(updated_table_metadata.schemas) == 2 + new_schema = next(schema for schema in updated_table_metadata.schemas if schema.schema_id == 1) + assert new_schema + assert new_schema == update._apply() + assert new_schema.find_field("b").field_type == IntegerType() diff --git a/tests/conftest.py b/tests/conftest.py index 1197bf2feb..e54f1f54d6 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1586,7 +1586,7 @@ def fixture_aws_credentials() -> Generator[None, None, None]: os.environ.pop("AWS_DEFAULT_REGION") -MOTO_SERVER = ThreadedMotoServer(port=5000) +MOTO_SERVER = ThreadedMotoServer(ip_address="localhost", port=5000) def pytest_sessionfinish( @@ -1687,7 +1687,7 @@ def database_list(database_name: str) -> List[str]: TABLE_METADATA_LOCATION_REGEX = re.compile( r"""s3://test_bucket/my_iceberg_database-[a-z]{20}.db/ my_iceberg_table-[a-z]{20}/metadata/ - 00000-[a-f0-9]{8}-?[a-f0-9]{4}-?4[a-f0-9]{3}-?[89ab][a-f0-9]{3}-?[a-f0-9]{12}.metadata.json""", + [0-9]{5}-[a-f0-9]{8}-?[a-f0-9]{4}-?4[a-f0-9]{3}-?[89ab][a-f0-9]{3}-?[a-f0-9]{12}.metadata.json""", re.X, ) From 46b36b36433a0ac34b8f2597970202790b50b6f2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 2 Jan 2024 19:49:21 +0100 Subject: [PATCH 004/169] Build: Bump pytest from 7.4.3 to 7.4.4 (#248) --- poetry.lock | 8 ++++---- pyproject.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index 592ff9db3c..ac85dc5889 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2905,13 +2905,13 @@ tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} [[package]] name = "pytest" -version = "7.4.3" +version = "7.4.4" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-7.4.3-py3-none-any.whl", hash = "sha256:0d009c083ea859a71b76adf7c1d502e4bc170b80a8ef002da5806527b9591fac"}, - {file = "pytest-7.4.3.tar.gz", hash = "sha256:d989d136982de4e3b29dabcc838ad581c64e8ed52c11fbe86ddebd9da0818cd5"}, + {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, + {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, ] [package.dependencies] @@ -4182,4 +4182,4 @@ zstandard = ["zstandard"] [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "1bfb3c025f6f1097a481e2f7bb81e73852bd49f7560a882feba3bc716df9ec24" +content-hash = "095bbebf0fd3d259025d05972f34aae6368b854ed59eb6567ef2c6da9f0da09f" diff --git a/pyproject.toml b/pyproject.toml index a06bd5ea65..31aecb26b1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -72,7 +72,7 @@ psycopg2-binary = { version = ">=2.9.6", optional = true } sqlalchemy = { version = "^2.0.18", optional = true } 
[tool.poetry.dev-dependencies] -pytest = "7.4.3" +pytest = "7.4.4" pytest-checkdocs = "2.10.1" pytest-lazy-fixture = "0.6.3" pre-commit = "3.5.0" From 5af05bd57c94667e3760423fcdac5f9ce9e5ca6e Mon Sep 17 00:00:00 2001 From: Fokko Driesprong Date: Wed, 3 Jan 2024 10:07:11 +0100 Subject: [PATCH 005/169] Allow filtering on newly added columns (#246) * Allow filtering on newly added columns Resolves #217 * Thanks Amogh! --- dev/provision.py | 22 ++++++++++++++++++++-- pyiceberg/expressions/visitors.py | 9 +++++++-- tests/test_integration.py | 18 ++++++++++++++++++ 3 files changed, 45 insertions(+), 4 deletions(-) diff --git a/dev/provision.py b/dev/provision.py index ca6e5aa6aa..9917cd3f20 100644 --- a/dev/provision.py +++ b/dev/provision.py @@ -152,7 +152,6 @@ """ ) - spark.sql( """ DELETE FROM default.test_positional_mor_deletes WHERE number = 9 @@ -315,9 +314,28 @@ ) spark.sql( - f""" + """ INSERT INTO default.test_table_sanitized_character VALUES ('123') """ ) + +spark.sql( + """ +CREATE TABLE default.test_table_add_column ( + a string +) +USING iceberg +""" +) + +spark.sql("INSERT INTO default.test_table_add_column VALUES ('1')") + +spark.sql( + """ +ALTER TABLE default.test_table_add_column ADD COLUMN b string +""" +) + +spark.sql("INSERT INTO default.test_table_add_column VALUES ('2', '2')") diff --git a/pyiceberg/expressions/visitors.py b/pyiceberg/expressions/visitors.py index a13c1c5045..a185164cd1 100644 --- a/pyiceberg/expressions/visitors.py +++ b/pyiceberg/expressions/visitors.py @@ -892,8 +892,13 @@ def visit_unbound_predicate(self, predicate: UnboundPredicate[L]) -> BooleanExpr def visit_bound_predicate(self, predicate: BoundPredicate[L]) -> BooleanExpression: file_column_name = self.file_schema.find_column_name(predicate.term.ref().field.field_id) - if not file_column_name: - raise ValueError(f"Not found in file schema: {file_column_name}") + if file_column_name is None: + # In the case of schema evolution, the column might not be present + # in the file schema when reading older data + if isinstance(predicate, BoundIsNull): + return AlwaysTrue() + else: + return AlwaysFalse() if isinstance(predicate, BoundUnaryPredicate): return predicate.as_unbound(file_column_name) diff --git a/tests/test_integration.py b/tests/test_integration.py index 7cf017def9..2a173be3b3 100644 --- a/tests/test_integration.py +++ b/tests/test_integration.py @@ -143,6 +143,12 @@ def test_positional_mor_deletes(catalog: Catalog) -> Table: return catalog.load_table("default.test_positional_mor_deletes") +@pytest.fixture() +def test_table_add_column(catalog: Catalog) -> Table: + """Table that has a new column""" + return catalog.load_table("default.test_table_add_column") + + @pytest.fixture() def test_positional_mor_double_deletes(catalog: Catalog) -> Table: """Table that has multiple positional deletes""" @@ -373,6 +379,18 @@ def test_scan_branch(test_positional_mor_deletes: Table) -> None: assert arrow_table["number"].to_pylist() == [1, 2, 3, 4, 6, 7, 8, 9, 10, 11, 12] +@pytest.mark.integration +def test_filter_on_new_column(test_table_add_column: Table) -> None: + arrow_table = test_table_add_column.scan(row_filter="b == '2'").to_arrow() + assert arrow_table["b"].to_pylist() == ['2'] + + arrow_table = test_table_add_column.scan(row_filter="b is not null").to_arrow() + assert arrow_table["b"].to_pylist() == ['2'] + + arrow_table = test_table_add_column.scan(row_filter="b is null").to_arrow() + assert arrow_table["b"].to_pylist() == [None] + + @pytest.mark.integration def 
test_upgrade_table_version(table_test_table_version: Table) -> None: assert table_test_table_version.format_version == 1 From b004e35d60900e4ff2372f069d0a346c5f2ef9fb Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 4 Jan 2024 09:17:50 +0100 Subject: [PATCH 006/169] Build: Bump sqlalchemy from 2.0.24 to 2.0.25 (#250) Bumps [sqlalchemy](https://github.com/sqlalchemy/sqlalchemy) from 2.0.24 to 2.0.25. - [Release notes](https://github.com/sqlalchemy/sqlalchemy/releases) - [Changelog](https://github.com/sqlalchemy/sqlalchemy/blob/main/CHANGES.rst) - [Commits](https://github.com/sqlalchemy/sqlalchemy/commits) --- updated-dependencies: - dependency-name: sqlalchemy dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 102 ++++++++++++++++++++++++++-------------------------- 1 file changed, 51 insertions(+), 51 deletions(-) diff --git a/poetry.lock b/poetry.lock index ac85dc5889..b77c97affb 100644 --- a/poetry.lock +++ b/poetry.lock @@ -3660,65 +3660,65 @@ files = [ [[package]] name = "sqlalchemy" -version = "2.0.24" +version = "2.0.25" description = "Database Abstraction Library" optional = true python-versions = ">=3.7" files = [ - {file = "SQLAlchemy-2.0.24-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5f801d85ba4753d4ed97181d003e5d3fa330ac7c4587d131f61d7f968f416862"}, - {file = "SQLAlchemy-2.0.24-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b35c35e3923ade1e7ac44e150dec29f5863513246c8bf85e2d7d313e3832bcfb"}, - {file = "SQLAlchemy-2.0.24-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d9b3fd5eca3c0b137a5e0e468e24ca544ed8ca4783e0e55341b7ed2807518ee"}, - {file = "SQLAlchemy-2.0.24-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a6209e689d0ff206c40032b6418e3cfcfc5af044b3f66e381d7f1ae301544b4"}, - {file = "SQLAlchemy-2.0.24-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:37e89d965b52e8b20571b5d44f26e2124b26ab63758bf1b7598a0e38fb2c4005"}, - {file = "SQLAlchemy-2.0.24-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c6910eb4ea90c0889f363965cd3c8c45a620ad27b526a7899f0054f6c1b9219e"}, - {file = "SQLAlchemy-2.0.24-cp310-cp310-win32.whl", hash = "sha256:d8e7e8a150e7b548e7ecd6ebb9211c37265991bf2504297d9454e01b58530fc6"}, - {file = "SQLAlchemy-2.0.24-cp310-cp310-win_amd64.whl", hash = "sha256:396f05c552f7fa30a129497c41bef5b4d1423f9af8fe4df0c3dcd38f3e3b9a14"}, - {file = "SQLAlchemy-2.0.24-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:adbd67dac4ebf54587198b63cd30c29fd7eafa8c0cab58893d9419414f8efe4b"}, - {file = "SQLAlchemy-2.0.24-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a0f611b431b84f55779cbb7157257d87b4a2876b067c77c4f36b15e44ced65e2"}, - {file = "SQLAlchemy-2.0.24-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56a0e90a959e18ac5f18c80d0cad9e90cb09322764f536e8a637426afb1cae2f"}, - {file = "SQLAlchemy-2.0.24-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6db686a1d9f183c639f7e06a2656af25d4ed438eda581de135d15569f16ace33"}, - {file = "SQLAlchemy-2.0.24-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f0cc0b486a56dff72dddae6b6bfa7ff201b0eeac29d4bc6f0e9725dc3c360d71"}, - {file = "SQLAlchemy-2.0.24-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4a1d4856861ba9e73bac05030cec5852eabfa9ef4af8e56c19d92de80d46fc34"}, - {file = 
"SQLAlchemy-2.0.24-cp311-cp311-win32.whl", hash = "sha256:a3c2753bf4f48b7a6024e5e8a394af49b1b12c817d75d06942cae03d14ff87b3"}, - {file = "SQLAlchemy-2.0.24-cp311-cp311-win_amd64.whl", hash = "sha256:38732884eabc64982a09a846bacf085596ff2371e4e41d20c0734f7e50525d01"}, - {file = "SQLAlchemy-2.0.24-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9f992e0f916201731993eab8502912878f02287d9f765ef843677ff118d0e0b1"}, - {file = "SQLAlchemy-2.0.24-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2587e108463cc2e5b45a896b2e7cc8659a517038026922a758bde009271aed11"}, - {file = "SQLAlchemy-2.0.24-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bb7cedcddffca98c40bb0becd3423e293d1fef442b869da40843d751785beb3"}, - {file = "SQLAlchemy-2.0.24-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83fa6df0e035689df89ff77a46bf8738696785d3156c2c61494acdcddc75c69d"}, - {file = "SQLAlchemy-2.0.24-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:cc889fda484d54d0b31feec409406267616536d048a450fc46943e152700bb79"}, - {file = "SQLAlchemy-2.0.24-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:57ef6f2cb8b09a042d0dbeaa46a30f2df5dd1e1eb889ba258b0d5d7d6011b81c"}, - {file = "SQLAlchemy-2.0.24-cp312-cp312-win32.whl", hash = "sha256:ea490564435b5b204d8154f0e18387b499ea3cedc1e6af3b3a2ab18291d85aa7"}, - {file = "SQLAlchemy-2.0.24-cp312-cp312-win_amd64.whl", hash = "sha256:ccfd336f96d4c9bbab0309f2a565bf15c468c2d8b2d277a32f89c5940f71fcf9"}, - {file = "SQLAlchemy-2.0.24-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9aaaaa846b10dfbe1bda71079d0e31a7e2cebedda9409fa7dba3dfed1ae803e8"}, - {file = "SQLAlchemy-2.0.24-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95bae3d38f8808d79072da25d5e5a6095f36fe1f9d6c614dd72c59ca8397c7c0"}, - {file = "SQLAlchemy-2.0.24-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a04191a7c8d77e63f6fc1e8336d6c6e93176c0c010833e74410e647f0284f5a1"}, - {file = "SQLAlchemy-2.0.24-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:acc58b7c2e40235712d857fdfc8f2bda9608f4a850d8d9ac0dd1fc80939ca6ac"}, - {file = "SQLAlchemy-2.0.24-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:00d76fe5d7cdb5d84d625ce002ce29fefba0bfd98e212ae66793fed30af73931"}, - {file = "SQLAlchemy-2.0.24-cp37-cp37m-win32.whl", hash = "sha256:29e51f848f843bbd75d74ae64ab1ab06302cb1dccd4549d1f5afe6b4a946edb2"}, - {file = "SQLAlchemy-2.0.24-cp37-cp37m-win_amd64.whl", hash = "sha256:e9d036e343a604db3f5a6c33354018a84a1d3f6dcae3673358b404286204798c"}, - {file = "SQLAlchemy-2.0.24-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9bafaa05b19dc07fa191c1966c5e852af516840b0d7b46b7c3303faf1a349bc9"}, - {file = "SQLAlchemy-2.0.24-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e69290b921b7833c04206f233d6814c60bee1d135b09f5ae5d39229de9b46cd4"}, - {file = "SQLAlchemy-2.0.24-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8398593ccc4440ce6dffcc4f47d9b2d72b9fe7112ac12ea4a44e7d4de364db1"}, - {file = "SQLAlchemy-2.0.24-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f073321a79c81e1a009218a21089f61d87ee5fa3c9563f6be94f8b41ff181812"}, - {file = "SQLAlchemy-2.0.24-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9036ebfd934813990c5b9f71f297e77ed4963720db7d7ceec5a3fdb7cd2ef6ce"}, - {file = "SQLAlchemy-2.0.24-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fcf84fe93397a0f67733aa2a38ed4eab9fc6348189fc950e656e1ea198f45668"}, - {file = "SQLAlchemy-2.0.24-cp38-cp38-win32.whl", hash = 
"sha256:6f5e75de91c754365c098ac08c13fdb267577ce954fa239dd49228b573ca88d7"}, - {file = "SQLAlchemy-2.0.24-cp38-cp38-win_amd64.whl", hash = "sha256:9f29c7f0f4b42337ec5a779e166946a9f86d7d56d827e771b69ecbdf426124ac"}, - {file = "SQLAlchemy-2.0.24-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:07cc423892f2ceda9ae1daa28c0355757f362ecc7505b1ab1a3d5d8dc1c44ac6"}, - {file = "SQLAlchemy-2.0.24-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2a479aa1ab199178ff1956b09ca8a0693e70f9c762875d69292d37049ffd0d8f"}, - {file = "SQLAlchemy-2.0.24-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b8d0e8578e7f853f45f4512b5c920f6a546cd4bed44137460b2a56534644205"}, - {file = "SQLAlchemy-2.0.24-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17e7e27af178d31b436dda6a596703b02a89ba74a15e2980c35ecd9909eea3a"}, - {file = "SQLAlchemy-2.0.24-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1ca7903d5e7db791a355b579c690684fac6304478b68efdc7f2ebdcfe770d8d7"}, - {file = "SQLAlchemy-2.0.24-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db09e424d7bb89b6215a184ca93b4f29d7f00ea261b787918a1af74143b98c06"}, - {file = "SQLAlchemy-2.0.24-cp39-cp39-win32.whl", hash = "sha256:a5cd7d30e47f87b21362beeb3e86f1b5886e7d9b0294b230dde3d3f4a1591375"}, - {file = "SQLAlchemy-2.0.24-cp39-cp39-win_amd64.whl", hash = "sha256:7ae5d44517fe81079ce75cf10f96978284a6db2642c5932a69c82dbae09f009a"}, - {file = "SQLAlchemy-2.0.24-py3-none-any.whl", hash = "sha256:8f358f5cfce04417b6ff738748ca4806fe3d3ae8040fb4e6a0c9a6973ccf9b6e"}, - {file = "SQLAlchemy-2.0.24.tar.gz", hash = "sha256:6db97656fd3fe3f7e5b077f12fa6adb5feb6e0b567a3e99f47ecf5f7ea0a09e3"}, + {file = "SQLAlchemy-2.0.25-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4344d059265cc8b1b1be351bfb88749294b87a8b2bbe21dfbe066c4199541ebd"}, + {file = "SQLAlchemy-2.0.25-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6f9e2e59cbcc6ba1488404aad43de005d05ca56e069477b33ff74e91b6319735"}, + {file = "SQLAlchemy-2.0.25-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84daa0a2055df9ca0f148a64fdde12ac635e30edbca80e87df9b3aaf419e144a"}, + {file = "SQLAlchemy-2.0.25-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc8b7dabe8e67c4832891a5d322cec6d44ef02f432b4588390017f5cec186a84"}, + {file = "SQLAlchemy-2.0.25-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f5693145220517b5f42393e07a6898acdfe820e136c98663b971906120549da5"}, + {file = "SQLAlchemy-2.0.25-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:db854730a25db7c956423bb9fb4bdd1216c839a689bf9cc15fada0a7fb2f4570"}, + {file = "SQLAlchemy-2.0.25-cp310-cp310-win32.whl", hash = "sha256:14a6f68e8fc96e5e8f5647ef6cda6250c780612a573d99e4d881581432ef1669"}, + {file = "SQLAlchemy-2.0.25-cp310-cp310-win_amd64.whl", hash = "sha256:87f6e732bccd7dcf1741c00f1ecf33797383128bd1c90144ac8adc02cbb98643"}, + {file = "SQLAlchemy-2.0.25-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:342d365988ba88ada8af320d43df4e0b13a694dbd75951f537b2d5e4cb5cd002"}, + {file = "SQLAlchemy-2.0.25-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f37c0caf14b9e9b9e8f6dbc81bc56db06acb4363eba5a633167781a48ef036ed"}, + {file = "SQLAlchemy-2.0.25-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa9373708763ef46782d10e950b49d0235bfe58facebd76917d3f5cbf5971aed"}, + {file = "SQLAlchemy-2.0.25-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d24f571990c05f6b36a396218f251f3e0dda916e0c687ef6fdca5072743208f5"}, + {file = 
"SQLAlchemy-2.0.25-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:75432b5b14dc2fff43c50435e248b45c7cdadef73388e5610852b95280ffd0e9"}, + {file = "SQLAlchemy-2.0.25-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:884272dcd3ad97f47702965a0e902b540541890f468d24bd1d98bcfe41c3f018"}, + {file = "SQLAlchemy-2.0.25-cp311-cp311-win32.whl", hash = "sha256:e607cdd99cbf9bb80391f54446b86e16eea6ad309361942bf88318bcd452363c"}, + {file = "SQLAlchemy-2.0.25-cp311-cp311-win_amd64.whl", hash = "sha256:7d505815ac340568fd03f719446a589162d55c52f08abd77ba8964fbb7eb5b5f"}, + {file = "SQLAlchemy-2.0.25-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:0dacf67aee53b16f365c589ce72e766efaabd2b145f9de7c917777b575e3659d"}, + {file = "SQLAlchemy-2.0.25-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b801154027107461ee992ff4b5c09aa7cc6ec91ddfe50d02bca344918c3265c6"}, + {file = "SQLAlchemy-2.0.25-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59a21853f5daeb50412d459cfb13cb82c089ad4c04ec208cd14dddd99fc23b39"}, + {file = "SQLAlchemy-2.0.25-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:29049e2c299b5ace92cbed0c1610a7a236f3baf4c6b66eb9547c01179f638ec5"}, + {file = "SQLAlchemy-2.0.25-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b64b183d610b424a160b0d4d880995e935208fc043d0302dd29fee32d1ee3f95"}, + {file = "SQLAlchemy-2.0.25-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4f7a7d7fcc675d3d85fbf3b3828ecd5990b8d61bd6de3f1b260080b3beccf215"}, + {file = "SQLAlchemy-2.0.25-cp312-cp312-win32.whl", hash = "sha256:cf18ff7fc9941b8fc23437cc3e68ed4ebeff3599eec6ef5eebf305f3d2e9a7c2"}, + {file = "SQLAlchemy-2.0.25-cp312-cp312-win_amd64.whl", hash = "sha256:91f7d9d1c4dd1f4f6e092874c128c11165eafcf7c963128f79e28f8445de82d5"}, + {file = "SQLAlchemy-2.0.25-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:bb209a73b8307f8fe4fe46f6ad5979649be01607f11af1eb94aa9e8a3aaf77f0"}, + {file = "SQLAlchemy-2.0.25-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:798f717ae7c806d67145f6ae94dc7c342d3222d3b9a311a784f371a4333212c7"}, + {file = "SQLAlchemy-2.0.25-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fdd402169aa00df3142149940b3bf9ce7dde075928c1886d9a1df63d4b8de62"}, + {file = "SQLAlchemy-2.0.25-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0d3cab3076af2e4aa5693f89622bef7fa770c6fec967143e4da7508b3dceb9b9"}, + {file = "SQLAlchemy-2.0.25-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:74b080c897563f81062b74e44f5a72fa44c2b373741a9ade701d5f789a10ba23"}, + {file = "SQLAlchemy-2.0.25-cp37-cp37m-win32.whl", hash = "sha256:87d91043ea0dc65ee583026cb18e1b458d8ec5fc0a93637126b5fc0bc3ea68c4"}, + {file = "SQLAlchemy-2.0.25-cp37-cp37m-win_amd64.whl", hash = "sha256:75f99202324383d613ddd1f7455ac908dca9c2dd729ec8584c9541dd41822a2c"}, + {file = "SQLAlchemy-2.0.25-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:420362338681eec03f53467804541a854617faed7272fe71a1bfdb07336a381e"}, + {file = "SQLAlchemy-2.0.25-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7c88f0c7dcc5f99bdb34b4fd9b69b93c89f893f454f40219fe923a3a2fd11625"}, + {file = "SQLAlchemy-2.0.25-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3be4987e3ee9d9a380b66393b77a4cd6d742480c951a1c56a23c335caca4ce3"}, + {file = "SQLAlchemy-2.0.25-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2a159111a0f58fb034c93eeba211b4141137ec4b0a6e75789ab7a3ef3c7e7e3"}, + {file = 
"SQLAlchemy-2.0.25-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8b8cb63d3ea63b29074dcd29da4dc6a97ad1349151f2d2949495418fd6e48db9"}, + {file = "SQLAlchemy-2.0.25-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:736ea78cd06de6c21ecba7416499e7236a22374561493b456a1f7ffbe3f6cdb4"}, + {file = "SQLAlchemy-2.0.25-cp38-cp38-win32.whl", hash = "sha256:10331f129982a19df4284ceac6fe87353ca3ca6b4ca77ff7d697209ae0a5915e"}, + {file = "SQLAlchemy-2.0.25-cp38-cp38-win_amd64.whl", hash = "sha256:c55731c116806836a5d678a70c84cb13f2cedba920212ba7dcad53260997666d"}, + {file = "SQLAlchemy-2.0.25-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:605b6b059f4b57b277f75ace81cc5bc6335efcbcc4ccb9066695e515dbdb3900"}, + {file = "SQLAlchemy-2.0.25-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:665f0a3954635b5b777a55111ababf44b4fc12b1f3ba0a435b602b6387ffd7cf"}, + {file = "SQLAlchemy-2.0.25-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ecf6d4cda1f9f6cb0b45803a01ea7f034e2f1aed9475e883410812d9f9e3cfcf"}, + {file = "SQLAlchemy-2.0.25-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c51db269513917394faec5e5c00d6f83829742ba62e2ac4fa5c98d58be91662f"}, + {file = "SQLAlchemy-2.0.25-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:790f533fa5c8901a62b6fef5811d48980adeb2f51f1290ade8b5e7ba990ba3de"}, + {file = "SQLAlchemy-2.0.25-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1b1180cda6df7af84fe72e4530f192231b1f29a7496951db4ff38dac1687202d"}, + {file = "SQLAlchemy-2.0.25-cp39-cp39-win32.whl", hash = "sha256:555651adbb503ac7f4cb35834c5e4ae0819aab2cd24857a123370764dc7d7e24"}, + {file = "SQLAlchemy-2.0.25-cp39-cp39-win_amd64.whl", hash = "sha256:dc55990143cbd853a5d038c05e79284baedf3e299661389654551bd02a6a68d7"}, + {file = "SQLAlchemy-2.0.25-py3-none-any.whl", hash = "sha256:a86b4240e67d4753dc3092d9511886795b3c2852abe599cffe108952f7af7ac3"}, + {file = "SQLAlchemy-2.0.25.tar.gz", hash = "sha256:a2c69a7664fb2d54b8682dd774c3b54f67f84fa123cf84dda2a5f40dcaa04e08"}, ] [package.dependencies] greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""} -typing-extensions = ">=4.2.0" +typing-extensions = ">=4.6.0" [package.extras] aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] From 452238e33f2be86b9076be78db2441aea043afb8 Mon Sep 17 00:00:00 2001 From: Mehul Batra <66407733+MehulBatra@users.noreply.github.com> Date: Thu, 4 Jan 2024 13:48:15 +0530 Subject: [PATCH 007/169] Bug fix falsy value of zero (#249) * certain values are considered False in a boolean context. 
Such falsy values include None, 0, and empty sequences/collections; we handle this explicitly, since in our case 0 stands for a valid value * Update pyiceberg/table/__init__.py Co-authored-by: Fokko Driesprong --------- Co-authored-by: Fokko Driesprong --- pyiceberg/table/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyiceberg/table/__init__.py b/pyiceberg/table/__init__.py index 40b37cc248..fa04fb7b24 100644 --- a/pyiceberg/table/__init__.py +++ b/pyiceberg/table/__init__.py @@ -809,8 +809,8 @@ def new_snapshot_id(self) -> int: def current_snapshot(self) -> Optional[Snapshot]: """Get the current snapshot for this table, or None if there is no current snapshot.""" - if snapshot_id := self.metadata.current_snapshot_id: - return self.snapshot_by_id(snapshot_id) + if self.metadata.current_snapshot_id is not None: + return self.snapshot_by_id(self.metadata.current_snapshot_id) return None def snapshot_by_id(self, snapshot_id: int) -> Optional[Snapshot]: From dba1ef8d85b438d4977586998c99a9dab369491c Mon Sep 17 00:00:00 2001 From: Fokko Driesprong Date: Fri, 5 Jan 2024 08:43:55 +0100 Subject: [PATCH 008/169] Correct schema behavior (#247) * Correct schema behavior When we alter the schema, we want to use the latest schema by default, except when you select a specific snapshot that has a schema-id. * Add warning if schema-id is missing from the metadata * Catch nonexistent snapshots --- pyiceberg/table/__init__.py | 21 ++++++--- tests/table/test_init.py | 89 ++++++++++++++++++++++++++++++++++++- 2 files changed, 103 insertions(+), 7 deletions(-) diff --git a/pyiceberg/table/__init__.py b/pyiceberg/table/__init__.py index fa04fb7b24..1a4c39cfd2 100644 --- a/pyiceberg/table/__init__.py +++ b/pyiceberg/table/__init__.py @@ -19,6 +19,7 @@ import datetime import itertools import uuid +import warnings from abc import ABC, abstractmethod from copy import copy from dataclasses import dataclass @@ -942,15 +943,23 @@ def snapshot(self) -> Optional[Snapshot]: return self.table.current_snapshot() def projection(self) -> Schema: - snapshot_schema = self.table.schema() - if snapshot := self.snapshot(): - if snapshot.schema_id is not None: - snapshot_schema = self.table.schemas()[snapshot.schema_id] + current_schema = self.table.schema() + if self.snapshot_id is not None: + snapshot = self.table.snapshot_by_id(self.snapshot_id) + if snapshot is not None: + if snapshot.schema_id is not None: + snapshot_schema = self.table.schemas().get(snapshot.schema_id) + if snapshot_schema is not None: + current_schema = snapshot_schema + else: + warnings.warn(f"Metadata does not contain schema with id: {snapshot.schema_id}") + else: + raise ValueError(f"Snapshot not found: {self.snapshot_id}") if "*" in self.selected_fields: - return snapshot_schema + return current_schema - return snapshot_schema.select(*self.selected_fields, case_sensitive=self.case_sensitive) + return current_schema.select(*self.selected_fields, case_sensitive=self.case_sensitive) @abstractmethod def plan_files(self) -> Iterable[ScanTask]: diff --git a/tests/table/test_init.py b/tests/table/test_init.py index 04d467c318..547990c4c8 100644 --- a/tests/table/test_init.py +++ b/tests/table/test_init.py @@ -22,6 +22,7 @@ import pytest from sortedcontainers import SortedList +from pyiceberg.catalog.noop import NoopCatalog from pyiceberg.exceptions import CommitFailedException from pyiceberg.expressions import ( AlwaysTrue, @@ -29,7 +30,7 @@ EqualTo, In, ) -from pyiceberg.io import PY_IO_IMPL +from pyiceberg.io import PY_IO_IMPL, load_file_io from
pyiceberg.manifest import ( DataFile, DataFileContent, @@ -848,3 +849,89 @@ def test_assert_default_sort_order_id(table_v2: Table) -> None: match="Requirement failed: default sort order id has changed: expected 1, found 3", ): AssertDefaultSortOrderId(default_sort_order_id=1).validate(base_metadata) + + +def test_correct_schema() -> None: + table_metadata = TableMetadataV2( + **{ + "format-version": 2, + "table-uuid": "9c12d441-03fe-4693-9a96-a0705ddf69c1", + "location": "s3://bucket/test/location", + "last-sequence-number": 34, + "last-updated-ms": 1602638573590, + "last-column-id": 3, + "current-schema-id": 1, + "schemas": [ + {"type": "struct", "schema-id": 0, "fields": [{"id": 1, "name": "x", "required": True, "type": "long"}]}, + { + "type": "struct", + "schema-id": 1, + "identifier-field-ids": [1, 2], + "fields": [ + {"id": 1, "name": "x", "required": True, "type": "long"}, + {"id": 2, "name": "y", "required": True, "type": "long"}, + {"id": 3, "name": "z", "required": True, "type": "long"}, + ], + }, + ], + "default-spec-id": 0, + "partition-specs": [ + {"spec-id": 0, "fields": [{"name": "x", "transform": "identity", "source-id": 1, "field-id": 1000}]} + ], + "last-partition-id": 1000, + "default-sort-order-id": 0, + "sort-orders": [], + "current-snapshot-id": 123, + "snapshots": [ + { + "snapshot-id": 234, + "timestamp-ms": 1515100955770, + "sequence-number": 0, + "summary": {"operation": "append"}, + "manifest-list": "s3://a/b/1.avro", + "schema-id": 10, + }, + { + "snapshot-id": 123, + "timestamp-ms": 1515100955770, + "sequence-number": 0, + "summary": {"operation": "append"}, + "manifest-list": "s3://a/b/1.avro", + "schema-id": 0, + }, + ], + } + ) + + t = Table( + identifier=("default", "t1"), + metadata=table_metadata, + metadata_location="s3://../..", + io=load_file_io(), + catalog=NoopCatalog("NoopCatalog"), + ) + + # Should use the current schema, instead of the one from the snapshot + assert t.scan().projection() == Schema( + NestedField(field_id=1, name='x', field_type=LongType(), required=True), + NestedField(field_id=2, name='y', field_type=LongType(), required=True), + NestedField(field_id=3, name='z', field_type=LongType(), required=True), + schema_id=1, + identifier_field_ids=[1, 2], + ) + + # When we explicitly filter on a snapshot, we want the schema that's linked to that snapshot + assert t.scan(snapshot_id=123).projection() == Schema( + NestedField(field_id=1, name='x', field_type=LongType(), required=True), + schema_id=0, + identifier_field_ids=[], + ) + + with pytest.warns(UserWarning, match="Metadata does not contain schema with id: 10"): + t.scan(snapshot_id=234).projection() + + # Invalid snapshot + with pytest.raises(ValueError) as exc_info: + _ = t.scan(snapshot_id=-1).projection() + + assert "Snapshot not found: -1" in str(exc_info.value) From 33b4b736209b8f3b430fbf6f388333947433e13d Mon Sep 17 00:00:00 2001 From: Hussein Awala Date: Fri, 5 Jan 2024 21:09:31 +0100 Subject: [PATCH 009/169] Replace Black by Ruff formatter (#127) * Replace Black by the Ruff formatter * update ruff version * fix format * Update ruff format and add preserve as a quote style --- .pre-commit-config.yaml | 11 +- pyiceberg/avro/codecs/__init__.py | 1 + pyiceberg/avro/codecs/codec.py | 6 +- pyiceberg/avro/file.py | 1 + pyiceberg/avro/reader.py | 7 +- pyiceberg/avro/resolver.py | 4 +- pyiceberg/avro/writer.py | 4 +- pyiceberg/cli/output.py | 45 +++------ pyiceberg/conversions.py | 1 + pyiceberg/expressions/__init__.py | 33 ++---- pyiceberg/expressions/literals.py | 3 +-
pyiceberg/io/__init__.py | 19 ++-- pyiceberg/io/fsspec.py | 1 + pyiceberg/io/pyarrow.py | 1 + pyiceberg/manifest.py | 66 ++++++------ pyiceberg/table/__init__.py | 9 +- pyiceberg/table/name_mapping.py | 1 + pyiceberg/transforms.py | 15 +-- pyiceberg/types.py | 1 + pyiceberg/utils/concurrent.py | 1 + pyiceberg/utils/datetime.py | 1 + pyiceberg/utils/decimal.py | 1 + pyiceberg/utils/schema_conversion.py | 1 + pyiceberg/utils/singleton.py | 1 + pyproject.toml | 3 + tests/avro/test_reader.py | 4 +- tests/avro/test_resolver.py | 50 +++++---- tests/avro/test_writer.py | 40 ++++---- tests/catalog/test_base.py | 26 +++-- tests/catalog/test_dynamodb.py | 5 +- tests/catalog/test_glue.py | 9 +- tests/conftest.py | 1 + tests/expressions/test_expressions.py | 6 +- tests/expressions/test_visitors.py | 12 +-- tests/io/test_pyarrow.py | 12 +-- tests/io/test_pyarrow_visitor.py | 12 +-- tests/table/test_init.py | 12 +-- tests/table/test_name_mapping.py | 140 ++++++++++++-------------- tests/test_conversions.py | 1 + tests/test_transforms.py | 4 +- 40 files changed, 257 insertions(+), 314 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index c4424513d2..db6742a8cd 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -29,15 +29,12 @@ repos: - id: check-ast - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version (Used for linting) - rev: v0.1.0 + rev: v0.1.8 hooks: - id: ruff - args: [ --fix, --exit-non-zero-on-fix ] - - repo: https://github.com/ambv/black - rev: 23.10.0 - hooks: - - id: black - args: [--skip-string-normalization] + args: [ --fix, --exit-non-zero-on-fix, --preview ] + - id: ruff-format + args: [ --preview ] - repo: https://github.com/pre-commit/mirrors-mypy rev: v1.6.1 hooks: diff --git a/pyiceberg/avro/codecs/__init__.py b/pyiceberg/avro/codecs/__init__.py index 8010b9397e..22e2f71cf8 100644 --- a/pyiceberg/avro/codecs/__init__.py +++ b/pyiceberg/avro/codecs/__init__.py @@ -23,6 +23,7 @@ so don't confuse it with the Python's "codecs", which is a package mainly for converting character sets (https://docs.python.org/3/library/codecs.html). """ + from __future__ import annotations from typing import Dict, Optional, Type diff --git a/pyiceberg/avro/codecs/codec.py b/pyiceberg/avro/codecs/codec.py index 1c04f0db3e..ab97ab6990 100644 --- a/pyiceberg/avro/codecs/codec.py +++ b/pyiceberg/avro/codecs/codec.py @@ -24,10 +24,8 @@ class Codec(ABC): @staticmethod @abstractmethod - def compress(data: bytes) -> tuple[bytes, int]: - ... + def compress(data: bytes) -> tuple[bytes, int]: ... @staticmethod @abstractmethod - def decompress(data: bytes) -> bytes: - ... + def decompress(data: bytes) -> bytes: ... diff --git a/pyiceberg/avro/file.py b/pyiceberg/avro/file.py index 4985c6fb60..a8befd9f91 100644 --- a/pyiceberg/avro/file.py +++ b/pyiceberg/avro/file.py @@ -16,6 +16,7 @@ # under the License. # pylint: disable=W0621 """Avro reader for reading Avro files.""" + from __future__ import annotations import io diff --git a/pyiceberg/avro/reader.py b/pyiceberg/avro/reader.py index 2b87e4b06f..988bd42ba4 100644 --- a/pyiceberg/avro/reader.py +++ b/pyiceberg/avro/reader.py @@ -23,6 +23,7 @@ The reader tree can be changed in such a way that the read schema is different, while respecting the read schema. 
""" + from __future__ import annotations from abc import abstractmethod @@ -85,12 +86,10 @@ def _skip_map_array(decoder: BinaryDecoder, skip_entry: Callable[[], None]) -> N class Reader(Singleton): @abstractmethod - def read(self, decoder: BinaryDecoder) -> Any: - ... + def read(self, decoder: BinaryDecoder) -> Any: ... @abstractmethod - def skip(self, decoder: BinaryDecoder) -> None: - ... + def skip(self, decoder: BinaryDecoder) -> None: ... def __repr__(self) -> str: """Return the string representation of the Reader class.""" diff --git a/pyiceberg/avro/resolver.py b/pyiceberg/avro/resolver.py index faf4dd0501..2a53f4869b 100644 --- a/pyiceberg/avro/resolver.py +++ b/pyiceberg/avro/resolver.py @@ -232,9 +232,7 @@ def resolve_reader( Raises: NotImplementedError: If attempting to resolve an unrecognized object type. """ - return visit_with_partner( - file_schema, read_schema, ReadSchemaResolver(read_types, read_enums), SchemaPartnerAccessor() - ) # type: ignore + return visit_with_partner(file_schema, read_schema, ReadSchemaResolver(read_types, read_enums), SchemaPartnerAccessor()) # type: ignore class EnumReader(Reader): diff --git a/pyiceberg/avro/writer.py b/pyiceberg/avro/writer.py index 4e3d3d476a..b53230d3c7 100644 --- a/pyiceberg/avro/writer.py +++ b/pyiceberg/avro/writer.py @@ -20,6 +20,7 @@ Constructing a writer tree from the schema makes it easy to decouple the writing implementation from the schema. """ + from __future__ import annotations from abc import abstractmethod @@ -43,8 +44,7 @@ @dataclass(frozen=True) class Writer(Singleton): @abstractmethod - def write(self, encoder: BinaryEncoder, val: Any) -> Any: - ... + def write(self, encoder: BinaryEncoder, val: Any) -> Any: ... def __repr__(self) -> str: """Return string representation of this object.""" diff --git a/pyiceberg/cli/output.py b/pyiceberg/cli/output.py index 8e0ad2deee..18cdab1556 100644 --- a/pyiceberg/cli/output.py +++ b/pyiceberg/cli/output.py @@ -40,48 +40,37 @@ class Output(ABC): """Output interface for exporting.""" @abstractmethod - def exception(self, ex: Exception) -> None: - ... + def exception(self, ex: Exception) -> None: ... @abstractmethod - def identifiers(self, identifiers: List[Identifier]) -> None: - ... + def identifiers(self, identifiers: List[Identifier]) -> None: ... @abstractmethod - def describe_table(self, table: Table) -> None: - ... + def describe_table(self, table: Table) -> None: ... @abstractmethod - def files(self, table: Table, history: bool) -> None: - ... + def files(self, table: Table, history: bool) -> None: ... @abstractmethod - def describe_properties(self, properties: Properties) -> None: - ... + def describe_properties(self, properties: Properties) -> None: ... @abstractmethod - def text(self, response: str) -> None: - ... + def text(self, response: str) -> None: ... @abstractmethod - def schema(self, schema: Schema) -> None: - ... + def schema(self, schema: Schema) -> None: ... @abstractmethod - def spec(self, spec: PartitionSpec) -> None: - ... + def spec(self, spec: PartitionSpec) -> None: ... @abstractmethod - def uuid(self, uuid: Optional[UUID]) -> None: - ... + def uuid(self, uuid: Optional[UUID]) -> None: ... @abstractmethod - def version(self, version: str) -> None: - ... + def version(self, version: str) -> None: ... @abstractmethod - def describe_refs(self, refs: List[Tuple[str, SnapshotRefType, Dict[str, str]]]) -> None: - ... + def describe_refs(self, refs: List[Tuple[str, SnapshotRefType, Dict[str, str]]]) -> None: ... 
class ConsoleOutput(Output): @@ -252,10 +241,8 @@ def version(self, version: str) -> None: self._out({"version": version}) def describe_refs(self, refs: List[Tuple[str, SnapshotRefType, Dict[str, str]]]) -> None: - self._out( - [ - {"name": name, "type": type, detail_key: detail_val} - for name, type, detail in refs - for detail_key, detail_val in detail.items() - ] - ) + self._out([ + {"name": name, "type": type, detail_key: detail_val} + for name, type, detail in refs + for detail_key, detail_val in detail.items() + ]) diff --git a/pyiceberg/conversions.py b/pyiceberg/conversions.py index 83ff172993..b523deff48 100644 --- a/pyiceberg/conversions.py +++ b/pyiceberg/conversions.py @@ -27,6 +27,7 @@ implementation, a concrete function is registered for each generic conversion function. For PrimitiveType implementations that share the same conversion logic, registrations can be stacked. """ + import uuid from datetime import date, datetime, time from decimal import Decimal diff --git a/pyiceberg/expressions/__init__.py b/pyiceberg/expressions/__init__.py index cb46a70401..5adf3a8a48 100644 --- a/pyiceberg/expressions/__init__.py +++ b/pyiceberg/expressions/__init__.py @@ -80,13 +80,11 @@ class Unbound(Generic[B], ABC): """Represents an unbound value expression.""" @abstractmethod - def bind(self, schema: Schema, case_sensitive: bool = True) -> B: - ... + def bind(self, schema: Schema, case_sensitive: bool = True) -> B: ... @property @abstractmethod - def as_bound(self) -> Type[Bound]: - ... + def as_bound(self) -> Type[Bound]: ... class BoundTerm(Term[L], Bound, ABC): @@ -142,8 +140,7 @@ class UnboundTerm(Term[Any], Unbound[BoundTerm[L]], ABC): """Represents an unbound term.""" @abstractmethod - def bind(self, schema: Schema, case_sensitive: bool = True) -> BoundTerm[L]: - ... + def bind(self, schema: Schema, case_sensitive: bool = True) -> BoundTerm[L]: ... class Reference(UnboundTerm[Any]): @@ -352,8 +349,7 @@ def __eq__(self, other: Any) -> bool: @property @abstractmethod - def as_unbound(self) -> Type[UnboundPredicate[Any]]: - ... + def as_unbound(self) -> Type[UnboundPredicate[Any]]: ... class UnboundPredicate(Generic[L], Unbound[BooleanExpression], BooleanExpression, ABC): @@ -367,13 +363,11 @@ def __eq__(self, other: Any) -> bool: return self.term == other.term if isinstance(other, self.__class__) else False @abstractmethod - def bind(self, schema: Schema, case_sensitive: bool = True) -> BooleanExpression: - ... + def bind(self, schema: Schema, case_sensitive: bool = True) -> BooleanExpression: ... @property @abstractmethod - def as_bound(self) -> Type[BoundPredicate[L]]: - ... + def as_bound(self) -> Type[BoundPredicate[L]]: ... class UnaryPredicate(UnboundPredicate[Any], ABC): @@ -387,8 +381,7 @@ def __repr__(self) -> str: @property @abstractmethod - def as_bound(self) -> Type[BoundUnaryPredicate[Any]]: - ... + def as_bound(self) -> Type[BoundUnaryPredicate[Any]]: ... class BoundUnaryPredicate(BoundPredicate[L], ABC): @@ -398,8 +391,7 @@ def __repr__(self) -> str: @property @abstractmethod - def as_unbound(self) -> Type[UnaryPredicate]: - ... + def as_unbound(self) -> Type[UnaryPredicate]: ... def __getnewargs__(self) -> Tuple[BoundTerm[L]]: """Pickle the BoundUnaryPredicate class.""" @@ -575,8 +567,7 @@ def __getnewargs__(self) -> Tuple[BoundTerm[L], Set[Literal[L]]]: @property @abstractmethod - def as_unbound(self) -> Type[SetPredicate[L]]: - ... + def as_unbound(self) -> Type[SetPredicate[L]]: ... 
class BoundIn(BoundSetPredicate[L]): @@ -705,8 +696,7 @@ def __repr__(self) -> str: @property @abstractmethod - def as_bound(self) -> Type[BoundLiteralPredicate[L]]: - ... + def as_bound(self) -> Type[BoundLiteralPredicate[L]]: ... class BoundLiteralPredicate(BoundPredicate[L], ABC): @@ -729,8 +719,7 @@ def __repr__(self) -> str: @property @abstractmethod - def as_unbound(self) -> Type[LiteralPredicate[L]]: - ... + def as_unbound(self) -> Type[LiteralPredicate[L]]: ... class BoundEqualTo(BoundLiteralPredicate[L]): diff --git a/pyiceberg/expressions/literals.py b/pyiceberg/expressions/literals.py index 6d65058897..bf76be36e4 100644 --- a/pyiceberg/expressions/literals.py +++ b/pyiceberg/expressions/literals.py @@ -78,8 +78,7 @@ def value(self) -> L: @singledispatchmethod @abstractmethod - def to(self, type_var: IcebergType) -> Literal[L]: - ... # pragma: no cover + def to(self, type_var: IcebergType) -> Literal[L]: ... # pragma: no cover def __repr__(self) -> str: """Return the string representation of the Literal class.""" diff --git a/pyiceberg/io/__init__.py b/pyiceberg/io/__init__.py index dc94dc8a4e..1b0bc71af0 100644 --- a/pyiceberg/io/__init__.py +++ b/pyiceberg/io/__init__.py @@ -22,6 +22,7 @@ for returning an InputFile instance, an OutputFile instance, and deleting a file given its location. """ + from __future__ import annotations import importlib @@ -78,20 +79,16 @@ class InputStream(Protocol): """ @abstractmethod - def read(self, size: int = 0) -> bytes: - ... + def read(self, size: int = 0) -> bytes: ... @abstractmethod - def seek(self, offset: int, whence: int = SEEK_SET) -> int: - ... + def seek(self, offset: int, whence: int = SEEK_SET) -> int: ... @abstractmethod - def tell(self) -> int: - ... + def tell(self) -> int: ... @abstractmethod - def close(self) -> None: - ... + def close(self) -> None: ... def __enter__(self) -> InputStream: """Provide setup when opening an InputStream using a 'with' statement.""" @@ -112,12 +109,10 @@ class OutputStream(Protocol): # pragma: no cover """ @abstractmethod - def write(self, b: bytes) -> int: - ... + def write(self, b: bytes) -> int: ... @abstractmethod - def close(self) -> None: - ... + def close(self) -> None: ... @abstractmethod def __enter__(self) -> OutputStream: diff --git a/pyiceberg/io/fsspec.py b/pyiceberg/io/fsspec.py index 18b1f5b885..cfb14f240f 100644 --- a/pyiceberg/io/fsspec.py +++ b/pyiceberg/io/fsspec.py @@ -15,6 +15,7 @@ # specific language governing permissions and limitations # under the License. """FileIO implementation for reading and writing table files that uses fsspec compatible filesystems.""" + import errno import json import logging diff --git a/pyiceberg/io/pyarrow.py b/pyiceberg/io/pyarrow.py index a537cf7a30..f1551133be 100644 --- a/pyiceberg/io/pyarrow.py +++ b/pyiceberg/io/pyarrow.py @@ -22,6 +22,7 @@ type to use. Theoretically, this allows the supported storage types to grow naturally with the pyarrow library. 
""" + from __future__ import annotations import concurrent.futures diff --git a/pyiceberg/manifest.py b/pyiceberg/manifest.py index ec7c218b1c..0ddfcd28d5 100644 --- a/pyiceberg/manifest.py +++ b/pyiceberg/manifest.py @@ -301,31 +301,27 @@ def _(partition_field_type: PrimitiveType) -> PrimitiveType: def data_file_with_partition(partition_type: StructType, format_version: Literal[1, 2]) -> StructType: - data_file_partition_type = StructType( - *[ - NestedField( - field_id=field.field_id, - name=field.name, - field_type=partition_field_to_data_file_partition_field(field.field_type), - ) - for field in partition_type.fields - ] - ) + data_file_partition_type = StructType(*[ + NestedField( + field_id=field.field_id, + name=field.name, + field_type=partition_field_to_data_file_partition_field(field.field_type), + ) + for field in partition_type.fields + ]) - return StructType( - *[ - NestedField( - field_id=102, - name="partition", - field_type=data_file_partition_type, - required=True, - doc="Partition data tuple, schema based on the partition spec", - ) - if field.field_id == 102 - else field - for field in DATA_FILE_TYPE[format_version].fields - ] - ) + return StructType(*[ + NestedField( + field_id=102, + name="partition", + field_type=data_file_partition_type, + required=True, + doc="Partition data tuple, schema based on the partition spec", + ) + if field.field_id == 102 + else field + for field in DATA_FILE_TYPE[format_version].fields + ]) class DataFile(Record): @@ -410,12 +406,10 @@ def __eq__(self, other: Any) -> bool: def manifest_entry_schema_with_data_file(format_version: Literal[1, 2], data_file: StructType) -> Schema: - return Schema( - *[ - NestedField(2, "data_file", data_file, required=True) if field.field_id == 2 else field - for field in MANIFEST_ENTRY_SCHEMAS[format_version].fields - ] - ) + return Schema(*[ + NestedField(2, "data_file", data_file, required=True) if field.field_id == 2 else field + for field in MANIFEST_ENTRY_SCHEMAS[format_version].fields + ]) class ManifestEntry(Record): @@ -699,13 +693,11 @@ def __exit__( self._writer.__exit__(exc_type, exc_value, traceback) @abstractmethod - def content(self) -> ManifestContent: - ... + def content(self) -> ManifestContent: ... @property @abstractmethod - def version(self) -> Literal[1, 2]: - ... + def version(self) -> Literal[1, 2]: ... def _with_partition(self, format_version: Literal[1, 2]) -> Schema: data_file_type = data_file_with_partition( @@ -723,8 +715,7 @@ def new_writer(self) -> AvroOutputFile[ManifestEntry]: ) @abstractmethod - def prepare_entry(self, entry: ManifestEntry) -> ManifestEntry: - ... + def prepare_entry(self, entry: ManifestEntry) -> ManifestEntry: ... def to_manifest_file(self) -> ManifestFile: """Return the manifest file.""" @@ -875,8 +866,7 @@ def __exit__( return @abstractmethod - def prepare_manifest(self, manifest_file: ManifestFile) -> ManifestFile: - ... + def prepare_manifest(self, manifest_file: ManifestFile) -> ManifestFile: ... 
def add_manifests(self, manifest_files: List[ManifestFile]) -> ManifestListWriter: self._writer.write_block([self.prepare_manifest(manifest_file) for manifest_file in manifest_files]) diff --git a/pyiceberg/table/__init__.py b/pyiceberg/table/__init__.py index 1a4c39cfd2..ebeaa19b54 100644 --- a/pyiceberg/table/__init__.py +++ b/pyiceberg/table/__init__.py @@ -962,16 +962,13 @@ def projection(self) -> Schema: return current_schema.select(*self.selected_fields, case_sensitive=self.case_sensitive) @abstractmethod - def plan_files(self) -> Iterable[ScanTask]: - ... + def plan_files(self) -> Iterable[ScanTask]: ... @abstractmethod - def to_arrow(self) -> pa.Table: - ... + def to_arrow(self) -> pa.Table: ... @abstractmethod - def to_pandas(self, **kwargs: Any) -> pd.DataFrame: - ... + def to_pandas(self, **kwargs: Any) -> pd.DataFrame: ... def update(self: S, **overrides: Any) -> S: """Create a copy of this table scan with updated fields.""" diff --git a/pyiceberg/table/name_mapping.py b/pyiceberg/table/name_mapping.py index b07fbc0735..ffe96359a8 100644 --- a/pyiceberg/table/name_mapping.py +++ b/pyiceberg/table/name_mapping.py @@ -20,6 +20,7 @@ More information can be found on here: https://iceberg.apache.org/spec/#name-mapping-serialization """ + from __future__ import annotations from abc import ABC, abstractmethod diff --git a/pyiceberg/transforms.py b/pyiceberg/transforms.py index b9afae9a7e..2c91f1c92b 100644 --- a/pyiceberg/transforms.py +++ b/pyiceberg/transforms.py @@ -132,20 +132,17 @@ class Transform(IcebergRootModel[str], ABC, Generic[S, T]): root: str = Field() @abstractmethod - def transform(self, source: IcebergType) -> Callable[[Optional[S]], Optional[T]]: - ... + def transform(self, source: IcebergType) -> Callable[[Optional[S]], Optional[T]]: ... @abstractmethod def can_transform(self, source: IcebergType) -> bool: return False @abstractmethod - def result_type(self, source: IcebergType) -> IcebergType: - ... + def result_type(self, source: IcebergType) -> IcebergType: ... @abstractmethod - def project(self, name: str, pred: BoundPredicate[L]) -> Optional[UnboundPredicate[Any]]: - ... + def project(self, name: str, pred: BoundPredicate[L]) -> Optional[UnboundPredicate[Any]]: ... @property def preserves_order(self) -> bool: @@ -285,8 +282,7 @@ class TimeResolution(IntEnum): class TimeTransform(Transform[S, int], Generic[S], Singleton): @property @abstractmethod - def granularity(self) -> TimeResolution: - ... + def granularity(self) -> TimeResolution: ... def satisfies_order_of(self, other: Transform[S, T]) -> bool: return self.granularity <= other.granularity if hasattr(other, "granularity") else False @@ -295,8 +291,7 @@ def result_type(self, source: IcebergType) -> IntegerType: return IntegerType() @abstractmethod - def transform(self, source: IcebergType) -> Callable[[Optional[Any]], Optional[int]]: - ... + def transform(self, source: IcebergType) -> Callable[[Optional[Any]], Optional[int]]: ... 
def project(self, name: str, pred: BoundPredicate[L]) -> Optional[UnboundPredicate[Any]]: transformer = self.transform(pred.term.ref().field.field_type) diff --git a/pyiceberg/types.py b/pyiceberg/types.py index b8fdedea51..5e7c193295 100644 --- a/pyiceberg/types.py +++ b/pyiceberg/types.py @@ -29,6 +29,7 @@ Notes: - https://iceberg.apache.org/#spec/#primitive-types """ + from __future__ import annotations import re diff --git a/pyiceberg/utils/concurrent.py b/pyiceberg/utils/concurrent.py index 75d6ff8365..f6c0a23a9c 100644 --- a/pyiceberg/utils/concurrent.py +++ b/pyiceberg/utils/concurrent.py @@ -15,6 +15,7 @@ # specific language governing permissions and limitations # under the License. """Concurrency concepts that support efficient multi-threading.""" + from concurrent.futures import Executor, ThreadPoolExecutor from typing import Optional diff --git a/pyiceberg/utils/datetime.py b/pyiceberg/utils/datetime.py index 1ab56d6ea3..0cb6926efa 100644 --- a/pyiceberg/utils/datetime.py +++ b/pyiceberg/utils/datetime.py @@ -15,6 +15,7 @@ # specific language governing permissions and limitations # under the License. """Helper methods for working with date/time representations.""" + from __future__ import annotations import re diff --git a/pyiceberg/utils/decimal.py b/pyiceberg/utils/decimal.py index 503545b69b..c2125d6734 100644 --- a/pyiceberg/utils/decimal.py +++ b/pyiceberg/utils/decimal.py @@ -16,6 +16,7 @@ # under the License. """Helper methods for working with Python Decimals.""" + import math from decimal import Decimal from typing import Optional, Union diff --git a/pyiceberg/utils/schema_conversion.py b/pyiceberg/utils/schema_conversion.py index d4b7aab4f1..2cceecc639 100644 --- a/pyiceberg/utils/schema_conversion.py +++ b/pyiceberg/utils/schema_conversion.py @@ -15,6 +15,7 @@ # specific language governing permissions and limitations # under the License. 
"""Utility class for converting between Avro and Iceberg schemas.""" + import logging from typing import ( Any, diff --git a/pyiceberg/utils/singleton.py b/pyiceberg/utils/singleton.py index 9380b89cbc..90e909ecbc 100644 --- a/pyiceberg/utils/singleton.py +++ b/pyiceberg/utils/singleton.py @@ -27,6 +27,7 @@ More information on metaclasses: https://docs.python.org/3/reference/datamodel.html#metaclasses """ + from typing import Any, ClassVar, Dict diff --git a/pyproject.toml b/pyproject.toml index 31aecb26b1..6fb8d261eb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -356,3 +356,6 @@ detect-same-package = true lines-between-types = 0 known-first-party = ["pyiceberg", "tests"] section-order = ["future", "standard-library", "third-party", "first-party", "local-folder"] + +[tool.ruff.format] +quote-style = "preserve" diff --git a/tests/avro/test_reader.py b/tests/avro/test_reader.py index 48ee8911da..c97d421d87 100644 --- a/tests/avro/test_reader.py +++ b/tests/avro/test_reader.py @@ -353,7 +353,9 @@ def test_read_struct_lambda(decoder_class: Callable[[bytes], BinaryDecoder]) -> struct = StructType(NestedField(1, "id", IntegerType(), required=True)) # You can also pass in an arbitrary function that returns a struct result = StructReader( - ((0, IntegerReader()),), lambda struct: Record(struct=struct), struct # pylint: disable=unnecessary-lambda + ((0, IntegerReader()),), + lambda struct: Record(struct=struct), + struct, # pylint: disable=unnecessary-lambda ).read(decoder) assert repr(result) == "Record[id=12]" diff --git a/tests/avro/test_resolver.py b/tests/avro/test_resolver.py index 51d2a7d8fc..c08ca235fb 100644 --- a/tests/avro/test_resolver.py +++ b/tests/avro/test_resolver.py @@ -318,34 +318,30 @@ def test_resolver_initial_value() -> None: def test_resolve_writer() -> None: actual = resolve_writer(record_schema=MANIFEST_ENTRY_SCHEMAS[2], file_schema=MANIFEST_ENTRY_SCHEMAS[1]) - expected = StructWriter( + expected = StructWriter(( + (0, IntegerWriter()), + (1, IntegerWriter()), ( - (0, IntegerWriter()), - (1, IntegerWriter()), - ( - 4, - StructWriter( - ( - (1, StringWriter()), - (2, StringWriter()), - (3, StructWriter(())), - (4, IntegerWriter()), - (5, IntegerWriter()), - (None, DefaultWriter(writer=IntegerWriter(), value=67108864)), - (6, OptionWriter(option=MapWriter(key_writer=IntegerWriter(), value_writer=IntegerWriter()))), - (7, OptionWriter(option=MapWriter(key_writer=IntegerWriter(), value_writer=IntegerWriter()))), - (8, OptionWriter(option=MapWriter(key_writer=IntegerWriter(), value_writer=IntegerWriter()))), - (9, OptionWriter(option=MapWriter(key_writer=IntegerWriter(), value_writer=IntegerWriter()))), - (10, OptionWriter(option=MapWriter(key_writer=IntegerWriter(), value_writer=BinaryWriter()))), - (11, OptionWriter(option=MapWriter(key_writer=IntegerWriter(), value_writer=BinaryWriter()))), - (12, OptionWriter(option=BinaryWriter())), - (13, OptionWriter(option=ListWriter(element_writer=IntegerWriter()))), - (15, OptionWriter(option=IntegerWriter())), - ) - ), - ), - ) - ) + 4, + StructWriter(( + (1, StringWriter()), + (2, StringWriter()), + (3, StructWriter(())), + (4, IntegerWriter()), + (5, IntegerWriter()), + (None, DefaultWriter(writer=IntegerWriter(), value=67108864)), + (6, OptionWriter(option=MapWriter(key_writer=IntegerWriter(), value_writer=IntegerWriter()))), + (7, OptionWriter(option=MapWriter(key_writer=IntegerWriter(), value_writer=IntegerWriter()))), + (8, OptionWriter(option=MapWriter(key_writer=IntegerWriter(), value_writer=IntegerWriter()))), + (9, 
OptionWriter(option=MapWriter(key_writer=IntegerWriter(), value_writer=IntegerWriter()))), + (10, OptionWriter(option=MapWriter(key_writer=IntegerWriter(), value_writer=BinaryWriter()))), + (11, OptionWriter(option=MapWriter(key_writer=IntegerWriter(), value_writer=BinaryWriter()))), + (12, OptionWriter(option=BinaryWriter())), + (13, OptionWriter(option=ListWriter(element_writer=IntegerWriter()))), + (15, OptionWriter(option=IntegerWriter())), + )), + ), + )) assert actual == expected diff --git a/tests/avro/test_writer.py b/tests/avro/test_writer.py index 991d9d1ae7..0bae9ece8c 100644 --- a/tests/avro/test_writer.py +++ b/tests/avro/test_writer.py @@ -178,17 +178,15 @@ class MyStruct(Record): construct_writer(schema).write(encoder, my_struct) - assert output.getbuffer() == b"".join( - [ - b"\x18", - zigzag_encode(len(my_struct.properties)), - zigzag_encode(1), - zigzag_encode(2), - zigzag_encode(3), - zigzag_encode(4), - b"\x00", - ] - ) + assert output.getbuffer() == b"".join([ + b"\x18", + zigzag_encode(len(my_struct.properties)), + zigzag_encode(1), + zigzag_encode(2), + zigzag_encode(3), + zigzag_encode(4), + b"\x00", + ]) def test_write_struct_with_list() -> None: @@ -208,17 +206,15 @@ class MyStruct(Record): construct_writer(schema).write(encoder, my_struct) - assert output.getbuffer() == b"".join( - [ - b"\x18", - zigzag_encode(len(my_struct.properties)), - zigzag_encode(1), - zigzag_encode(2), - zigzag_encode(3), - zigzag_encode(4), - b"\x00", - ] - ) + assert output.getbuffer() == b"".join([ + b"\x18", + zigzag_encode(len(my_struct.properties)), + zigzag_encode(1), + zigzag_encode(2), + zigzag_encode(3), + zigzag_encode(4), + b"\x00", + ]) def test_write_decimal() -> None: diff --git a/tests/catalog/test_base.py b/tests/catalog/test_base.py index c7cd1c4fa1..911c06b27a 100644 --- a/tests/catalog/test_base.py +++ b/tests/catalog/test_base.py @@ -88,20 +88,18 @@ def create_table( self.__namespaces[namespace] = {} new_location = location or f's3://warehouse/{"/".join(identifier)}/data' - metadata = TableMetadataV1( - **{ - "format-version": 1, - "table-uuid": "d20125c8-7284-442c-9aea-15fee620737c", - "location": new_location, - "last-updated-ms": 1602638573874, - "last-column-id": schema.highest_field_id, - "schema": schema.model_dump(), - "partition-spec": partition_spec.model_dump()["fields"], - "properties": properties, - "current-snapshot-id": -1, - "snapshots": [{"snapshot-id": 1925, "timestamp-ms": 1602638573822}], - } - ) + metadata = TableMetadataV1(**{ + "format-version": 1, + "table-uuid": "d20125c8-7284-442c-9aea-15fee620737c", + "location": new_location, + "last-updated-ms": 1602638573874, + "last-column-id": schema.highest_field_id, + "schema": schema.model_dump(), + "partition-spec": partition_spec.model_dump()["fields"], + "properties": properties, + "current-snapshot-id": -1, + "snapshots": [{"snapshot-id": 1925, "timestamp-ms": 1602638573822}], + }) table = Table( identifier=identifier, metadata=metadata, diff --git a/tests/catalog/test_dynamodb.py b/tests/catalog/test_dynamodb.py index f28b70d3ac..917a5d2937 100644 --- a/tests/catalog/test_dynamodb.py +++ b/tests/catalog/test_dynamodb.py @@ -16,6 +16,7 @@ # under the License. 
from typing import List +import boto3 import pytest from moto import mock_dynamodb @@ -308,7 +309,9 @@ def test_fail_on_rename_table_with_missing_required_params(_bucket_initialize: N @mock_dynamodb -def test_fail_on_rename_non_iceberg_table(_dynamodb, _bucket_initialize: None, database_name: str, table_name: str) -> None: # type: ignore +def test_fail_on_rename_non_iceberg_table( + _dynamodb: boto3.client, _bucket_initialize: None, database_name: str, table_name: str +) -> None: new_database_name = f"{database_name}_new" new_table_name = f"{table_name}_new" identifier = (database_name, table_name) diff --git a/tests/catalog/test_glue.py b/tests/catalog/test_glue.py index 2a33c8e23d..f84ed4ad20 100644 --- a/tests/catalog/test_glue.py +++ b/tests/catalog/test_glue.py @@ -17,6 +17,7 @@ from typing import Any, Dict, List from unittest import mock +import boto3 import pytest from moto import mock_glue @@ -273,7 +274,9 @@ def test_rename_table_from_self_identifier( @mock_glue -def test_rename_table_no_params(_glue, _bucket_initialize: None, moto_endpoint_url: str, database_name: str, table_name: str) -> None: # type: ignore +def test_rename_table_no_params( + _glue: boto3.client, _bucket_initialize: None, moto_endpoint_url: str, database_name: str, table_name: str +) -> None: new_database_name = f"{database_name}_new" new_table_name = f"{table_name}_new" identifier = (database_name, table_name) @@ -290,7 +293,9 @@ def test_rename_table_no_params(_glue, _bucket_initialize: None, moto_endpoint_u @mock_glue -def test_rename_non_iceberg_table(_glue, _bucket_initialize: None, moto_endpoint_url: str, database_name: str, table_name: str) -> None: # type: ignore +def test_rename_non_iceberg_table( + _glue: boto3.client, _bucket_initialize: None, moto_endpoint_url: str, database_name: str, table_name: str +) -> None: new_database_name = f"{database_name}_new" new_table_name = f"{table_name}_new" identifier = (database_name, table_name) diff --git a/tests/conftest.py b/tests/conftest.py index e54f1f54d6..07beba07e0 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -24,6 +24,7 @@ and the built-in pytest fixture request should be used as an additional argument in the function. The fixture can then be retrieved using `request.getfixturevalue(fixture_name)`. 
""" + import os import re import string diff --git a/tests/expressions/test_expressions.py b/tests/expressions/test_expressions.py index bce23bf39c..f277672d87 100644 --- a/tests/expressions/test_expressions.py +++ b/tests/expressions/test_expressions.py @@ -476,7 +476,8 @@ def test_less_than_or_equal_invert() -> None: ) def test_bind(pred: UnboundPredicate[Any], table_schema_simple: Schema) -> None: assert pred.bind(table_schema_simple, case_sensitive=True).term.field == table_schema_simple.find_field( # type: ignore - pred.term.name, case_sensitive=True # type: ignore + pred.term.name, # type: ignore + case_sensitive=True, ) @@ -495,7 +496,8 @@ def test_bind(pred: UnboundPredicate[Any], table_schema_simple: Schema) -> None: ) def test_bind_case_insensitive(pred: UnboundPredicate[Any], table_schema_simple: Schema) -> None: assert pred.bind(table_schema_simple, case_sensitive=False).term.field == table_schema_simple.find_field( # type: ignore - pred.term.name, case_sensitive=False # type: ignore + pred.term.name, # type: ignore + case_sensitive=False, ) diff --git a/tests/expressions/test_visitors.py b/tests/expressions/test_visitors.py index 50db90ceac..94bfcf076c 100644 --- a/tests/expressions/test_visitors.py +++ b/tests/expressions/test_visitors.py @@ -169,15 +169,11 @@ def visit_not_equal(self, term: BoundTerm[Any], literal: Literal[Any]) -> List[s self.visit_history.append("NOT_EQUAL") return self.visit_history - def visit_greater_than_or_equal( - self, term: BoundTerm[Any], literal: Literal[Any] - ) -> List[str]: # pylint: disable=redefined-outer-name + def visit_greater_than_or_equal(self, term: BoundTerm[Any], literal: Literal[Any]) -> List[str]: # pylint: disable=redefined-outer-name self.visit_history.append("GREATER_THAN_OR_EQUAL") return self.visit_history - def visit_greater_than( - self, term: BoundTerm[Any], literal: Literal[Any] - ) -> List[str]: # pylint: disable=redefined-outer-name + def visit_greater_than(self, term: BoundTerm[Any], literal: Literal[Any]) -> List[str]: # pylint: disable=redefined-outer-name self.visit_history.append("GREATER_THAN") return self.visit_history @@ -185,9 +181,7 @@ def visit_less_than(self, term: BoundTerm[Any], literal: Literal[Any]) -> List[s self.visit_history.append("LESS_THAN") return self.visit_history - def visit_less_than_or_equal( - self, term: BoundTerm[Any], literal: Literal[Any] - ) -> List[str]: # pylint: disable=redefined-outer-name + def visit_less_than_or_equal(self, term: BoundTerm[Any], literal: Literal[Any]) -> List[str]: # pylint: disable=redefined-outer-name self.visit_history.append("LESS_THAN_OR_EQUAL") return self.visit_history diff --git a/tests/io/test_pyarrow.py b/tests/io/test_pyarrow.py index be5f68e429..7a1868f92a 100644 --- a/tests/io/test_pyarrow.py +++ b/tests/io/test_pyarrow.py @@ -447,13 +447,11 @@ def test_binary_type_to_pyarrow() -> None: def test_struct_type_to_pyarrow(table_schema_simple: Schema) -> None: - expected = pa.struct( - [ - pa.field("foo", pa.string(), nullable=True, metadata={"field_id": "1"}), - pa.field("bar", pa.int32(), nullable=False, metadata={"field_id": "2"}), - pa.field("baz", pa.bool_(), nullable=True, metadata={"field_id": "3"}), - ] - ) + expected = pa.struct([ + pa.field("foo", pa.string(), nullable=True, metadata={"field_id": "1"}), + pa.field("bar", pa.int32(), nullable=False, metadata={"field_id": "2"}), + pa.field("baz", pa.bool_(), nullable=True, metadata={"field_id": "3"}), + ]) assert visit(table_schema_simple.as_struct(), _ConvertToArrowSchema()) == expected diff --git 
a/tests/io/test_pyarrow_visitor.py b/tests/io/test_pyarrow_visitor.py index 5194d8660e..786b5ea1d3 100644 --- a/tests/io/test_pyarrow_visitor.py +++ b/tests/io/test_pyarrow_visitor.py @@ -207,13 +207,11 @@ def test_pyarrow_variable_binary_to_iceberg() -> None: def test_pyarrow_struct_to_iceberg() -> None: - pyarrow_struct = pa.struct( - [ - pa.field("foo", pa.string(), nullable=True, metadata={"field_id": "1", "doc": "foo doc"}), - pa.field("bar", pa.int32(), nullable=False, metadata={"field_id": "2"}), - pa.field("baz", pa.bool_(), nullable=True, metadata={"field_id": "3"}), - ] - ) + pyarrow_struct = pa.struct([ + pa.field("foo", pa.string(), nullable=True, metadata={"field_id": "1", "doc": "foo doc"}), + pa.field("bar", pa.int32(), nullable=False, metadata={"field_id": "2"}), + pa.field("baz", pa.bool_(), nullable=True, metadata={"field_id": "3"}), + ]) expected = StructType( NestedField(field_id=1, name="foo", field_type=StringType(), required=False, doc="foo doc"), NestedField(field_id=2, name="bar", field_type=IntegerType(), required=True), diff --git a/tests/table/test_init.py b/tests/table/test_init.py index 547990c4c8..925e98d1eb 100644 --- a/tests/table/test_init.py +++ b/tests/table/test_init.py @@ -424,7 +424,9 @@ def test_match_deletes_to_datafile_duplicate_number() -> None: def test_serialize_set_properties_updates() -> None: - assert SetPropertiesUpdate(updates={"abc": "🤪"}).model_dump_json() == """{"action":"set-properties","updates":{"abc":"🤪"}}""" + assert ( + SetPropertiesUpdate(updates={"abc": "🤪"}).model_dump_json() == """{"action":"set-properties","updates":{"abc":"🤪"}}""" + ) def test_add_column(table_v2: Table) -> None: @@ -535,18 +537,14 @@ def test_apply_add_schema_update(table_v2: Table) -> None: test_context = _TableMetadataUpdateContext() - new_table_metadata = _apply_table_update( - transaction._updates[0], base_metadata=table_v2.metadata, context=test_context - ) # pylint: disable=W0212 + new_table_metadata = _apply_table_update(transaction._updates[0], base_metadata=table_v2.metadata, context=test_context) # pylint: disable=W0212 assert len(new_table_metadata.schemas) == 3 assert new_table_metadata.current_schema_id == 1 assert len(test_context._updates) == 1 assert test_context._updates[0] == transaction._updates[0] # pylint: disable=W0212 assert test_context.is_added_schema(2) - new_table_metadata = _apply_table_update( - transaction._updates[1], base_metadata=new_table_metadata, context=test_context - ) # pylint: disable=W0212 + new_table_metadata = _apply_table_update(transaction._updates[1], base_metadata=new_table_metadata, context=test_context) # pylint: disable=W0212 assert len(new_table_metadata.schemas) == 3 assert new_table_metadata.current_schema_id == 2 assert len(test_context._updates) == 2 diff --git a/tests/table/test_name_mapping.py b/tests/table/test_name_mapping.py index 37111a5e3e..d74aa3234c 100644 --- a/tests/table/test_name_mapping.py +++ b/tests/table/test_name_mapping.py @@ -22,51 +22,49 @@ @pytest.fixture(scope="session") def table_name_mapping_nested() -> NameMapping: - return NameMapping( - [ - MappedField(field_id=1, names=['foo']), - MappedField(field_id=2, names=['bar']), - MappedField(field_id=3, names=['baz']), - MappedField(field_id=4, names=['qux'], fields=[MappedField(field_id=5, names=['element'])]), - MappedField( - field_id=6, - names=['quux'], - fields=[ - MappedField(field_id=7, names=['key']), - MappedField( - field_id=8, - names=['value'], - fields=[ - MappedField(field_id=9, names=['key']), - 
MappedField(field_id=10, names=['value']), - ], - ), - ], - ), - MappedField( - field_id=11, - names=['location'], - fields=[ - MappedField( - field_id=12, - names=['element'], - fields=[ - MappedField(field_id=13, names=['latitude']), - MappedField(field_id=14, names=['longitude']), - ], - ) - ], - ), - MappedField( - field_id=15, - names=['person'], - fields=[ - MappedField(field_id=16, names=['name']), - MappedField(field_id=17, names=['age']), - ], - ), - ] - ) + return NameMapping([ + MappedField(field_id=1, names=['foo']), + MappedField(field_id=2, names=['bar']), + MappedField(field_id=3, names=['baz']), + MappedField(field_id=4, names=['qux'], fields=[MappedField(field_id=5, names=['element'])]), + MappedField( + field_id=6, + names=['quux'], + fields=[ + MappedField(field_id=7, names=['key']), + MappedField( + field_id=8, + names=['value'], + fields=[ + MappedField(field_id=9, names=['key']), + MappedField(field_id=10, names=['value']), + ], + ), + ], + ), + MappedField( + field_id=11, + names=['location'], + fields=[ + MappedField( + field_id=12, + names=['element'], + fields=[ + MappedField(field_id=13, names=['latitude']), + MappedField(field_id=14, names=['longitude']), + ], + ) + ], + ), + MappedField( + field_id=15, + names=['person'], + fields=[ + MappedField(field_id=16, names=['name']), + MappedField(field_id=17, names=['age']), + ], + ), + ]) def test_json_mapped_field_deserialization() -> None: @@ -127,20 +125,18 @@ def test_json_name_mapping_deserialization() -> None: ] """ - assert parse_mapping_from_json(name_mapping) == NameMapping( - [ - MappedField(field_id=1, names=['id', 'record_id']), - MappedField(field_id=2, names=['data']), - MappedField( - names=['location'], - field_id=3, - fields=[ - MappedField(field_id=4, names=['latitude', 'lat']), - MappedField(field_id=5, names=['longitude', 'long']), - ], - ), - ] - ) + assert parse_mapping_from_json(name_mapping) == NameMapping([ + MappedField(field_id=1, names=['id', 'record_id']), + MappedField(field_id=2, names=['data']), + MappedField( + names=['location'], + field_id=3, + fields=[ + MappedField(field_id=4, names=['latitude', 'lat']), + MappedField(field_id=5, names=['longitude', 'long']), + ], + ), + ]) def test_json_serialization(table_name_mapping_nested: NameMapping) -> None: @@ -151,20 +147,18 @@ def test_json_serialization(table_name_mapping_nested: NameMapping) -> None: def test_name_mapping_to_string() -> None: - nm = NameMapping( - [ - MappedField(field_id=1, names=['id', 'record_id']), - MappedField(field_id=2, names=['data']), - MappedField( - names=['location'], - field_id=3, - fields=[ - MappedField(field_id=4, names=['lat', 'latitude']), - MappedField(field_id=5, names=['long', 'longitude']), - ], - ), - ] - ) + nm = NameMapping([ + MappedField(field_id=1, names=['id', 'record_id']), + MappedField(field_id=2, names=['data']), + MappedField( + names=['location'], + field_id=3, + fields=[ + MappedField(field_id=4, names=['lat', 'latitude']), + MappedField(field_id=5, names=['long', 'longitude']), + ], + ), + ]) assert ( str(nm) diff --git a/tests/test_conversions.py b/tests/test_conversions.py index 3b3e519579..f57998aa4e 100644 --- a/tests/test_conversions.py +++ b/tests/test_conversions.py @@ -71,6 +71,7 @@ - -4.5F is -1 * 2ˆ2 * 1.125 and encoded as 11000000|10010000|0...0 in binary - 00000000 -> 0, 00000000 -> 0, 10010000 -> 144 (-112), 11000000 -> 192 (-64), """ + import struct import uuid from datetime import ( diff --git a/tests/test_transforms.py b/tests/test_transforms.py index 
ed0c495a14..d7a1478bc4 100644 --- a/tests/test_transforms.py +++ b/tests/test_transforms.py @@ -724,9 +724,7 @@ def test_projection_day_human(bound_reference_date: BoundReference[int]) -> None assert DayTransform().project( "dt", BoundGreaterThanOrEqual(term=bound_reference_date, literal=date_literal) - ) == GreaterThanOrEqual( - term="dt", literal=17532 - ) # >= 2018, 1, 1 + ) == GreaterThanOrEqual(term="dt", literal=17532) # >= 2018, 1, 1 assert DayTransform().project("dt", BoundGreaterThan(term=bound_reference_date, literal=date_literal)) == GreaterThanOrEqual( term="dt", literal=17533 From d0a70c4f25400986907f9d8c37dccab0eb78f9f9 Mon Sep 17 00:00:00 2001 From: Hussein Awala Date: Fri, 5 Jan 2024 22:41:44 +0100 Subject: [PATCH 010/169] Fix lint tests failed in main (#253) --- tests/table/test_init.py | 96 +++++++++++++++++++--------------------- 1 file changed, 46 insertions(+), 50 deletions(-) diff --git a/tests/table/test_init.py b/tests/table/test_init.py index 925e98d1eb..475814a051 100644 --- a/tests/table/test_init.py +++ b/tests/table/test_init.py @@ -850,56 +850,52 @@ def test_assert_default_sort_order_id(table_v2: Table) -> None: def test_correct_schema() -> None: - table_metadata = TableMetadataV2( - **{ - "format-version": 2, - "table-uuid": "9c12d441-03fe-4693-9a96-a0705ddf69c1", - "location": "s3://bucket/test/location", - "last-sequence-number": 34, - "last-updated-ms": 1602638573590, - "last-column-id": 3, - "current-schema-id": 1, - "schemas": [ - {"type": "struct", "schema-id": 0, "fields": [{"id": 1, "name": "x", "required": True, "type": "long"}]}, - { - "type": "struct", - "schema-id": 1, - "identifier-field-ids": [1, 2], - "fields": [ - {"id": 1, "name": "x", "required": True, "type": "long"}, - {"id": 2, "name": "y", "required": True, "type": "long"}, - {"id": 3, "name": "z", "required": True, "type": "long"}, - ], - }, - ], - "default-spec-id": 0, - "partition-specs": [ - {"spec-id": 0, "fields": [{"name": "x", "transform": "identity", "source-id": 1, "field-id": 1000}]} - ], - "last-partition-id": 1000, - "default-sort-order-id": 0, - "sort-orders": [], - "current-snapshot-id": 123, - "snapshots": [ - { - "snapshot-id": 234, - "timestamp-ms": 1515100955770, - "sequence-number": 0, - "summary": {"operation": "append"}, - "manifest-list": "s3://a/b/1.avro", - "schema-id": 10, - }, - { - "snapshot-id": 123, - "timestamp-ms": 1515100955770, - "sequence-number": 0, - "summary": {"operation": "append"}, - "manifest-list": "s3://a/b/1.avro", - "schema-id": 0, - }, - ], - } - ) + table_metadata = TableMetadataV2(**{ + "format-version": 2, + "table-uuid": "9c12d441-03fe-4693-9a96-a0705ddf69c1", + "location": "s3://bucket/test/location", + "last-sequence-number": 34, + "last-updated-ms": 1602638573590, + "last-column-id": 3, + "current-schema-id": 1, + "schemas": [ + {"type": "struct", "schema-id": 0, "fields": [{"id": 1, "name": "x", "required": True, "type": "long"}]}, + { + "type": "struct", + "schema-id": 1, + "identifier-field-ids": [1, 2], + "fields": [ + {"id": 1, "name": "x", "required": True, "type": "long"}, + {"id": 2, "name": "y", "required": True, "type": "long"}, + {"id": 3, "name": "z", "required": True, "type": "long"}, + ], + }, + ], + "default-spec-id": 0, + "partition-specs": [{"spec-id": 0, "fields": [{"name": "x", "transform": "identity", "source-id": 1, "field-id": 1000}]}], + "last-partition-id": 1000, + "default-sort-order-id": 0, + "sort-orders": [], + "current-snapshot-id": 123, + "snapshots": [ + { + "snapshot-id": 234, + "timestamp-ms": 
1515100955770, + "sequence-number": 0, + "summary": {"operation": "append"}, + "manifest-list": "s3://a/b/1.avro", + "schema-id": 10, + }, + { + "snapshot-id": 123, + "timestamp-ms": 1515100955770, + "sequence-number": 0, + "summary": {"operation": "append"}, + "manifest-list": "s3://a/b/1.avro", + "schema-id": 0, + }, + ], + }) t = Table( identifier=("default", "t1"), From 3d844442346f85ec34eab13a00700dc3d903de7d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 9 Jan 2024 06:59:03 -0800 Subject: [PATCH 011/169] Build: Bump moto from 4.2.12 to 4.2.13 (#257) Bumps [moto](https://github.com/getmoto/moto) from 4.2.12 to 4.2.13. - [Release notes](https://github.com/getmoto/moto/releases) - [Changelog](https://github.com/getmoto/moto/blob/master/CHANGELOG.md) - [Commits](https://github.com/getmoto/moto/compare/4.2.12...4.2.13) --- updated-dependencies: - dependency-name: moto dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 8 ++++---- pyproject.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index b77c97affb..80d9cda962 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1929,13 +1929,13 @@ files = [ [[package]] name = "moto" -version = "4.2.12" +version = "4.2.13" description = "" optional = false python-versions = ">=3.7" files = [ - {file = "moto-4.2.12-py2.py3-none-any.whl", hash = "sha256:bdcad46e066a55b7d308a786e5dca863b3cba04c6239c6974135a48d1198b3ab"}, - {file = "moto-4.2.12.tar.gz", hash = "sha256:7c4d37f47becb4a0526b64df54484e988c10fde26861fc3b5c065bc78800cb59"}, + {file = "moto-4.2.13-py2.py3-none-any.whl", hash = "sha256:93e0fd13b624bd79115494f833308c3641b2be0fc9f4f18aa9264aa01f6168e0"}, + {file = "moto-4.2.13.tar.gz", hash = "sha256:01aef6a489a725c8d725bd3dc6f70ff1bedaee3e2641752e4b471ff0ede4b4d7"}, ] [package.dependencies] @@ -4182,4 +4182,4 @@ zstandard = ["zstandard"] [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "095bbebf0fd3d259025d05972f34aae6368b854ed59eb6567ef2c6da9f0da09f" +content-hash = "6ebb619f12461b94abd8264a577a2484dc2069b5f37ac65dfaba457b61b3dfa3" diff --git a/pyproject.toml b/pyproject.toml index 6fb8d261eb..38d1287f49 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -79,7 +79,7 @@ pre-commit = "3.5.0" fastavro = "1.9.2" coverage = { version = "^7.4.0", extras = ["toml"] } requests-mock = "1.11.0" -moto = { version = "^4.2.12", extras = ["server"] } +moto = { version = "^4.2.13", extras = ["server"] } typing-extensions = "4.9.0" pytest-mock = "3.12.0" cython = "3.0.7" From 67c07f2ea745ac45ff7930ecd6fc5b98b88b5328 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 9 Jan 2024 06:59:39 -0800 Subject: [PATCH 012/169] Build: Bump mkdocstrings-python from 1.7.5 to 1.8.0 (#256) Bumps [mkdocstrings-python](https://github.com/mkdocstrings/python) from 1.7.5 to 1.8.0. - [Release notes](https://github.com/mkdocstrings/python/releases) - [Changelog](https://github.com/mkdocstrings/python/blob/main/CHANGELOG.md) - [Commits](https://github.com/mkdocstrings/python/compare/1.7.5...1.8.0) --- updated-dependencies: - dependency-name: mkdocstrings-python dependency-type: direct:production update-type: version-update:semver-minor ... 
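(An illustrative aside on the DayTransform projection hunk earlier in this series: the epoch-day literals can be sanity-checked with the standard library alone. This sketch is not part of any patch.)

# 17532 is the number of days from the Unix epoch to 2018-01-01, so
# `>= 2018-01-01` projects to GreaterThanOrEqual(literal=17532), and the
# strict `> 2018-01-01` becomes `>= 17533`, i.e. the next day.
from datetime import date

EPOCH = date(1970, 1, 1)
assert (date(2018, 1, 1) - EPOCH).days == 17532
assert (date(2018, 1, 2) - EPOCH).days == 17533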
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- mkdocs/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mkdocs/requirements.txt b/mkdocs/requirements.txt index 3692b7c92d..8b128bdfef 100644 --- a/mkdocs/requirements.txt +++ b/mkdocs/requirements.txt @@ -19,7 +19,7 @@ mkdocs==1.5.3 griffe==0.38.1 jinja2==3.1.2 mkdocstrings==0.24.0 -mkdocstrings-python==1.7.5 +mkdocstrings-python==1.8.0 mkdocs-literate-nav==0.6.1 mkdocs-autorefs==0.5.0 mkdocs-gen-files==0.5.0 From feafbf0c05c779d1e0287857188343ea6c48a04e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 9 Jan 2024 15:36:04 -0800 Subject: [PATCH 013/169] Build: Bump fastavro from 1.9.2 to 1.9.3 (#258) Bumps [fastavro](https://github.com/fastavro/fastavro) from 1.9.2 to 1.9.3. - [Release notes](https://github.com/fastavro/fastavro/releases) - [Changelog](https://github.com/fastavro/fastavro/blob/master/ChangeLog) - [Commits](https://github.com/fastavro/fastavro/compare/1.9.2...1.9.3) --- updated-dependencies: - dependency-name: fastavro dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 66 +++++++++++++++++++++++++------------------------- pyproject.toml | 2 +- 2 files changed, 34 insertions(+), 34 deletions(-) diff --git a/poetry.lock b/poetry.lock index 80d9cda962..03d755f88b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -939,42 +939,42 @@ test = ["pytest (>=6)"] [[package]] name = "fastavro" -version = "1.9.2" +version = "1.9.3" description = "Fast read/write of AVRO files" optional = false python-versions = ">=3.8" files = [ - {file = "fastavro-1.9.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:223cecf135fd29b83ca6a30035b15b8db169aeaf8dc4f9a5d34afadc4b31638a"}, - {file = "fastavro-1.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e08c9be8c6f7eed2cf30f8b64d50094cba38a81b751c7db9f9c4be2656715259"}, - {file = "fastavro-1.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:394f06cc865c6fbae3bbca323633a28a5d914c55dc2c1cdefb75432456ef8f6f"}, - {file = "fastavro-1.9.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7a7caadd47bdd04bda534ff70b4b98d2823800c488fd911918115aec4c4dc09b"}, - {file = "fastavro-1.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:68478a1b8a583d83ad6550e9dceac6cbb148a99a52c3559a0413bf4c0b9c8786"}, - {file = "fastavro-1.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:b59a1123f1d534743af33fdbda80dd7b9146685bdd7931eae12bee6203065222"}, - {file = "fastavro-1.9.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:887c20dc527a549764c91f9e48ece071f2f26d217af66ebcaeb87bf29578fee5"}, - {file = "fastavro-1.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46458f78b481c12db62d3d8a81bae09cb0b5b521c0d066c6856fc2746908d00d"}, - {file = "fastavro-1.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f4a2a4bed0e829f79fa1e4f172d484b2179426e827bcc80c0069cc81328a5af"}, - {file = "fastavro-1.9.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6167f9bbe1c5a28fbc2db767f97dbbb4981065e6eeafd4e613f6fe76c576ffd4"}, - {file = "fastavro-1.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d574bc385f820da0404528157238de4e5fdd775d2cb3d05b3b0f1b475d493837"}, - {file = 
"fastavro-1.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:ec600eb15b3ec931904c5bf8da62b3b725cb0f369add83ba47d7b5e9322f92a0"}, - {file = "fastavro-1.9.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:c82b0761503420cd45f7f50bc31975ac1c75b5118e15434c1d724b751abcc249"}, - {file = "fastavro-1.9.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db62d9b8c944b8d9c481e5f980d5becfd034bdd58c72e27c9333bd504b06bda0"}, - {file = "fastavro-1.9.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65e61f040bc9494646f42a466e9cd428783b82d7161173f3296710723ba5a453"}, - {file = "fastavro-1.9.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:6278b93cdd5bef1778c0232ce1f265137f90bc6be97a5c1dd7e0d99a406c0488"}, - {file = "fastavro-1.9.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:cd003ddea5d89720194b6e57011c37221d9fc4ddc750e6f4723516eb659be686"}, - {file = "fastavro-1.9.2-cp312-cp312-win_amd64.whl", hash = "sha256:43f09d100a26e8b59f30dde664d93e423b648e008abfc43132608a18fe8ddcc2"}, - {file = "fastavro-1.9.2-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:3ddffeff5394f285c69f9cd481f47b6cf62379840cdbe6e0dc74683bd589b56e"}, - {file = "fastavro-1.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e75a2b2ec697d2058a7d96522e921f03f174cf9049ace007c24be7ab58c5370"}, - {file = "fastavro-1.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fd2e8fd0567483eb0fdada1b979ad4d493305dfdd3f351c82a87df301f0ae1f"}, - {file = "fastavro-1.9.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c652dbe3f087c943a5b89f9a50a574e64f23790bfbec335ce2b91a2ae354a443"}, - {file = "fastavro-1.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bba73e9a1822162f1b3a43de0362f29880014c5c4d49d63ad7fcce339ef73ea2"}, - {file = "fastavro-1.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:beeef2964bbfd09c539424808539b956d7425afbb7055b89e2aa311374748b56"}, - {file = "fastavro-1.9.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:d5fa48266d75e057b27d8586b823d6d7d7c94593fd989d75033eb4c8078009fb"}, - {file = "fastavro-1.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b69aeb0d063f5955a0e412f9779444fc452568a49db75a90a8d372f9cb4a01c8"}, - {file = "fastavro-1.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ce336c59fb40fdb8751bda8cc6076cfcdf9767c3c107f6049e049166b26c61f"}, - {file = "fastavro-1.9.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:581036e18661f045415a51ad528865e1d7ba5a9690a3dede9e6ea50f94ed6c4c"}, - {file = "fastavro-1.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:39b6b5c3cda569c0a130fd2d08d4c53a326ede7e05174a24eda08f7698f70eda"}, - {file = "fastavro-1.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:d33e40f246bf07f106f9d2da68d0234efcc62276b6e35bde00ff920ea7f871fd"}, - {file = "fastavro-1.9.2.tar.gz", hash = "sha256:5c1ffad986200496bd69b5c4748ae90b5d934d3b1456f33147bee3a0bb17f89b"}, + {file = "fastavro-1.9.3-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:5e9b2e1427fb84c0754bc34923d10cabcf2ed23230201208a1371ab7b6027674"}, + {file = "fastavro-1.9.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4ef82f86ae276309abc0072598474b6be68105a0b28f8d7cc0398d1d353d7de"}, + {file = "fastavro-1.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:280ef7ab7232ecb2097038d6842416ec717d0e1c314b80ff245f85201f3396a4"}, + {file = "fastavro-1.9.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:4a36cfc0421ed7576ecb1c22de7bd1dedcce62aebbffcc597379d59171e5d76e"}, + {file = "fastavro-1.9.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d80f2e20199140eb8c036b4393e9bc9eff325543311b958c72318999499d4279"}, + {file = "fastavro-1.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:a435f7edd7c5b52cee3f23ca950cd9373ab35cf2aa3d269b3d6aca7e2fc1372c"}, + {file = "fastavro-1.9.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2a7053ed10194ec53754f5337b57b3273a74b48505edcd6edb79fe3c4cd259c0"}, + {file = "fastavro-1.9.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:853e01f13534d1baa0a3d493a8573e665e93ffa35b4bf1d125e21764d343af8e"}, + {file = "fastavro-1.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a5a279cda25d876e6f120950cadf184a307fd8998f9a22a90bb62e6749f88d1e"}, + {file = "fastavro-1.9.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:63d6f928840f3fb1f2e1fe20bc8b7d0e1a51ba4bb0e554ecb837a669fba31288"}, + {file = "fastavro-1.9.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8807046edc78f50b3ea5f55f6a534c87b2a13538e7c56fec3532ef802bcae333"}, + {file = "fastavro-1.9.3-cp311-cp311-win_amd64.whl", hash = "sha256:e502579da4a51c5630eadbd811a1b3d262d6e783bf19998cfb33d2ea0cf6f516"}, + {file = "fastavro-1.9.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:6b665efe442061df8d9608c2fb692847df85d52ad825b776c441802f0dfa6571"}, + {file = "fastavro-1.9.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b8c96d81f0115633489d7f1133a03832922629a61ca81c1d47b482ddcda3b94"}, + {file = "fastavro-1.9.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:338c7ec94dd2474c4679e44d2560a1922cb6fa99acbb7b18957264baf8eadfc7"}, + {file = "fastavro-1.9.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a509b34c9af71a109c633631ac2f6d2209830e13200d0048f7e9c057fd563f8f"}, + {file = "fastavro-1.9.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:967edefab470987c024cd5a1fcd04744a50a91e740c7bdf325181043a47f1083"}, + {file = "fastavro-1.9.3-cp312-cp312-win_amd64.whl", hash = "sha256:033c15e8ed02f80f01d58be1cd880b09fd444faf277263d563a727711d47a98a"}, + {file = "fastavro-1.9.3-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:6b38723327603d77080aec56628e13a739415f8596ca0cc41a905615977c6d6b"}, + {file = "fastavro-1.9.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:046d75c4400941fd08f0a6855a34ae63bf02ea01f366b5b749942abe10640056"}, + {file = "fastavro-1.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87ab312b8baf0e61ee717878d390022ee1b713d70b244d69efbf3325680f9749"}, + {file = "fastavro-1.9.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c562fcf8f5091a2446aafd0c2a0da590c24e0b53527a0100d33908e32f20eea8"}, + {file = "fastavro-1.9.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2aa0111e7ebd076d2a094862bbdf8ea175cebba148fcce6c89ff46b625e334b4"}, + {file = "fastavro-1.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:652072e0f455ca19a1ee502b527e603389783657c130d81f89df66775979d6f5"}, + {file = "fastavro-1.9.3-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:0a57cdd4edaee36d4216faf801ebc7f53f45e4e1518bdd9832d6f6f1d6e2d88f"}, + {file = "fastavro-1.9.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b46a18ebed61573b0823c28eda2716485d283258a83659c7fe6ad3aaeacfed4"}, + {file = "fastavro-1.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:5f756f0723f3bd97db20437d0a8e45712839e6ccd7c82f4d82469533be48b4c7"}, + {file = "fastavro-1.9.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d98d5a08063f5b6d7ac5016a0dfe0698b50d9987cb74686f7dfa8288b7b09e0b"}, + {file = "fastavro-1.9.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:00698e60db58a2d52cb709df882d451fb7664ebb2f8cb37d9171697e060dc767"}, + {file = "fastavro-1.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:d021bbc135023194688e88a7431fb0b5e3ce20e27153bf258f2ce08ee1a0106b"}, + {file = "fastavro-1.9.3.tar.gz", hash = "sha256:a30d3d2353f6d3b4f6dcd6a97ae937b3775faddd63f5856fe11ba3b0dbb1756a"}, ] [package.extras] @@ -4182,4 +4182,4 @@ zstandard = ["zstandard"] [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "6ebb619f12461b94abd8264a577a2484dc2069b5f37ac65dfaba457b61b3dfa3" +content-hash = "2ddd248af109304f36e8290cea198df3cf22947197c1742d3bd808f6b2b0c938" diff --git a/pyproject.toml b/pyproject.toml index 38d1287f49..4d722c1dd6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -76,7 +76,7 @@ pytest = "7.4.4" pytest-checkdocs = "2.10.1" pytest-lazy-fixture = "0.6.3" pre-commit = "3.5.0" -fastavro = "1.9.2" +fastavro = "1.9.3" coverage = { version = "^7.4.0", extras = ["toml"] } requests-mock = "1.11.0" moto = { version = "^4.2.13", extras = ["server"] } From 0c3fc4d13646175ce48eae27997272b46f632961 Mon Sep 17 00:00:00 2001 From: Sung Yun <107272191+syun64@users.noreply.github.com> Date: Wed, 10 Jan 2024 12:48:01 -0500 Subject: [PATCH 014/169] Cast env var `s3.connect-timeout` to float (#259) --- pyiceberg/io/fsspec.py | 2 +- pyiceberg/io/pyarrow.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/pyiceberg/io/fsspec.py b/pyiceberg/io/fsspec.py index cfb14f240f..6887007c7f 100644 --- a/pyiceberg/io/fsspec.py +++ b/pyiceberg/io/fsspec.py @@ -130,7 +130,7 @@ def _s3(properties: Properties) -> AbstractFileSystem: config_kwargs["proxies"] = {"http": proxy_uri, "https": proxy_uri} if connect_timeout := properties.get(S3_CONNECT_TIMEOUT): - config_kwargs["connect_timeout"] = connect_timeout + config_kwargs["connect_timeout"] = float(connect_timeout) fs = S3FileSystem(client_kwargs=client_kwargs, config_kwargs=config_kwargs) diff --git a/pyiceberg/io/pyarrow.py b/pyiceberg/io/pyarrow.py index f1551133be..9faf4cec56 100644 --- a/pyiceberg/io/pyarrow.py +++ b/pyiceberg/io/pyarrow.py @@ -321,7 +321,7 @@ def _initialize_fs(self, scheme: str, netloc: Optional[str] = None) -> FileSyste if scheme in {"s3", "s3a", "s3n"}: from pyarrow.fs import S3FileSystem - client_kwargs = { + client_kwargs: Dict[str, Any] = { "endpoint_override": self.properties.get(S3_ENDPOINT), "access_key": self.properties.get(S3_ACCESS_KEY_ID), "secret_key": self.properties.get(S3_SECRET_ACCESS_KEY), @@ -333,7 +333,7 @@ def _initialize_fs(self, scheme: str, netloc: Optional[str] = None) -> FileSyste client_kwargs["proxy_options"] = proxy_uri if connect_timeout := self.properties.get(S3_CONNECT_TIMEOUT): - client_kwargs["connect_timeout"] = connect_timeout + client_kwargs["connect_timeout"] = float(connect_timeout) return S3FileSystem(**client_kwargs) elif scheme == "hdfs": From 0055bd02167b39ebead0827edfc54935bde4b878 Mon Sep 17 00:00:00 2001 From: Andre Luis Anastacio Date: Thu, 11 Jan 2024 17:14:59 -0300 Subject: [PATCH 015/169] Update feature support documentation (#261) --- mkdocs/docs/feature-support.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mkdocs/docs/feature-support.md b/mkdocs/docs/feature-support.md index 
9c466c9337..535b34e128 100644 --- a/mkdocs/docs/feature-support.md +++ b/mkdocs/docs/feature-support.md @@ -36,8 +36,8 @@ The goal is that the python library will provide a functional, performant subset | Create Table | X | X | | Rename Table | X | X | | Drop Table | X | X | -| Alter Table | X | | -| Set Table Properties | X | | +| Alter Table | X | X | +| Set Table Properties | X | X | | Create Namespace | X | X | | Drop Namespace | X | X | | Set Namespace Properties | X | X | From d9987d2897f3bd83e478c5afadb8e5e0b58c8c19 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 11 Jan 2024 12:28:35 -0800 Subject: [PATCH 016/169] Build: Bump cython from 3.0.7 to 3.0.8 (#260) Bumps [cython](https://github.com/cython/cython) from 3.0.7 to 3.0.8. - [Release notes](https://github.com/cython/cython/releases) - [Changelog](https://github.com/cython/cython/blob/master/CHANGES.rst) - [Commits](https://github.com/cython/cython/compare/3.0.7...3.0.8) --- updated-dependencies: - dependency-name: cython dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 113 ++++++++++++++++++++++++++----------------------- pyproject.toml | 2 +- 2 files changed, 61 insertions(+), 54 deletions(-) diff --git a/poetry.lock b/poetry.lock index 03d755f88b..3a22111bd7 100644 --- a/poetry.lock +++ b/poetry.lock @@ -745,62 +745,69 @@ test-randomorder = ["pytest-randomly"] [[package]] name = "cython" -version = "3.0.7" +version = "3.0.8" description = "The Cython compiler for writing C extensions in the Python language." optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ - {file = "Cython-3.0.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3c0e19bb41de6be9d8afc85795159ca16296be81a586cd9588be0400d44a855"}, - {file = "Cython-3.0.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e8bf00ec1dd1d92e9ae74d2e6891f087a939e1dfb40c9c7fa5d8d6a26c94f5a"}, - {file = "Cython-3.0.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd6ae43ef2e596c9a88dbf2a8895be2e32cc2f5bc3c8ba2e7753b69068fc0b2d"}, - {file = "Cython-3.0.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f674be92673e87dd8ee7cfe553d5960ec4effc5ab15063b9a5e265a51585a31a"}, - {file = "Cython-3.0.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:861cf254bf5836d47c2aee86aa75dd93d3de00ccd1b077c3c7a2bb22cba358e7"}, - {file = "Cython-3.0.7-cp310-cp310-win32.whl", hash = "sha256:f6d8ff62ad55dc0393686438eac4b457a916e4d1118a0b550746bb52b4c756cc"}, - {file = "Cython-3.0.7-cp310-cp310-win_amd64.whl", hash = "sha256:e13abb14843397b76d0472c7d33cd260d5f262ab05cc27ed423317e645e29643"}, - {file = "Cython-3.0.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0c636c9ab92c7838231a1ba769e519d953af8294612f3f772a54d3a5250ff23f"}, - {file = "Cython-3.0.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22d2a684122dfb531853d57c8c85c1d5d44be709e12466dca99fa6aee7d8054f"}, - {file = "Cython-3.0.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1bdf8a107fdf9e174991aa87a0be7504f60de1ec6bfb1ccfb30e33acac818a0"}, - {file = "Cython-3.0.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:3a83e04fde663b84905f3a20213a4333d13a07b79434300704b70dc552761f8b"}, - {file = "Cython-3.0.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e34b4b08d795ccca920fa26b099558f4f1e4e3f794e4ba8d3433c5bc2454d50a"}, - {file = "Cython-3.0.7-cp311-cp311-win32.whl", hash = "sha256:133057ac45b6fa7fe5d7baada9d3545d09339432f75c0545f556e8c6fecc2932"}, - {file = "Cython-3.0.7-cp311-cp311-win_amd64.whl", hash = "sha256:b65abca78aa5ebc8675c8480b9a53006f6efea9910ad099cf32c9fb5617ef251"}, - {file = "Cython-3.0.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23ceac5315fe899c229e874328742154e331fa41337bb03f6f5264636c351c9e"}, - {file = "Cython-3.0.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ea936cf5931297ba07bce121388c4c6266c1b63a9f4d648ae16c92ff090204b"}, - {file = "Cython-3.0.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9fcd9a18ee3ac7f460e0841954feb495102ffbdbec0e6c78562f3495cda000dd"}, - {file = "Cython-3.0.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7c8d579d13cb81abe704c8b0908d122b81d6e2623265a19c4a6a7377f440debb"}, - {file = "Cython-3.0.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ef5bb0268bfe5992da3ef9292463a5a895ed8700b134ed2c00008d5471b3ba6e"}, - {file = "Cython-3.0.7-cp312-cp312-win32.whl", hash = "sha256:55f93d3822bc196b37a8bdfa4ec6a35232a399e97f2baa714bd5ed8ea9b0ce68"}, - {file = "Cython-3.0.7-cp312-cp312-win_amd64.whl", hash = "sha256:f3845c4506e0d207c5e268fb02813928f3a1e135de954a379f165ef0d581da47"}, - {file = "Cython-3.0.7-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8ad7c2303a338b2c0b6c6c68f101a6768725934538756096cf3388a5c07a7525"}, - {file = "Cython-3.0.7-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fed25959e4025870fdde5f895fcb126196d22affd4f4fad85a2823e0dddc85b0"}, - {file = "Cython-3.0.7-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:79868ec74e4907a8a6e63effe13547c6157f196a162920b1de066da5849ffb8e"}, - {file = "Cython-3.0.7-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:5e3a038332973b12e72236e8884dc99601a840334c2c46cfbbb5851cb94166eb"}, - {file = "Cython-3.0.7-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:f2602a5c97a3d618b3b847514204ef3349fb414c59e1126c0c2c708d2c5680f8"}, - {file = "Cython-3.0.7-cp36-cp36m-win32.whl", hash = "sha256:539ad5a21141e6420035cf616bcba48d999bf878839e52692f97fc7e2f16265c"}, - {file = "Cython-3.0.7-cp36-cp36m-win_amd64.whl", hash = "sha256:848a28ea49166454c3bff927e5a47629eecf1aa755d6fb3290569cba0fc93766"}, - {file = "Cython-3.0.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82f27a0134fc6bb46032ca5f728d8af984f3be94a3cb01cb70ff1224e551b9cf"}, - {file = "Cython-3.0.7-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79f20c61114c7948cf1214585066406cef4b54a9b935160980e0b6e70ada3a69"}, - {file = "Cython-3.0.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:34d51709e10ad6213b4bf094af7be7ff82bab43216b3c92a07d05b451deeca79"}, - {file = "Cython-3.0.7-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3f02c7240abab48d59f0d5fef7064f18f01a2a204616165fa6367a8abf5a8832"}, - {file = "Cython-3.0.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:225f8bba6428b8d711ca2d6c738d2e3a4667f6a2ae40f8a7a5256f69f6a3600e"}, - {file = "Cython-3.0.7-cp37-cp37m-win32.whl", hash = 
"sha256:30eb2d2938b9195e2c82951713429aff3ad1be9f104437d1536a04eb0cb3dc0e"}, - {file = "Cython-3.0.7-cp37-cp37m-win_amd64.whl", hash = "sha256:167b3f3894dcc697cefefac1d198304fae8eb4d5860a7b8bc2459d572e838470"}, - {file = "Cython-3.0.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c67105f2c6ccf5b3adbcfaecf3c5c9fa8940f9f97955c9ad7d2542151d97d93"}, - {file = "Cython-3.0.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a1859af761977530df2cd5c36e31d54e8d6708ad2c4656e7125c482364dc216"}, - {file = "Cython-3.0.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01b94304aab87496e81d1f546e71abf57b430b39be4269df1cd7da9928d70b5b"}, - {file = "Cython-3.0.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:931aade65f77cf59f2a702ac1f549a4836ce221107c740502cbad18d6d8e9511"}, - {file = "Cython-3.0.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:812b193c26553f1f375d4f1c50f805c227b24ed2d595bc9cdaf78c992ecc64a4"}, - {file = "Cython-3.0.7-cp38-cp38-win32.whl", hash = "sha256:b227643d8a40b68554dc7d37fcd03fc97b4fb0bd2614aeb5f2e07ab244642d36"}, - {file = "Cython-3.0.7-cp38-cp38-win_amd64.whl", hash = "sha256:0d8a98c7d86ac4d05b251c39faf49423780381aab55fbf2e147f6e006a34a58a"}, - {file = "Cython-3.0.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:816f5285d596062c7ef22790de7d75354b58d4417a9fc64cba914aeeb900db0b"}, - {file = "Cython-3.0.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9d0dae6dccd349b8ccf197c10ef2d05c711ca36a649c7eddbab1de2c90b63a1"}, - {file = "Cython-3.0.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:13211b67b29f6ed8e87c137496c73d93aff0330d97940b4fbed72eae37a4a2a0"}, - {file = "Cython-3.0.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b1853bc34ced5ff6473e881fcf6de29da83262552c8f268a0df53b49c2b89e2c"}, - {file = "Cython-3.0.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:51e8164b1270625ff101e95c3c1c234421520c07a0a3a20ded9e9431d98afce7"}, - {file = "Cython-3.0.7-cp39-cp39-win32.whl", hash = "sha256:45319d2471f4dbf19893ca53785a421107266e18b8cccd2054fce1e3f72a85f1"}, - {file = "Cython-3.0.7-cp39-cp39-win_amd64.whl", hash = "sha256:612d83fd1eb5aaa5401a755c1f1aafacd9dab404cd350b90d5f404c98b33e4b3"}, - {file = "Cython-3.0.7-py2.py3-none-any.whl", hash = "sha256:936ec37b261b226d7404eff23a9aad284098338150d42a53d6a9af12b18d3892"}, - {file = "Cython-3.0.7.tar.gz", hash = "sha256:fb299acf3a578573c190c858d49e0cf9d75f4bc49c3f24c5a63804997ef09213"}, + {file = "Cython-3.0.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a846e0a38e2b24e9a5c5dc74b0e54c6e29420d88d1dafabc99e0fc0f3e338636"}, + {file = "Cython-3.0.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45523fdc2b78d79b32834cc1cc12dc2ca8967af87e22a3ee1bff20e77c7f5520"}, + {file = "Cython-3.0.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa0b7f3f841fe087410cab66778e2d3fb20ae2d2078a2be3dffe66c6574be39"}, + {file = "Cython-3.0.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e87294e33e40c289c77a135f491cd721bd089f193f956f7b8ed5aa2d0b8c558f"}, + {file = "Cython-3.0.8-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a1df7a129344b1215c20096d33c00193437df1a8fcca25b71f17c23b1a44f782"}, + {file = "Cython-3.0.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:13c2a5e57a0358da467d97667297bf820b62a1a87ae47c5f87938b9bb593acbd"}, + {file = "Cython-3.0.8-cp310-cp310-win32.whl", hash = "sha256:96b028f044f5880e3cb18ecdcfc6c8d3ce9d0af28418d5ab464509f26d8adf12"}, + {file = "Cython-3.0.8-cp310-cp310-win_amd64.whl", hash = "sha256:8140597a8b5cc4f119a1190f5a2228a84f5ca6d8d9ec386cfce24663f48b2539"}, + {file = "Cython-3.0.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:aae26f9663e50caf9657148403d9874eea41770ecdd6caf381d177c2b1bb82ba"}, + {file = "Cython-3.0.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:547eb3cdb2f8c6f48e6865d5a741d9dd051c25b3ce076fbca571727977b28ac3"}, + {file = "Cython-3.0.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a567d4b9ba70b26db89d75b243529de9e649a2f56384287533cf91512705bee"}, + {file = "Cython-3.0.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:51d1426263b0e82fb22bda8ea60dc77a428581cc19e97741011b938445d383f1"}, + {file = "Cython-3.0.8-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c26daaeccda072459b48d211415fd1e5507c06bcd976fa0d5b8b9f1063467d7b"}, + {file = "Cython-3.0.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:289ce7838208211cd166e975865fd73b0649bf118170b6cebaedfbdaf4a37795"}, + {file = "Cython-3.0.8-cp311-cp311-win32.whl", hash = "sha256:c8aa05f5e17f8042a3be052c24f2edc013fb8af874b0bf76907d16c51b4e7871"}, + {file = "Cython-3.0.8-cp311-cp311-win_amd64.whl", hash = "sha256:000dc9e135d0eec6ecb2b40a5b02d0868a2f8d2e027a41b0fe16a908a9e6de02"}, + {file = "Cython-3.0.8-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:90d3fe31db55685d8cb97d43b0ec39ef614fcf660f83c77ed06aa670cb0e164f"}, + {file = "Cython-3.0.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e24791ddae2324e88e3c902a765595c738f19ae34ee66bfb1a6dac54b1833419"}, + {file = "Cython-3.0.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f020fa1c0552052e0660790b8153b79e3fc9a15dbd8f1d0b841fe5d204a6ae6"}, + {file = "Cython-3.0.8-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18bfa387d7a7f77d7b2526af69a65dbd0b731b8d941aaff5becff8e21f6d7717"}, + {file = "Cython-3.0.8-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:fe81b339cffd87c0069c6049b4d33e28bdd1874625ee515785bf42c9fdff3658"}, + {file = "Cython-3.0.8-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:80fd94c076e1e1b1ee40a309be03080b75f413e8997cddcf401a118879863388"}, + {file = "Cython-3.0.8-cp312-cp312-win32.whl", hash = "sha256:85077915a93e359a9b920280d214dc0cf8a62773e1f3d7d30fab8ea4daed670c"}, + {file = "Cython-3.0.8-cp312-cp312-win_amd64.whl", hash = "sha256:0cb2dcc565c7851f75d496f724a384a790fab12d1b82461b663e66605bec429a"}, + {file = "Cython-3.0.8-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:870d2a0a7e3cbd5efa65aecdb38d715ea337a904ea7bb22324036e78fb7068e7"}, + {file = "Cython-3.0.8-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7e8f2454128974905258d86534f4fd4f91d2f1343605657ecab779d80c9d6d5e"}, + {file = "Cython-3.0.8-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1949d6aa7bc792554bee2b67a9fe41008acbfe22f4f8df7b6ec7b799613a4b3"}, + {file = "Cython-3.0.8-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9f2c6e1b8f3bcd6cb230bac1843f85114780bb8be8614855b1628b36bb510e0"}, + {file = 
"Cython-3.0.8-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:05d7eddc668ae7993643f32c7661f25544e791edb745758672ea5b1a82ecffa6"}, + {file = "Cython-3.0.8-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bfabe115deef4ada5d23c87bddb11289123336dcc14347011832c07db616dd93"}, + {file = "Cython-3.0.8-cp36-cp36m-win32.whl", hash = "sha256:0c38c9f0bcce2df0c3347285863621be904ac6b64c5792d871130569d893efd7"}, + {file = "Cython-3.0.8-cp36-cp36m-win_amd64.whl", hash = "sha256:6c46939c3983217d140999de7c238c3141f56b1ea349e47ca49cae899969aa2c"}, + {file = "Cython-3.0.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:115f0a50f752da6c99941b103b5cb090da63eb206abbc7c2ad33856ffc73f064"}, + {file = "Cython-3.0.8-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9c0f29246734561c90f36e70ed0506b61aa3d044e4cc4cba559065a2a741fae"}, + {file = "Cython-3.0.8-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ab75242869ff71e5665fe5c96f3378e79e792fa3c11762641b6c5afbbbbe026"}, + {file = "Cython-3.0.8-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6717c06e9cfc6c1df18543cd31a21f5d8e378a40f70c851fa2d34f0597037abc"}, + {file = "Cython-3.0.8-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:9d3f74388db378a3c6fd06e79a809ed98df3f56484d317b81ee762dbf3c263e0"}, + {file = "Cython-3.0.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ae7ac561fd8253a9ae96311e91d12af5f701383564edc11d6338a7b60b285a6f"}, + {file = "Cython-3.0.8-cp37-cp37m-win32.whl", hash = "sha256:97b2a45845b993304f1799664fa88da676ee19442b15fdcaa31f9da7e1acc434"}, + {file = "Cython-3.0.8-cp37-cp37m-win_amd64.whl", hash = "sha256:9e2be2b340fea46fb849d378f9b80d3c08ff2e81e2bfbcdb656e2e3cd8c6b2dc"}, + {file = "Cython-3.0.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2cde23c555470db3f149ede78b518e8274853745289c956a0e06ad8d982e4db9"}, + {file = "Cython-3.0.8-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7990ca127e1f1beedaf8fc8bf66541d066ef4723ad7d8d47a7cbf842e0f47580"}, + {file = "Cython-3.0.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b983c8e6803f016146c26854d9150ddad5662960c804ea7f0c752c9266752f0"}, + {file = "Cython-3.0.8-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a973268d7ca1a2bdf78575e459a94a78e1a0a9bb62a7db0c50041949a73b02ff"}, + {file = "Cython-3.0.8-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:61a237bc9dd23c7faef0fcfce88c11c65d0c9bb73c74ccfa408b3a012073c20e"}, + {file = "Cython-3.0.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3a3d67f079598af49e90ff9655bf85bd358f093d727eb21ca2708f467c489cae"}, + {file = "Cython-3.0.8-cp38-cp38-win32.whl", hash = "sha256:17a642bb01a693e34c914106566f59844b4461665066613913463a719e0dd15d"}, + {file = "Cython-3.0.8-cp38-cp38-win_amd64.whl", hash = "sha256:2cdfc32252f3b6dc7c94032ab744dcedb45286733443c294d8f909a4854e7f83"}, + {file = "Cython-3.0.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fa97893d99385386925d00074654aeae3a98867f298d1e12ceaf38a9054a9bae"}, + {file = "Cython-3.0.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f05c0bf9d085c031df8f583f0d506aa3be1692023de18c45d0aaf78685bbb944"}, + {file = "Cython-3.0.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de892422582f5758bd8de187e98ac829330ec1007bc42c661f687792999988a7"}, + {file = 
"Cython-3.0.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:314f2355a1f1d06e3c431eaad4708cf10037b5e91e4b231d89c913989d0bdafd"}, + {file = "Cython-3.0.8-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:78825a3774211e7d5089730f00cdf7f473042acc9ceb8b9eeebe13ed3a5541de"}, + {file = "Cython-3.0.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:df8093deabc55f37028190cf5e575c26aad23fc673f34b85d5f45076bc37ce39"}, + {file = "Cython-3.0.8-cp39-cp39-win32.whl", hash = "sha256:1aca1b97e0095b3a9a6c33eada3f661a4ed0d499067d121239b193e5ba3bb4f0"}, + {file = "Cython-3.0.8-cp39-cp39-win_amd64.whl", hash = "sha256:16873d78be63bd38ffb759da7ab82814b36f56c769ee02b1d5859560e4c3ac3c"}, + {file = "Cython-3.0.8-py2.py3-none-any.whl", hash = "sha256:171b27051253d3f9108e9759e504ba59ff06e7f7ba944457f94deaf9c21bf0b6"}, + {file = "Cython-3.0.8.tar.gz", hash = "sha256:8333423d8fd5765e7cceea3a9985dd1e0a5dfeb2734629e1a2ed2d6233d39de6"}, ] [[package]] @@ -4182,4 +4189,4 @@ zstandard = ["zstandard"] [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "2ddd248af109304f36e8290cea198df3cf22947197c1742d3bd808f6b2b0c938" +content-hash = "54d7c0ea7f06959ce2fb8c4c36b595881e56c59dc1e97bbc4a859bcc25dac542" diff --git a/pyproject.toml b/pyproject.toml index 4d722c1dd6..63299413d5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -82,7 +82,7 @@ requests-mock = "1.11.0" moto = { version = "^4.2.13", extras = ["server"] } typing-extensions = "4.9.0" pytest-mock = "3.12.0" -cython = "3.0.7" +cython = "3.0.8" [[tool.mypy.overrides]] module = "pytest_mock.*" From 7e0d77ea4c784a7666dfca823b424dcd97436036 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 11 Jan 2024 12:52:58 -0800 Subject: [PATCH 017/169] Build: Bump jinja2 from 3.1.2 to 3.1.3 in /mkdocs (#263) Bumps [jinja2](https://github.com/pallets/jinja) from 3.1.2 to 3.1.3. - [Release notes](https://github.com/pallets/jinja/releases) - [Changelog](https://github.com/pallets/jinja/blob/main/CHANGES.rst) - [Commits](https://github.com/pallets/jinja/compare/3.1.2...3.1.3) --- updated-dependencies: - dependency-name: jinja2 dependency-type: direct:production ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- mkdocs/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mkdocs/requirements.txt b/mkdocs/requirements.txt index 8b128bdfef..4e5cbe314a 100644 --- a/mkdocs/requirements.txt +++ b/mkdocs/requirements.txt @@ -17,7 +17,7 @@ mkdocs==1.5.3 griffe==0.38.1 -jinja2==3.1.2 +jinja2==3.1.3 mkdocstrings==0.24.0 mkdocstrings-python==1.8.0 mkdocs-literate-nav==0.6.1 From 5085d28ccf2a2f8eac2789098e7b86628f9c9137 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 11 Jan 2024 12:53:18 -0800 Subject: [PATCH 018/169] Build: Bump jinja2 from 3.1.2 to 3.1.3 (#264) Bumps [jinja2](https://github.com/pallets/jinja) from 3.1.2 to 3.1.3. - [Release notes](https://github.com/pallets/jinja/releases) - [Changelog](https://github.com/pallets/jinja/blob/main/CHANGES.rst) - [Commits](https://github.com/pallets/jinja/compare/3.1.2...3.1.3) --- updated-dependencies: - dependency-name: jinja2 dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> From 4593208f67e1101a94059a93bfb13f609ac2ad80 Mon Sep 17 00:00:00 2001 From: Fokko Driesprong Date: Fri, 12 Jan 2024 14:37:52 -0800 Subject: [PATCH 019/169] Arrow: Set field-id with prefix (#227) 'PARQUET:' prefix is specific to Parquet, with 'PARQUET:field_id' setting the 'field_id'. Removed the non-prefixed alternative for 'field_id'. Removed the prefixed alternative for 'doc'. --- pyiceberg/io/pyarrow.py | 29 +++++++---------- tests/io/test_pyarrow.py | 54 ++++++++++++++++---------------- tests/io/test_pyarrow_visitor.py | 12 +++---- 3 files changed, 45 insertions(+), 50 deletions(-) diff --git a/pyiceberg/io/pyarrow.py b/pyiceberg/io/pyarrow.py index 9faf4cec56..e116cd0a38 100644 --- a/pyiceberg/io/pyarrow.py +++ b/pyiceberg/io/pyarrow.py @@ -156,10 +156,9 @@ ONE_MEGABYTE = 1024 * 1024 BUFFER_SIZE = "buffer-size" ICEBERG_SCHEMA = b"iceberg.schema" -FIELD_ID = "field_id" -DOC = "doc" -PYARROW_FIELD_ID_KEYS = [b"PARQUET:field_id", b"field_id"] -PYARROW_FIELD_DOC_KEYS = [b"PARQUET:field_doc", b"field_doc", b"doc"] +# The PARQUET: in front means that it is Parquet specific, in this case the field_id +PYARROW_PARQUET_FIELD_ID_KEY = b"PARQUET:field_id" +PYARROW_FIELD_DOC_KEY = b"doc" T = TypeVar("T") @@ -461,7 +460,9 @@ def field(self, field: NestedField, field_result: pa.DataType) -> pa.Field: name=field.name, type=field_result, nullable=field.optional, - metadata={DOC: field.doc, FIELD_ID: str(field.field_id)} if field.doc else {FIELD_ID: str(field.field_id)}, + metadata={PYARROW_FIELD_DOC_KEY: field.doc, PYARROW_PARQUET_FIELD_ID_KEY: str(field.field_id)} + if field.doc + else {PYARROW_PARQUET_FIELD_ID_KEY: str(field.field_id)}, ) def list(self, list_type: ListType, element_result: pa.DataType) -> pa.DataType: @@ -725,17 +726,11 @@ def primitive(self, primitive: pa.DataType) -> Optional[T]: def _get_field_id(field: pa.Field) -> Optional[int]: - for pyarrow_field_id_key in PYARROW_FIELD_ID_KEYS: - if field_id_str := field.metadata.get(pyarrow_field_id_key): - return int(field_id_str.decode()) - return None - - -def _get_field_doc(field: pa.Field) -> Optional[str]: - for pyarrow_doc_key in PYARROW_FIELD_DOC_KEYS: - if doc_str := field.metadata.get(pyarrow_doc_key): - return doc_str.decode() - return None + return ( + int(field_id_str.decode()) + if (field.metadata and (field_id_str := field.metadata.get(PYARROW_PARQUET_FIELD_ID_KEY))) + else None + ) class _ConvertToIceberg(PyArrowSchemaVisitor[Union[IcebergType, Schema]]): @@ -743,7 +738,7 @@ def _convert_fields(self, arrow_fields: Iterable[pa.Field], field_results: List[ fields = [] for i, field in enumerate(arrow_fields): field_id = _get_field_id(field) - field_doc = _get_field_doc(field) + field_doc = doc_str.decode() if (field.metadata and (doc_str := field.metadata.get(PYARROW_FIELD_DOC_KEY))) else None field_type = field_results[i] if field_type is not None and field_id is not None: fields.append(NestedField(field_id, field.name, field_type, required=not field.nullable, doc=field_doc)) diff --git a/tests/io/test_pyarrow.py b/tests/io/test_pyarrow.py index 7a1868f92a..5efeb42ed8 100644 --- a/tests/io/test_pyarrow.py +++ b/tests/io/test_pyarrow.py @@ -324,57 +324,57 @@ def test_schema_to_pyarrow_schema(table_schema_nested: Schema) -> None: actual = schema_to_pyarrow(table_schema_nested) expected = """foo: string -- field metadata -- - field_id: '1' + PARQUET:field_id: '1' bar: int32 not null -- field metadata -- - field_id: 
'2' + PARQUET:field_id: '2' baz: bool -- field metadata -- - field_id: '3' + PARQUET:field_id: '3' qux: list not null child 0, element: string not null -- field metadata -- - field_id: '5' + PARQUET:field_id: '5' -- field metadata -- - field_id: '4' + PARQUET:field_id: '4' quux: map> not null child 0, entries: struct not null> not null child 0, key: string not null -- field metadata -- - field_id: '7' + PARQUET:field_id: '7' child 1, value: map not null child 0, entries: struct not null child 0, key: string not null -- field metadata -- - field_id: '9' + PARQUET:field_id: '9' child 1, value: int32 not null -- field metadata -- - field_id: '10' + PARQUET:field_id: '10' -- field metadata -- - field_id: '8' + PARQUET:field_id: '8' -- field metadata -- - field_id: '6' + PARQUET:field_id: '6' location: list not null> not null child 0, element: struct not null child 0, latitude: float -- field metadata -- - field_id: '13' + PARQUET:field_id: '13' child 1, longitude: float -- field metadata -- - field_id: '14' + PARQUET:field_id: '14' -- field metadata -- - field_id: '12' + PARQUET:field_id: '12' -- field metadata -- - field_id: '11' + PARQUET:field_id: '11' person: struct child 0, name: string -- field metadata -- - field_id: '16' + PARQUET:field_id: '16' child 1, age: int32 not null -- field metadata -- - field_id: '17' + PARQUET:field_id: '17' -- field metadata -- - field_id: '15'""" + PARQUET:field_id: '15'""" assert repr(actual) == expected @@ -888,22 +888,22 @@ def test_projection_add_column(file_int: str) -> None: list: list child 0, element: int32 -- field metadata -- - field_id: '21' + PARQUET:field_id: '21' map: map child 0, entries: struct not null child 0, key: int32 not null -- field metadata -- - field_id: '31' + PARQUET:field_id: '31' child 1, value: string -- field metadata -- - field_id: '32' + PARQUET:field_id: '32' location: struct child 0, lat: double -- field metadata -- - field_id: '41' + PARQUET:field_id: '41' child 1, lon: double -- field metadata -- - field_id: '42'""" + PARQUET:field_id: '42'""" ) @@ -953,10 +953,10 @@ def test_projection_add_column_struct(schema_int: Schema, file_int: str) -> None child 0, entries: struct not null child 0, key: int32 not null -- field metadata -- - field_id: '3' + PARQUET:field_id: '3' child 1, value: string -- field metadata -- - field_id: '4'""" + PARQUET:field_id: '4'""" ) @@ -1004,7 +1004,7 @@ def test_projection_filter(schema_int: Schema, file_int: str) -> None: repr(result_table.schema) == """id: int32 -- field metadata -- - field_id: '1'""" + PARQUET:field_id: '1'""" ) @@ -1182,10 +1182,10 @@ def test_projection_nested_struct_different_parent_id(file_struct: str) -> None: == """location: struct child 0, lat: double -- field metadata -- - field_id: '41' + PARQUET:field_id: '41' child 1, long: double -- field metadata -- - field_id: '42'""" + PARQUET:field_id: '42'""" ) diff --git a/tests/io/test_pyarrow_visitor.py b/tests/io/test_pyarrow_visitor.py index 786b5ea1d3..c307440352 100644 --- a/tests/io/test_pyarrow_visitor.py +++ b/tests/io/test_pyarrow_visitor.py @@ -208,9 +208,9 @@ def test_pyarrow_variable_binary_to_iceberg() -> None: def test_pyarrow_struct_to_iceberg() -> None: pyarrow_struct = pa.struct([ - pa.field("foo", pa.string(), nullable=True, metadata={"field_id": "1", "doc": "foo doc"}), - pa.field("bar", pa.int32(), nullable=False, metadata={"field_id": "2"}), - pa.field("baz", pa.bool_(), nullable=True, metadata={"field_id": "3"}), + pa.field("foo", pa.string(), nullable=True, metadata={"PARQUET:field_id": "1", "doc": 
"foo doc"}), + pa.field("bar", pa.int32(), nullable=False, metadata={"PARQUET:field_id": "2"}), + pa.field("baz", pa.bool_(), nullable=True, metadata={"PARQUET:field_id": "3"}), ]) expected = StructType( NestedField(field_id=1, name="foo", field_type=StringType(), required=False, doc="foo doc"), @@ -221,7 +221,7 @@ def test_pyarrow_struct_to_iceberg() -> None: def test_pyarrow_list_to_iceberg() -> None: - pyarrow_list = pa.list_(pa.field("element", pa.int32(), nullable=False, metadata={"field_id": "1"})) + pyarrow_list = pa.list_(pa.field("element", pa.int32(), nullable=False, metadata={"PARQUET:field_id": "1"})) expected = ListType( element_id=1, element_type=IntegerType(), @@ -232,8 +232,8 @@ def test_pyarrow_list_to_iceberg() -> None: def test_pyarrow_map_to_iceberg() -> None: pyarrow_map = pa.map_( - pa.field("key", pa.int32(), nullable=False, metadata={"field_id": "1"}), - pa.field("value", pa.string(), nullable=False, metadata={"field_id": "2"}), + pa.field("key", pa.int32(), nullable=False, metadata={"PARQUET:field_id": "1"}), + pa.field("value", pa.string(), nullable=False, metadata={"PARQUET:field_id": "2"}), ) expected = MapType( key_id=1, From 979736045f440c91020990106572ef9fc25c0f9f Mon Sep 17 00:00:00 2001 From: Sung Yun <107272191+syun64@users.noreply.github.com> Date: Tue, 16 Jan 2024 07:30:24 -0500 Subject: [PATCH 020/169] Rest Catalog Support for a Separate OAuth Server URI (#233) * support separate auth url * Remove debug line --------- Co-authored-by: Fokko Driesprong --- mkdocs/docs/configuration.md | 17 +++++++++-------- pyiceberg/catalog/rest.py | 11 +++++++++-- tests/catalog/test_rest.py | 25 ++++++++++++++++++++++++- 3 files changed, 42 insertions(+), 11 deletions(-) diff --git a/mkdocs/docs/configuration.md b/mkdocs/docs/configuration.md index 801bb8fc87..12bb35181b 100644 --- a/mkdocs/docs/configuration.md +++ b/mkdocs/docs/configuration.md @@ -130,14 +130,15 @@ catalog: cabundle: /absolute/path/to/cabundle.pem ``` -| Key | Example | Description | -| ------------------- | ----------------------- | -------------------------------------------------------------------------- | -| uri | https://rest-catalog/ws | URI identifying the REST Server | -| credential | t-1234:secret | Credential to use for OAuth2 credential flow when initializing the catalog | -| token | FEW23.DFSDF.FSDF | Bearer token value to use for `Authorization` header | -| rest.sigv4-enabled | true | Sign requests to the REST Server using AWS SigV4 protocol | -| rest.signing-region | us-east-1 | The region to use when SigV4 signing a request | -| rest.signing-name | execute-api | The service signing name to use when SigV4 signing a request | +| Key | Example | Description | +| ---------------------- | ----------------------- | -------------------------------------------------------------------------------------------------- | +| uri | https://rest-catalog/ws | URI identifying the REST Server | +| credential | t-1234:secret | Credential to use for OAuth2 credential flow when initializing the catalog | +| token | FEW23.DFSDF.FSDF | Bearer token value to use for `Authorization` header | +| rest.sigv4-enabled | true | Sign requests to the REST Server using AWS SigV4 protocol | +| rest.signing-region | us-east-1 | The region to use when SigV4 signing a request | +| rest.signing-name | execute-api | The service signing name to use when SigV4 signing a request | +| rest.authorization-url | https://auth-service/cc | Authentication URL to use for client credentials authentication (default: uri + 
'v1/oauth/tokens') | ## SQL Catalog diff --git a/pyiceberg/catalog/rest.py b/pyiceberg/catalog/rest.py index 3dbb5b72a9..de192a9e0b 100644 --- a/pyiceberg/catalog/rest.py +++ b/pyiceberg/catalog/rest.py @@ -109,6 +109,7 @@ class Endpoints: SIGV4 = "rest.sigv4-enabled" SIGV4_REGION = "rest.signing-region" SIGV4_SERVICE = "rest.signing-name" +AUTH_URL = "rest.authorization-url" NAMESPACE_SEPARATOR = b"\x1F".decode(UTF8) @@ -265,15 +266,21 @@ def url(self, endpoint: str, prefixed: bool = True, **kwargs: Any) -> str: return url + endpoint.format(**kwargs) + @property + def auth_url(self) -> str: + if url := self.properties.get(AUTH_URL): + return url + else: + return self.url(Endpoints.get_token, prefixed=False) + def _fetch_access_token(self, session: Session, credential: str) -> str: if SEMICOLON in credential: client_id, client_secret = credential.split(SEMICOLON) else: client_id, client_secret = None, credential data = {GRANT_TYPE: CLIENT_CREDENTIALS, CLIENT_ID: client_id, CLIENT_SECRET: client_secret, SCOPE: CATALOG_SCOPE} - url = self.url(Endpoints.get_token, prefixed=False) # Uses application/x-www-form-urlencoded by default - response = session.post(url=url, data=data) + response = session.post(url=self.auth_url, data=data) try: response.raise_for_status() except HTTPError as exc: diff --git a/tests/catalog/test_rest.py b/tests/catalog/test_rest.py index 79cf25f87a..248cc14d88 100644 --- a/tests/catalog/test_rest.py +++ b/tests/catalog/test_rest.py @@ -24,7 +24,7 @@ import pyiceberg from pyiceberg.catalog import PropertiesUpdateSummary, Table, load_catalog -from pyiceberg.catalog.rest import RestCatalog +from pyiceberg.catalog.rest import AUTH_URL, RestCatalog from pyiceberg.exceptions import ( NamespaceAlreadyExistsError, NoSuchNamespaceError, @@ -43,6 +43,7 @@ TEST_URI = "https://iceberg-test-catalog/" TEST_CREDENTIALS = "client:secret" +TEST_AUTH_URL = "https://auth-endpoint/" TEST_TOKEN = "some_jwt_token" TEST_HEADERS = { "Content-type": "application/json", @@ -116,6 +117,28 @@ def test_token_200(rest_mock: Mocker) -> None: ) +def test_token_200_w_auth_url(rest_mock: Mocker) -> None: + rest_mock.post( + TEST_AUTH_URL, + json={ + "access_token": TEST_TOKEN, + "token_type": "Bearer", + "expires_in": 86400, + "issued_token_type": "urn:ietf:params:oauth:token-type:access_token", + }, + status_code=200, + request_headers=OAUTH_TEST_HEADERS, + ) + # pylint: disable=W0212 + assert ( + RestCatalog("rest", uri=TEST_URI, credential=TEST_CREDENTIALS, **{AUTH_URL: TEST_AUTH_URL})._session.headers[ + "Authorization" + ] + == f"Bearer {TEST_TOKEN}" + ) + # pylint: enable=W0212 + + def test_config_200(requests_mock: Mocker) -> None: requests_mock.get( f"{TEST_URI}v1/config", From d047f1bbafa05bd65e5908e78737dd4e76218834 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 16 Jan 2024 09:50:45 -0800 Subject: [PATCH 021/169] Build: Bump mkdocs-material from 9.5.3 to 9.5.4 (#267) Bumps [mkdocs-material](https://github.com/squidfunk/mkdocs-material) from 9.5.3 to 9.5.4. 
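[Editor's note on the OAuth server URI patch above: the new `rest.authorization-url` property can be passed straight through the catalog constructor, mirroring the `test_token_200_w_auth_url` test. The sketch below uses placeholder endpoints, not real services, and needs a live catalog to actually succeed.]

```python
from pyiceberg.catalog.rest import RestCatalog

# Placeholder URIs for illustration only. When rest.authorization-url is unset,
# the token request falls back to uri + 'v1/oauth/tokens'.
catalog = RestCatalog(
    "rest",
    uri="https://rest-catalog/ws",
    credential="t-1234:secret",
    **{"rest.authorization-url": "https://auth-service/cc"},
)
```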
--- mkdocs/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mkdocs/requirements.txt b/mkdocs/requirements.txt index 4e5cbe314a..0af1661e2f 100644 --- a/mkdocs/requirements.txt +++ b/mkdocs/requirements.txt @@ -23,6 +23,6 @@ mkdocstrings-python==1.8.0 mkdocs-literate-nav==0.6.1 mkdocs-autorefs==0.5.0 mkdocs-gen-files==0.5.0 -mkdocs-material==9.5.3 +mkdocs-material==9.5.4 mkdocs-material-extensions==1.3.1 mkdocs-section-index==0.3.8 From 3085c404c99ba8c5c8856f21a6c8d63a12ca0113 Mon Sep 17 00:00:00 2001 From: Honah J Date: Tue, 16 Jan 2024 10:06:20 -0800 Subject: [PATCH 022/169] Table Metadata Update: Support SetPropertiesUpdate and RemovePropertiesUpdate (#266) * Support table properties update * Add test for glue catalog --- pyiceberg/table/__init__.py | 25 ++++++++++++ tests/catalog/integration_test_glue.py | 17 +++++++++ tests/catalog/test_glue.py | 22 +++++++++++ tests/table/test_init.py | 53 ++++++++++++++++++++++++++ 4 files changed, 117 insertions(+) diff --git a/pyiceberg/table/__init__.py b/pyiceberg/table/__init__.py index ebeaa19b54..5292c5182c 100644 --- a/pyiceberg/table/__init__.py +++ b/pyiceberg/table/__init__.py @@ -415,6 +415,31 @@ def _(update: UpgradeFormatVersionUpdate, base_metadata: TableMetadata, context: return TableMetadataUtil.parse_obj(updated_metadata_data) +@_apply_table_update.register(SetPropertiesUpdate) +def _(update: SetPropertiesUpdate, base_metadata: TableMetadata, context: _TableMetadataUpdateContext) -> TableMetadata: + if len(update.updates) == 0: + return base_metadata + + properties = dict(base_metadata.properties) + properties.update(update.updates) + + context.add_update(update) + return base_metadata.model_copy(update={"properties": properties}) + + +@_apply_table_update.register(RemovePropertiesUpdate) +def _(update: RemovePropertiesUpdate, base_metadata: TableMetadata, context: _TableMetadataUpdateContext) -> TableMetadata: + if len(update.removals) == 0: + return base_metadata + + properties = dict(base_metadata.properties) + for key in update.removals: + properties.pop(key) + + context.add_update(update) + return base_metadata.model_copy(update={"properties": properties}) + + @_apply_table_update.register(AddSchemaUpdate) def _(update: AddSchemaUpdate, base_metadata: TableMetadata, context: _TableMetadataUpdateContext) -> TableMetadata: if update.last_column_id < base_metadata.last_column_id: diff --git a/tests/catalog/integration_test_glue.py b/tests/catalog/integration_test_glue.py index 99f0adac40..24401cae39 100644 --- a/tests/catalog/integration_test_glue.py +++ b/tests/catalog/integration_test_glue.py @@ -294,3 +294,20 @@ def test_commit_table_update_schema( assert new_schema assert new_schema == update._apply() assert new_schema.find_field("b").field_type == IntegerType() + + +def test_commit_table_properties(test_catalog: Catalog, table_schema_nested: Schema, database_name: str, table_name: str) -> None: + identifier = (database_name, table_name) + test_catalog.create_namespace(namespace=database_name) + table = test_catalog.create_table(identifier=identifier, schema=table_schema_nested, properties={"test_a": "test_a"}) + + assert test_catalog._parse_metadata_version(table.metadata_location) == 0 + + transaction = table.transaction() + transaction.set_properties(test_a="test_aa", test_b="test_b", test_c="test_c") + transaction.remove_properties("test_b") + transaction.commit_transaction() + + updated_table_metadata = table.metadata + assert test_catalog._parse_metadata_version(table.metadata_location) 
== 1 + assert updated_table_metadata.properties == {"test_a": "test_aa", "test_c": "test_c"} diff --git a/tests/catalog/test_glue.py b/tests/catalog/test_glue.py index f84ed4ad20..bf6d11784f 100644 --- a/tests/catalog/test_glue.py +++ b/tests/catalog/test_glue.py @@ -553,3 +553,25 @@ def test_commit_table_update_schema( assert new_schema assert new_schema == update._apply() assert new_schema.find_field("b").field_type == IntegerType() + + +@mock_glue +def test_commit_table_properties( + _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str +) -> None: + catalog_name = "glue" + identifier = (database_name, table_name) + test_catalog = GlueCatalog(catalog_name, **{"s3.endpoint": moto_endpoint_url, "warehouse": f"s3://{BUCKET_NAME}"}) + test_catalog.create_namespace(namespace=database_name) + table = test_catalog.create_table(identifier=identifier, schema=table_schema_nested, properties={"test_a": "test_a"}) + + assert test_catalog._parse_metadata_version(table.metadata_location) == 0 + + transaction = table.transaction() + transaction.set_properties(test_a="test_aa", test_b="test_b", test_c="test_c") + transaction.remove_properties("test_b") + transaction.commit_transaction() + + updated_table_metadata = table.metadata + assert test_catalog._parse_metadata_version(table.metadata_location) == 1 + assert updated_table_metadata.properties == {"test_a": "test_aa", "test_c": "test_c"} diff --git a/tests/table/test_init.py b/tests/table/test_init.py index 475814a051..d3bbe418c4 100644 --- a/tests/table/test_init.py +++ b/tests/table/test_init.py @@ -50,6 +50,7 @@ AssertLastAssignedPartitionId, AssertRefSnapshotId, AssertTableUUID, + RemovePropertiesUpdate, SetPropertiesUpdate, SetSnapshotRefUpdate, SnapshotRef, @@ -529,6 +530,51 @@ def test_add_nested_list_type_column(table_v2: Table) -> None: assert new_schema.highest_field_id == 7 +def test_apply_set_properties_update(table_v2: Table) -> None: + base_metadata = table_v2.metadata + + new_metadata_no_update = update_table_metadata(base_metadata, (SetPropertiesUpdate(updates={}),)) + assert new_metadata_no_update == base_metadata + + new_metadata = update_table_metadata( + base_metadata, (SetPropertiesUpdate(updates={"read.split.target.size": "123", "test_a": "test_a", "test_b": "test_b"}),) + ) + + assert base_metadata.properties == {"read.split.target.size": "134217728"} + assert new_metadata.properties == {"read.split.target.size": "123", "test_a": "test_a", "test_b": "test_b"} + + new_metadata_add_only = update_table_metadata(new_metadata, (SetPropertiesUpdate(updates={"test_c": "test_c"}),)) + + assert new_metadata_add_only.properties == { + "read.split.target.size": "123", + "test_a": "test_a", + "test_b": "test_b", + "test_c": "test_c", + } + + +def test_apply_remove_properties_update(table_v2: Table) -> None: + base_metadata = update_table_metadata( + table_v2.metadata, + (SetPropertiesUpdate(updates={"test_a": "test_a", "test_b": "test_b", "test_c": "test_c", "test_d": "test_d"}),), + ) + + new_metadata_no_removal = update_table_metadata(base_metadata, (RemovePropertiesUpdate(removals=[]),)) + + assert base_metadata == new_metadata_no_removal + + new_metadata = update_table_metadata(base_metadata, (RemovePropertiesUpdate(removals=["test_a", "test_c"]),)) + + assert base_metadata.properties == { + "read.split.target.size": "134217728", + "test_a": "test_a", + "test_b": "test_b", + "test_c": "test_c", + "test_d": "test_d", + } + assert new_metadata.properties == 
{"read.split.target.size": "134217728", "test_b": "test_b", "test_d": "test_d"} + + def test_apply_add_schema_update(table_v2: Table) -> None: transaction = table_v2.transaction() update = transaction.update_schema() @@ -625,6 +671,8 @@ def test_update_metadata_with_multiple_updates(table_v1: Table) -> None: schema_update_1.add_column(path="b", field_type=IntegerType()) schema_update_1.commit() + transaction.set_properties(owner="test", test_a="test_a", test_b="test_b", test_c="test_c") + test_updates = transaction._updates # pylint: disable=W0212 new_snapshot = Snapshot( @@ -639,6 +687,7 @@ def test_update_metadata_with_multiple_updates(table_v1: Table) -> None: test_updates += ( AddSnapshotUpdate(snapshot=new_snapshot), + SetPropertiesUpdate(updates={"test_a": "test_a1"}), SetSnapshotRefUpdate( ref_name="main", type="branch", @@ -647,6 +696,7 @@ def test_update_metadata_with_multiple_updates(table_v1: Table) -> None: max_snapshot_age_ms=12312312312, min_snapshots_to_keep=1, ), + RemovePropertiesUpdate(removals=["test_c", "test_b"]), ) new_metadata = update_table_metadata(base_metadata, test_updates) @@ -681,6 +731,9 @@ def test_update_metadata_with_multiple_updates(table_v1: Table) -> None: max_ref_age_ms=123123123, ) + # Set/RemovePropertiesUpdate + assert new_metadata.properties == {"owner": "test", "test_a": "test_a1"} + def test_metadata_isolation_from_illegal_updates(table_v1: Table) -> None: base_metadata = table_v1.metadata From 2d30119610e3938708b752ea6e0a976d3a139edc Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 16 Jan 2024 18:18:40 -0800 Subject: [PATCH 023/169] Build: Bump griffe from 0.38.1 to 0.39.0 (#272) Bumps [griffe](https://github.com/mkdocstrings/griffe) from 0.38.1 to 0.39.0. --- mkdocs/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mkdocs/requirements.txt b/mkdocs/requirements.txt index 0af1661e2f..cac7424d82 100644 --- a/mkdocs/requirements.txt +++ b/mkdocs/requirements.txt @@ -16,7 +16,7 @@ # under the License. 
 mkdocs==1.5.3
-griffe==0.38.1
+griffe==0.39.0
 jinja2==3.1.3
 mkdocstrings==0.24.0
 mkdocstrings-python==1.8.0

From 7deb739439b40ff7a7d82bac923e849630ffa92c Mon Sep 17 00:00:00 2001
From: Sung Yun <107272191+syun64@users.noreply.github.com>
Date: Wed, 17 Jan 2024 07:36:24 -0500
Subject: [PATCH 024/169] Add SqlCatalog `_commit_table` support (#265)

* sql commit

* SqlCatalog _commit_table

* better variable names

* fallback to FOR UPDATE commit when engine.dialect.supports_sane_rowcount is False

* remove stray print

* wait

* better logging
---
 pyiceberg/catalog/sql.py  | 132 +++++++++++++++++++++++++++++++-------
 tests/catalog/test_sql.py |  55 ++++++++++++++++
 2 files changed, 165 insertions(+), 22 deletions(-)

diff --git a/pyiceberg/catalog/sql.py b/pyiceberg/catalog/sql.py
index 77ece56163..593c6b54a1 100644
--- a/pyiceberg/catalog/sql.py
+++ b/pyiceberg/catalog/sql.py
@@ -31,7 +31,7 @@
     union,
     update,
 )
-from sqlalchemy.exc import IntegrityError, OperationalError
+from sqlalchemy.exc import IntegrityError, NoResultFound, OperationalError
 from sqlalchemy.orm import (
     DeclarativeBase,
     Mapped,
@@ -48,6 +48,7 @@
     PropertiesUpdateSummary,
 )
 from pyiceberg.exceptions import (
+    CommitFailedException,
     NamespaceAlreadyExistsError,
     NamespaceNotEmptyError,
     NoSuchNamespaceError,
@@ -59,7 +60,7 @@
 from pyiceberg.partitioning import UNPARTITIONED_PARTITION_SPEC, PartitionSpec
 from pyiceberg.schema import Schema
 from pyiceberg.serializers import FromInputFile
-from pyiceberg.table import CommitTableRequest, CommitTableResponse, Table
+from pyiceberg.table import CommitTableRequest, CommitTableResponse, Table, update_table_metadata
 from pyiceberg.table.metadata import new_table_metadata
 from pyiceberg.table.sorting import UNSORTED_SORT_ORDER, SortOrder
 from pyiceberg.typedef import EMPTY_DICT
@@ -268,16 +269,32 @@ def drop_table(self, identifier: Union[str, Identifier]) -> None:
         identifier_tuple = self.identifier_to_tuple_without_catalog(identifier)
         database_name, table_name = self.identifier_to_database_and_table(identifier_tuple, NoSuchTableError)
         with Session(self.engine) as session:
-            res = session.execute(
-                delete(IcebergTables).where(
-                    IcebergTables.catalog_name == self.name,
-                    IcebergTables.table_namespace == database_name,
-                    IcebergTables.table_name == table_name,
+            if self.engine.dialect.supports_sane_rowcount:
+                res = session.execute(
+                    delete(IcebergTables).where(
+                        IcebergTables.catalog_name == self.name,
+                        IcebergTables.table_namespace == database_name,
+                        IcebergTables.table_name == table_name,
+                    )
                 )
-            )
+                if res.rowcount < 1:
+                    raise NoSuchTableError(f"Table does not exist: {database_name}.{table_name}")
+            else:
+                try:
+                    tbl = (
+                        session.query(IcebergTables)
+                        .with_for_update(of=IcebergTables)
+                        .filter(
+                            IcebergTables.catalog_name == self.name,
+                            IcebergTables.table_namespace == database_name,
+                            IcebergTables.table_name == table_name,
+                        )
+                        .one()
+                    )
+                    session.delete(tbl)
+                except NoResultFound as e:
+                    raise NoSuchTableError(f"Table does not exist: {database_name}.{table_name}") from e
             session.commit()
-            if res.rowcount < 1:
-                raise NoSuchTableError(f"Table does not exist: {database_name}.{table_name}")

     def rename_table(self, from_identifier: Union[str, Identifier], to_identifier: Union[str, Identifier]) -> Table:
         """Rename a fully classified table name.
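[Editor's note, before the `rename_table` hunk below: `supports_sane_rowcount` is a SQLAlchemy dialect flag saying whether the `rowcount` reported after an UPDATE or DELETE can be trusted; when it cannot, this patch falls back to locking the row with `SELECT ... FOR UPDATE` and inspecting the query result instead. A small illustration, assuming the SQLite URL used by the test fixtures in this patch:]

```python
from sqlalchemy import create_engine

engine = create_engine("sqlite:////tmp/sql-catalog.db")

# True for the bundled pysqlite driver, so the fast DELETE/UPDATE path is taken.
print(engine.dialect.supports_sane_rowcount)

# The test suite exercises the FOR UPDATE fallback by flipping the flag,
# exactly as the catalog_sqlite_without_rowcount fixture does further below.
engine.dialect.supports_sane_rowcount = False
```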
@@ -301,18 +318,35 @@ def rename_table(self, from_identifier: Union[str, Identifier], to_identifier: U
             raise NoSuchNamespaceError(f"Namespace does not exist: {to_database_name}")
         with Session(self.engine) as session:
             try:
-                stmt = (
-                    update(IcebergTables)
-                    .where(
-                        IcebergTables.catalog_name == self.name,
-                        IcebergTables.table_namespace == from_database_name,
-                        IcebergTables.table_name == from_table_name,
+                if self.engine.dialect.supports_sane_rowcount:
+                    stmt = (
+                        update(IcebergTables)
+                        .where(
+                            IcebergTables.catalog_name == self.name,
+                            IcebergTables.table_namespace == from_database_name,
+                            IcebergTables.table_name == from_table_name,
+                        )
+                        .values(table_namespace=to_database_name, table_name=to_table_name)
                     )
-                    .values(table_namespace=to_database_name, table_name=to_table_name)
-                )
-                result = session.execute(stmt)
-                if result.rowcount < 1:
-                    raise NoSuchTableError(f"Table does not exist: {from_table_name}")
+                    result = session.execute(stmt)
+                    if result.rowcount < 1:
+                        raise NoSuchTableError(f"Table does not exist: {from_table_name}")
+                else:
+                    try:
+                        tbl = (
+                            session.query(IcebergTables)
+                            .with_for_update(of=IcebergTables)
+                            .filter(
+                                IcebergTables.catalog_name == self.name,
+                                IcebergTables.table_namespace == from_database_name,
+                                IcebergTables.table_name == from_table_name,
+                            )
+                            .one()
+                        )
+                        tbl.table_namespace = to_database_name
+                        tbl.table_name = to_table_name
+                    except NoResultFound as e:
+                        raise NoSuchTableError(f"Table does not exist: {from_table_name}") from e
                 session.commit()
             except IntegrityError as e:
                 raise TableAlreadyExistsError(f"Table {to_database_name}.{to_table_name} already exists") from e
@@ -329,8 +363,62 @@ def _commit_table(self, table_request: CommitTableRequest) -> CommitTableRespons

         Raises:
             NoSuchTableError: If a table with the given identifier does not exist.
+            CommitFailedException: If the commit failed.
""" - raise NotImplementedError + identifier_tuple = self.identifier_to_tuple_without_catalog( + tuple(table_request.identifier.namespace.root + [table_request.identifier.name]) + ) + current_table = self.load_table(identifier_tuple) + database_name, table_name = self.identifier_to_database_and_table(identifier_tuple, NoSuchTableError) + base_metadata = current_table.metadata + for requirement in table_request.requirements: + requirement.validate(base_metadata) + + updated_metadata = update_table_metadata(base_metadata, table_request.updates) + if updated_metadata == base_metadata: + # no changes, do nothing + return CommitTableResponse(metadata=base_metadata, metadata_location=current_table.metadata_location) + + # write new metadata + new_metadata_version = self._parse_metadata_version(current_table.metadata_location) + 1 + new_metadata_location = self._get_metadata_location(current_table.metadata.location, new_metadata_version) + self._write_metadata(updated_metadata, current_table.io, new_metadata_location) + + with Session(self.engine) as session: + if self.engine.dialect.supports_sane_rowcount: + stmt = ( + update(IcebergTables) + .where( + IcebergTables.catalog_name == self.name, + IcebergTables.table_namespace == database_name, + IcebergTables.table_name == table_name, + IcebergTables.metadata_location == current_table.metadata_location, + ) + .values(metadata_location=new_metadata_location, previous_metadata_location=current_table.metadata_location) + ) + result = session.execute(stmt) + if result.rowcount < 1: + raise CommitFailedException(f"Table has been updated by another process: {database_name}.{table_name}") + else: + try: + tbl = ( + session.query(IcebergTables) + .with_for_update(of=IcebergTables) + .filter( + IcebergTables.catalog_name == self.name, + IcebergTables.table_namespace == database_name, + IcebergTables.table_name == table_name, + IcebergTables.metadata_location == current_table.metadata_location, + ) + .one() + ) + tbl.metadata_location = new_metadata_location + tbl.previous_metadata_location = current_table.metadata_location + except NoResultFound as e: + raise CommitFailedException(f"Table has been updated by another process: {database_name}.{table_name}") from e + session.commit() + + return CommitTableResponse(metadata=updated_metadata, metadata_location=new_metadata_location) def _namespace_exists(self, identifier: Union[str, Identifier]) -> bool: namespace = self.identifier_to_database(identifier) diff --git a/tests/catalog/test_sql.py b/tests/catalog/test_sql.py index 95dc24ad15..8bf921aa4d 100644 --- a/tests/catalog/test_sql.py +++ b/tests/catalog/test_sql.py @@ -42,6 +42,7 @@ SortOrder, ) from pyiceberg.transforms import IdentityTransform +from pyiceberg.types import IntegerType @pytest.fixture(name="warehouse", scope="session") @@ -87,6 +88,19 @@ def catalog_sqlite(warehouse: Path) -> Generator[SqlCatalog, None, None]: catalog.destroy_tables() +@pytest.fixture(scope="module") +def catalog_sqlite_without_rowcount(warehouse: Path) -> Generator[SqlCatalog, None, None]: + props = { + "uri": "sqlite:////tmp/sql-catalog.db", + "warehouse": f"file://{warehouse}", + } + catalog = SqlCatalog("test_sql_catalog", **props) + catalog.engine.dialect.supports_sane_rowcount = False + catalog.create_tables() + yield catalog + catalog.destroy_tables() + + def test_creation_with_no_uri() -> None: with pytest.raises(NoSuchPropertyException): SqlCatalog("test_ddb_catalog", not_uri="unused") @@ -305,6 +319,7 @@ def test_load_table_from_self_identifier(catalog: 
SqlCatalog, table_schema_neste [ lazy_fixture('catalog_memory'), lazy_fixture('catalog_sqlite'), + lazy_fixture('catalog_sqlite_without_rowcount'), ], ) def test_drop_table(catalog: SqlCatalog, table_schema_nested: Schema, random_identifier: Identifier) -> None: @@ -322,6 +337,7 @@ def test_drop_table(catalog: SqlCatalog, table_schema_nested: Schema, random_ide [ lazy_fixture('catalog_memory'), lazy_fixture('catalog_sqlite'), + lazy_fixture('catalog_sqlite_without_rowcount'), ], ) def test_drop_table_from_self_identifier(catalog: SqlCatalog, table_schema_nested: Schema, random_identifier: Identifier) -> None: @@ -341,6 +357,7 @@ def test_drop_table_from_self_identifier(catalog: SqlCatalog, table_schema_neste [ lazy_fixture('catalog_memory'), lazy_fixture('catalog_sqlite'), + lazy_fixture('catalog_sqlite_without_rowcount'), ], ) def test_drop_table_that_does_not_exist(catalog: SqlCatalog, random_identifier: Identifier) -> None: @@ -353,6 +370,7 @@ def test_drop_table_that_does_not_exist(catalog: SqlCatalog, random_identifier: [ lazy_fixture('catalog_memory'), lazy_fixture('catalog_sqlite'), + lazy_fixture('catalog_sqlite_without_rowcount'), ], ) def test_rename_table( @@ -377,6 +395,7 @@ def test_rename_table( [ lazy_fixture('catalog_memory'), lazy_fixture('catalog_sqlite'), + lazy_fixture('catalog_sqlite_without_rowcount'), ], ) def test_rename_table_from_self_identifier( @@ -403,6 +422,7 @@ def test_rename_table_from_self_identifier( [ lazy_fixture('catalog_memory'), lazy_fixture('catalog_sqlite'), + lazy_fixture('catalog_sqlite_without_rowcount'), ], ) def test_rename_table_to_existing_one( @@ -425,6 +445,7 @@ def test_rename_table_to_existing_one( [ lazy_fixture('catalog_memory'), lazy_fixture('catalog_sqlite'), + lazy_fixture('catalog_sqlite_without_rowcount'), ], ) def test_rename_missing_table(catalog: SqlCatalog, random_identifier: Identifier, another_random_identifier: Identifier) -> None: @@ -439,6 +460,7 @@ def test_rename_missing_table(catalog: SqlCatalog, random_identifier: Identifier [ lazy_fixture('catalog_memory'), lazy_fixture('catalog_sqlite'), + lazy_fixture('catalog_sqlite_without_rowcount'), ], ) def test_rename_table_to_missing_namespace( @@ -664,3 +686,36 @@ def test_update_namespace_properties(catalog: SqlCatalog, database_name: str) -> else: assert k in update_report.removed assert "updated test description" == catalog.load_namespace_properties(database_name)["comment"] + + +@pytest.mark.parametrize( + 'catalog', + [ + lazy_fixture('catalog_memory'), + lazy_fixture('catalog_sqlite'), + lazy_fixture('catalog_sqlite_without_rowcount'), + ], +) +def test_commit_table(catalog: SqlCatalog, table_schema_nested: Schema, random_identifier: Identifier) -> None: + database_name, _table_name = random_identifier + catalog.create_namespace(database_name) + table = catalog.create_table(random_identifier, table_schema_nested) + + assert catalog._parse_metadata_version(table.metadata_location) == 0 + assert table.metadata.current_schema_id == 0 + + transaction = table.transaction() + update = transaction.update_schema() + update.add_column(path="b", field_type=IntegerType()) + update.commit() + transaction.commit_transaction() + + updated_table_metadata = table.metadata + + assert catalog._parse_metadata_version(table.metadata_location) == 1 + assert updated_table_metadata.current_schema_id == 1 + assert len(updated_table_metadata.schemas) == 2 + new_schema = next(schema for schema in updated_table_metadata.schemas if schema.schema_id == 1) + assert new_schema + assert new_schema 
== update._apply() + assert new_schema.find_field("b").field_type == IntegerType() From 06e2b2df48e9ff383a09550e3e307d290fe39ddc Mon Sep 17 00:00:00 2001 From: Fokko Driesprong Date: Wed, 17 Jan 2024 16:30:43 +0100 Subject: [PATCH 025/169] Add Hive integration tests (#207) * Add Hive for CI * Add Hive integration tests * Add missing licenses * Fix * Remove Arrow * Add catalog * Update test suite * Whitespace --- dev/Dockerfile | 10 +- dev/docker-compose-integration.yml | 14 + dev/hive/Dockerfile | 34 ++ dev/hive/core-site.xml | 53 ++ dev/provision.py | 527 +++++++++--------- dev/spark-defaults.conf | 20 +- tests/integration/__init__.py | 16 + .../test_catalogs.py} | 145 ++--- .../test_rest_manifest.py} | 0 .../test_rest_schema.py} | 0 10 files changed, 466 insertions(+), 353 deletions(-) create mode 100644 dev/hive/Dockerfile create mode 100644 dev/hive/core-site.xml create mode 100644 tests/integration/__init__.py rename tests/{test_integration.py => integration/test_catalogs.py} (70%) rename tests/{test_integration_manifest.py => integration/test_rest_manifest.py} (100%) rename tests/{test_integration_schema.py => integration/test_rest_schema.py} (100%) diff --git a/dev/Dockerfile b/dev/Dockerfile index 1f001f5c12..44783d0809 100644 --- a/dev/Dockerfile +++ b/dev/Dockerfile @@ -38,9 +38,8 @@ WORKDIR ${SPARK_HOME} ENV SPARK_VERSION=3.4.2 ENV ICEBERG_SPARK_RUNTIME_VERSION=3.4_2.12 -ENV ICEBERG_VERSION=1.4.0 -ENV AWS_SDK_VERSION=2.20.18 -ENV PYICEBERG_VERSION=0.4.0 +ENV ICEBERG_VERSION=1.4.2 +ENV PYICEBERG_VERSION=0.5.1 RUN curl --retry 3 -s -C - https://dlcdn.apache.org/spark/spark-${SPARK_VERSION}/spark-${SPARK_VERSION}-bin-hadoop3.tgz -o spark-${SPARK_VERSION}-bin-hadoop3.tgz \ && tar xzf spark-${SPARK_VERSION}-bin-hadoop3.tgz --directory /opt/spark --strip-components 1 \ @@ -51,8 +50,7 @@ RUN curl -s https://repo1.maven.org/maven2/org/apache/iceberg/iceberg-spark-runt && mv iceberg-spark-runtime-${ICEBERG_SPARK_RUNTIME_VERSION}-${ICEBERG_VERSION}.jar /opt/spark/jars # Download AWS bundle -RUN curl -s https://repo1.maven.org/maven2/org/apache/iceberg/iceberg-aws-bundle/${ICEBERG_VERSION}/iceberg-aws-bundle-${ICEBERG_VERSION}.jar -Lo iceberg-aws-bundle-${ICEBERG_VERSION}.jar \ - && mv iceberg-aws-bundle-${ICEBERG_VERSION}.jar /opt/spark/jars +RUN curl -s https://repo1.maven.org/maven2/org/apache/iceberg/iceberg-aws-bundle/${ICEBERG_VERSION}/iceberg-aws-bundle-${ICEBERG_VERSION}.jar -Lo /opt/spark/jars/iceberg-aws-bundle-${ICEBERG_VERSION}.jar COPY spark-defaults.conf /opt/spark/conf ENV PATH="/opt/spark/sbin:/opt/spark/bin:${PATH}" @@ -62,7 +60,7 @@ RUN chmod u+x /opt/spark/sbin/* && \ RUN pip3 install -q ipython -RUN pip3 install "pyiceberg[s3fs]==${PYICEBERG_VERSION}" +RUN pip3 install "pyiceberg[s3fs,hive]==${PYICEBERG_VERSION}" COPY entrypoint.sh . COPY provision.py . 
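[Editor's note: once the compose services are up, the new Hive metastore is reachable from pyiceberg just like the REST catalog; the provisioning script further down in this patch connects with the exact properties shown here. Host names assume the compose network; swap in the published localhost ports when connecting from outside the containers.]

```python
from pyiceberg.catalog import load_catalog

# Mirrors dev/provision.py below; credentials come from the MinIO service
# defined in docker-compose-integration.yml.
catalog = load_catalog(
    "hive",
    **{
        "type": "hive",
        "uri": "http://hive:9083",
        "s3.endpoint": "http://minio:9000",
        "s3.access-key-id": "admin",
        "s3.secret-access-key": "password",
    },
)
```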
diff --git a/dev/docker-compose-integration.yml b/dev/docker-compose-integration.yml index 658bd698c9..fccdcdc757 100644 --- a/dev/docker-compose-integration.yml +++ b/dev/docker-compose-integration.yml @@ -25,6 +25,7 @@ services: iceberg_net: depends_on: - rest + - hive - minio volumes: - ./warehouse:/home/iceberg/warehouse @@ -37,6 +38,7 @@ services: - 8080:8080 links: - rest:rest + - hive:hive - minio:minio rest: image: tabulario/iceberg-rest @@ -85,5 +87,17 @@ services: /usr/bin/mc policy set public minio/warehouse; tail -f /dev/null " + hive: + build: hive/ + container_name: hive + hostname: hive + networks: + iceberg_net: + ports: + - 9083:9083 + environment: + SERVICE_NAME: "metastore" + SERVICE_OPTS: "-Dmetastore.warehouse.dir=s3a://warehouse/hive/" + networks: iceberg_net: diff --git a/dev/hive/Dockerfile b/dev/hive/Dockerfile new file mode 100644 index 0000000000..ee633934c3 --- /dev/null +++ b/dev/hive/Dockerfile @@ -0,0 +1,34 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +FROM openjdk:8-jre-slim AS build + +RUN apt-get update -qq && apt-get -qq -y install curl + +ENV AWSSDK_VERSION=2.20.18 +ENV HADOOP_VERSION=3.1.0 + +RUN curl https://repo1.maven.org/maven2/com/amazonaws/aws-java-sdk-bundle/1.11.271/aws-java-sdk-bundle-1.11.271.jar -Lo /tmp/aws-java-sdk-bundle-1.11.271.jar +RUN curl https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-aws/${HADOOP_VERSION}/hadoop-aws-${HADOOP_VERSION}.jar -Lo /tmp/hadoop-aws-${HADOOP_VERSION}.jar + + +FROM apache/hive:3.1.3 + +ENV AWSSDK_VERSION=2.20.18 +ENV HADOOP_VERSION=3.1.0 + +COPY --from=build /tmp/hadoop-aws-${HADOOP_VERSION}.jar /opt/hive/lib/hadoop-aws-${HADOOP_VERSION}.jar +COPY --from=build /tmp/aws-java-sdk-bundle-1.11.271.jar /opt/hive/lib/aws-java-sdk-bundle-1.11.271.jar +COPY core-site.xml /opt/hadoop/etc/hadoop/core-site.xml diff --git a/dev/hive/core-site.xml b/dev/hive/core-site.xml new file mode 100644 index 0000000000..b77332b83b --- /dev/null +++ b/dev/hive/core-site.xml @@ -0,0 +1,53 @@ + + + + + + + fs.defaultFS + s3a://warehouse/hive + + + fs.s3a.impl + org.apache.hadoop.fs.s3a.S3AFileSystem + + + fs.s3a.fast.upload + true + + + fs.s3a.endpoint + http://minio:9000 + + + fs.s3a.access.key + admin + + + fs.s3a.secret.key + password + + + fs.s3a.connection.ssl.enabled + false + + + fs.s3a.path.style.access + true + + diff --git a/dev/provision.py b/dev/provision.py index 9917cd3f20..e5048d2fa5 100644 --- a/dev/provision.py +++ b/dev/provision.py @@ -24,318 +24,299 @@ spark = SparkSession.builder.getOrCreate() -spark.sql( - """ - CREATE DATABASE IF NOT EXISTS default; -""" -) - -schema = Schema( - NestedField(field_id=1, name="uuid_col", field_type=UUIDType(), required=False), - NestedField(field_id=2, name="fixed_col", field_type=FixedType(25), required=False), -) - -catalog = 
load_catalog( - "local", - **{ - "type": "rest", - "uri": "http://rest:8181", - "s3.endpoint": "http://minio:9000", - "s3.access-key-id": "admin", - "s3.secret-access-key": "password", - }, -) - -catalog.create_table(identifier="default.test_uuid_and_fixed_unpartitioned", schema=schema) - -spark.sql( - """ - INSERT INTO default.test_uuid_and_fixed_unpartitioned VALUES - ('102cb62f-e6f8-4eb0-9973-d9b012ff0967', CAST('1234567890123456789012345' AS BINARY)), - ('ec33e4b2-a834-4cc3-8c4a-a1d3bfc2f226', CAST('1231231231231231231231231' AS BINARY)), - ('639cccce-c9d2-494a-a78c-278ab234f024', CAST('12345678901234567ass12345' AS BINARY)), - ('c1b0d8e0-0b0e-4b1e-9b0a-0e0b0d0c0a0b', CAST('asdasasdads12312312312111' AS BINARY)), - ('923dae77-83d6-47cd-b4b0-d383e64ee57e', CAST('qweeqwwqq1231231231231111' AS BINARY)); +catalogs = { + 'rest': load_catalog( + "rest", + **{ + "type": "rest", + "uri": "http://rest:8181", + "s3.endpoint": "http://minio:9000", + "s3.access-key-id": "admin", + "s3.secret-access-key": "password", + }, + ), + 'hive': load_catalog( + "hive", + **{ + "type": "hive", + "uri": "http://hive:9083", + "s3.endpoint": "http://minio:9000", + "s3.access-key-id": "admin", + "s3.secret-access-key": "password", + }, + ), +} + +for catalog_name, catalog in catalogs.items(): + spark.sql( + f""" + CREATE DATABASE IF NOT EXISTS {catalog_name}.default; """ -) + ) -spark.sql( - """ - CREATE OR REPLACE TABLE default.test_null_nan - USING iceberg - AS SELECT - 1 AS idx, - float('NaN') AS col_numeric -UNION ALL SELECT - 2 AS idx, - null AS col_numeric -UNION ALL SELECT - 3 AS idx, - 1 AS col_numeric -""" -) - -spark.sql( - """ - CREATE OR REPLACE TABLE default.test_null_nan_rewritten - USING iceberg - AS SELECT * FROM default.test_null_nan -""" -) + schema = Schema( + NestedField(field_id=1, name="uuid_col", field_type=UUIDType(), required=False), + NestedField(field_id=2, name="fixed_col", field_type=FixedType(25), required=False), + ) -spark.sql( - """ -CREATE OR REPLACE TABLE default.test_limit as - SELECT * LATERAL VIEW explode(ARRAY(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)) AS idx; -""" -) + catalog.create_table(identifier="default.test_uuid_and_fixed_unpartitioned", schema=schema) -spark.sql( - """ -CREATE OR REPLACE TABLE default.test_positional_mor_deletes ( - dt date, - number integer, - letter string -) -USING iceberg -TBLPROPERTIES ( - 'write.delete.mode'='merge-on-read', - 'write.update.mode'='merge-on-read', - 'write.merge.mode'='merge-on-read', - 'format-version'='2' -); -""" -) - -# Partitioning is not really needed, but there is a bug: -# https://github.com/apache/iceberg/pull/7685 -spark.sql( - """ - ALTER TABLE default.test_positional_mor_deletes ADD PARTITION FIELD years(dt) AS dt_years -""" -) + spark.sql( + f""" + INSERT INTO {catalog_name}.default.test_uuid_and_fixed_unpartitioned VALUES + ('102cb62f-e6f8-4eb0-9973-d9b012ff0967', CAST('1234567890123456789012345' AS BINARY)), + ('ec33e4b2-a834-4cc3-8c4a-a1d3bfc2f226', CAST('1231231231231231231231231' AS BINARY)), + ('639cccce-c9d2-494a-a78c-278ab234f024', CAST('12345678901234567ass12345' AS BINARY)), + ('c1b0d8e0-0b0e-4b1e-9b0a-0e0b0d0c0a0b', CAST('asdasasdads12312312312111' AS BINARY)), + ('923dae77-83d6-47cd-b4b0-d383e64ee57e', CAST('qweeqwwqq1231231231231111' AS BINARY)); + """ + ) -spark.sql( - """ -INSERT INTO default.test_positional_mor_deletes -VALUES - (CAST('2023-03-01' AS date), 1, 'a'), - (CAST('2023-03-02' AS date), 2, 'b'), - (CAST('2023-03-03' AS date), 3, 'c'), - (CAST('2023-03-04' AS date), 4, 'd'), - (CAST('2023-03-05' AS 
date), 5, 'e'), - (CAST('2023-03-06' AS date), 6, 'f'), - (CAST('2023-03-07' AS date), 7, 'g'), - (CAST('2023-03-08' AS date), 8, 'h'), - (CAST('2023-03-09' AS date), 9, 'i'), - (CAST('2023-03-10' AS date), 10, 'j'), - (CAST('2023-03-11' AS date), 11, 'k'), - (CAST('2023-03-12' AS date), 12, 'l'); -""" -) - -spark.sql( - """ -ALTER TABLE default.test_positional_mor_deletes CREATE TAG tag_12 + spark.sql( + f""" + CREATE OR REPLACE TABLE {catalog_name}.default.test_null_nan + USING iceberg + AS SELECT + 1 AS idx, + float('NaN') AS col_numeric + UNION ALL SELECT + 2 AS idx, + null AS col_numeric + UNION ALL SELECT + 3 AS idx, + 1 AS col_numeric """ -) + ) -spark.sql( - """ -ALTER TABLE default.test_positional_mor_deletes CREATE BRANCH without_5 + spark.sql( + f""" + CREATE OR REPLACE TABLE {catalog_name}.default.test_null_nan_rewritten + USING iceberg + AS SELECT * FROM default.test_null_nan """ -) + ) -spark.sql( - """ -DELETE FROM default.test_positional_mor_deletes.branch_without_5 WHERE number = 5 + spark.sql( + f""" + CREATE OR REPLACE TABLE {catalog_name}.default.test_limit as + SELECT * LATERAL VIEW explode(ARRAY(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)) AS idx; """ -) + ) -spark.sql( + spark.sql( + f""" + CREATE OR REPLACE TABLE {catalog_name}.default.test_positional_mor_deletes ( + dt date, + number integer, + letter string + ) + USING iceberg + TBLPROPERTIES ( + 'write.delete.mode'='merge-on-read', + 'write.update.mode'='merge-on-read', + 'write.merge.mode'='merge-on-read', + 'format-version'='2' + ); """ -DELETE FROM default.test_positional_mor_deletes WHERE number = 9 -""" -) + ) -spark.sql( - """ - CREATE OR REPLACE TABLE default.test_positional_mor_double_deletes ( - dt date, - number integer, - letter string - ) - USING iceberg - TBLPROPERTIES ( - 'write.delete.mode'='merge-on-read', - 'write.update.mode'='merge-on-read', - 'write.merge.mode'='merge-on-read', - 'format-version'='2' - ); -""" -) - -# Partitioning is not really needed, but there is a bug: -# https://github.com/apache/iceberg/pull/7685 -spark.sql( - """ - ALTER TABLE default.test_positional_mor_double_deletes ADD PARTITION FIELD years(dt) AS dt_years -""" -) + # Partitioning is not really needed, but there is a bug: + # https://github.com/apache/iceberg/pull/7685 + spark.sql(f"ALTER TABLE {catalog_name}.default.test_positional_mor_deletes ADD PARTITION FIELD years(dt) AS dt_years") -spark.sql( - """ -INSERT INTO default.test_positional_mor_double_deletes -VALUES - (CAST('2023-03-01' AS date), 1, 'a'), - (CAST('2023-03-02' AS date), 2, 'b'), - (CAST('2023-03-03' AS date), 3, 'c'), - (CAST('2023-03-04' AS date), 4, 'd'), - (CAST('2023-03-05' AS date), 5, 'e'), - (CAST('2023-03-06' AS date), 6, 'f'), - (CAST('2023-03-07' AS date), 7, 'g'), - (CAST('2023-03-08' AS date), 8, 'h'), - (CAST('2023-03-09' AS date), 9, 'i'), - (CAST('2023-03-10' AS date), 10, 'j'), - (CAST('2023-03-11' AS date), 11, 'k'), - (CAST('2023-03-12' AS date), 12, 'l'); -""" -) - -spark.sql( + spark.sql( + f""" + INSERT INTO {catalog_name}.default.test_positional_mor_deletes + VALUES + (CAST('2023-03-01' AS date), 1, 'a'), + (CAST('2023-03-02' AS date), 2, 'b'), + (CAST('2023-03-03' AS date), 3, 'c'), + (CAST('2023-03-04' AS date), 4, 'd'), + (CAST('2023-03-05' AS date), 5, 'e'), + (CAST('2023-03-06' AS date), 6, 'f'), + (CAST('2023-03-07' AS date), 7, 'g'), + (CAST('2023-03-08' AS date), 8, 'h'), + (CAST('2023-03-09' AS date), 9, 'i'), + (CAST('2023-03-10' AS date), 10, 'j'), + (CAST('2023-03-11' AS date), 11, 'k'), + (CAST('2023-03-12' AS date), 12, 'l'); 
""" - DELETE FROM default.test_positional_mor_double_deletes WHERE number = 9 -""" -) + ) + + spark.sql(f"ALTER TABLE {catalog_name}.default.test_positional_mor_deletes CREATE TAG tag_12") + + spark.sql(f"ALTER TABLE {catalog_name}.default.test_positional_mor_deletes CREATE BRANCH without_5") + + spark.sql(f"DELETE FROM {catalog_name}.default.test_positional_mor_deletes.branch_without_5 WHERE number = 5") + + spark.sql(f"DELETE FROM {catalog_name}.default.test_positional_mor_deletes WHERE number = 9") -spark.sql( - """ - DELETE FROM default.test_positional_mor_double_deletes WHERE letter == 'f' -""" -) - -all_types_dataframe = ( - spark.range(0, 5, 1, 5) - .withColumnRenamed("id", "longCol") - .withColumn("intCol", expr("CAST(longCol AS INT)")) - .withColumn("floatCol", expr("CAST(longCol AS FLOAT)")) - .withColumn("doubleCol", expr("CAST(longCol AS DOUBLE)")) - .withColumn("dateCol", date_add(current_date(), 1)) - .withColumn("timestampCol", expr("TO_TIMESTAMP(dateCol)")) - .withColumn("stringCol", expr("CAST(dateCol AS STRING)")) - .withColumn("booleanCol", expr("longCol > 5")) - .withColumn("binaryCol", expr("CAST(longCol AS BINARY)")) - .withColumn("byteCol", expr("CAST(longCol AS BYTE)")) - .withColumn("decimalCol", expr("CAST(longCol AS DECIMAL(10, 2))")) - .withColumn("shortCol", expr("CAST(longCol AS SHORT)")) - .withColumn("mapCol", expr("MAP(longCol, decimalCol)")) - .withColumn("arrayCol", expr("ARRAY(longCol)")) - .withColumn("structCol", expr("STRUCT(mapCol, arrayCol)")) -) - -all_types_dataframe.writeTo("default.test_all_types").tableProperty("format-version", "2").partitionedBy( - "intCol" -).createOrReplace() - -for table_name, partition in [ - ("test_partitioned_by_identity", "ts"), - ("test_partitioned_by_years", "years(dt)"), - ("test_partitioned_by_months", "months(dt)"), - ("test_partitioned_by_days", "days(ts)"), - ("test_partitioned_by_hours", "hours(ts)"), - ("test_partitioned_by_truncate", "truncate(1, letter)"), - ("test_partitioned_by_bucket", "bucket(16, number)"), -]: spark.sql( f""" - CREATE OR REPLACE TABLE default.{table_name} ( + CREATE OR REPLACE TABLE {catalog_name}.default.test_positional_mor_double_deletes ( dt date, - ts timestamp, number integer, letter string ) - USING iceberg; + USING iceberg + TBLPROPERTIES ( + 'write.delete.mode'='merge-on-read', + 'write.update.mode'='merge-on-read', + 'write.merge.mode'='merge-on-read', + 'format-version'='2' + ); """ ) - spark.sql(f"ALTER TABLE default.{table_name} ADD PARTITION FIELD {partition}") + # Partitioning is not really needed, but there is a bug: + # https://github.com/apache/iceberg/pull/7685 + spark.sql(f"ALTER TABLE {catalog_name}.default.test_positional_mor_double_deletes ADD PARTITION FIELD years(dt) AS dt_years") spark.sql( f""" - INSERT INTO default.{table_name} + INSERT INTO {catalog_name}.default.test_positional_mor_double_deletes VALUES - (CAST('2022-03-01' AS date), CAST('2022-03-01 01:22:00' AS timestamp), 1, 'a'), - (CAST('2022-03-02' AS date), CAST('2022-03-02 02:22:00' AS timestamp), 2, 'b'), - (CAST('2022-03-03' AS date), CAST('2022-03-03 03:22:00' AS timestamp), 3, 'c'), - (CAST('2022-03-04' AS date), CAST('2022-03-04 04:22:00' AS timestamp), 4, 'd'), - (CAST('2023-03-05' AS date), CAST('2023-03-05 05:22:00' AS timestamp), 5, 'e'), - (CAST('2023-03-06' AS date), CAST('2023-03-06 06:22:00' AS timestamp), 6, 'f'), - (CAST('2023-03-07' AS date), CAST('2023-03-07 07:22:00' AS timestamp), 7, 'g'), - (CAST('2023-03-08' AS date), CAST('2023-03-08 08:22:00' AS timestamp), 8, 'h'), - 
(CAST('2023-03-09' AS date), CAST('2023-03-09 09:22:00' AS timestamp), 9, 'i'), - (CAST('2023-03-10' AS date), CAST('2023-03-10 10:22:00' AS timestamp), 10, 'j'), - (CAST('2023-03-11' AS date), CAST('2023-03-11 11:22:00' AS timestamp), 11, 'k'), - (CAST('2023-03-12' AS date), CAST('2023-03-12 12:22:00' AS timestamp), 12, 'l'); + (CAST('2023-03-01' AS date), 1, 'a'), + (CAST('2023-03-02' AS date), 2, 'b'), + (CAST('2023-03-03' AS date), 3, 'c'), + (CAST('2023-03-04' AS date), 4, 'd'), + (CAST('2023-03-05' AS date), 5, 'e'), + (CAST('2023-03-06' AS date), 6, 'f'), + (CAST('2023-03-07' AS date), 7, 'g'), + (CAST('2023-03-08' AS date), 8, 'h'), + (CAST('2023-03-09' AS date), 9, 'i'), + (CAST('2023-03-10' AS date), 10, 'j'), + (CAST('2023-03-11' AS date), 11, 'k'), + (CAST('2023-03-12' AS date), 12, 'l'); """ ) -# There is an issue with CREATE OR REPLACE -# https://github.com/apache/iceberg/issues/8756 -spark.sql( + spark.sql(f"DELETE FROM {catalog_name}.default.test_positional_mor_double_deletes WHERE number = 9") + + spark.sql(f"DELETE FROM {catalog_name}.default.test_positional_mor_double_deletes WHERE letter == 'f'") + + all_types_dataframe = ( + spark.range(0, 5, 1, 5) + .withColumnRenamed("id", "longCol") + .withColumn("intCol", expr("CAST(longCol AS INT)")) + .withColumn("floatCol", expr("CAST(longCol AS FLOAT)")) + .withColumn("doubleCol", expr("CAST(longCol AS DOUBLE)")) + .withColumn("dateCol", date_add(current_date(), 1)) + .withColumn("timestampCol", expr("TO_TIMESTAMP(dateCol)")) + .withColumn("stringCol", expr("CAST(dateCol AS STRING)")) + .withColumn("booleanCol", expr("longCol > 5")) + .withColumn("binaryCol", expr("CAST(longCol AS BINARY)")) + .withColumn("byteCol", expr("CAST(longCol AS BYTE)")) + .withColumn("decimalCol", expr("CAST(longCol AS DECIMAL(10, 2))")) + .withColumn("shortCol", expr("CAST(longCol AS SHORT)")) + .withColumn("mapCol", expr("MAP(longCol, decimalCol)")) + .withColumn("arrayCol", expr("ARRAY(longCol)")) + .withColumn("structCol", expr("STRUCT(mapCol, arrayCol)")) + ) + + all_types_dataframe.writeTo(f"{catalog_name}.default.test_all_types").tableProperty("format-version", "2").partitionedBy( + "intCol" + ).createOrReplace() + + for table_name, partition in [ + ("test_partitioned_by_identity", "ts"), + ("test_partitioned_by_years", "years(dt)"), + ("test_partitioned_by_months", "months(dt)"), + ("test_partitioned_by_days", "days(ts)"), + ("test_partitioned_by_hours", "hours(ts)"), + ("test_partitioned_by_truncate", "truncate(1, letter)"), + ("test_partitioned_by_bucket", "bucket(16, number)"), + ]: + spark.sql( + f""" + CREATE OR REPLACE TABLE {catalog_name}.default.{table_name} ( + dt date, + ts timestamp, + number integer, + letter string + ) + USING iceberg; + """ + ) + + spark.sql(f"ALTER TABLE {catalog_name}.default.{table_name} ADD PARTITION FIELD {partition}") + + spark.sql( + f""" + INSERT INTO {catalog_name}.default.{table_name} + VALUES + (CAST('2022-03-01' AS date), CAST('2022-03-01 01:22:00' AS timestamp), 1, 'a'), + (CAST('2022-03-02' AS date), CAST('2022-03-02 02:22:00' AS timestamp), 2, 'b'), + (CAST('2022-03-03' AS date), CAST('2022-03-03 03:22:00' AS timestamp), 3, 'c'), + (CAST('2022-03-04' AS date), CAST('2022-03-04 04:22:00' AS timestamp), 4, 'd'), + (CAST('2023-03-05' AS date), CAST('2023-03-05 05:22:00' AS timestamp), 5, 'e'), + (CAST('2023-03-06' AS date), CAST('2023-03-06 06:22:00' AS timestamp), 6, 'f'), + (CAST('2023-03-07' AS date), CAST('2023-03-07 07:22:00' AS timestamp), 7, 'g'), + (CAST('2023-03-08' AS date), CAST('2023-03-08 
08:22:00' AS timestamp), 8, 'h'), + (CAST('2023-03-09' AS date), CAST('2023-03-09 09:22:00' AS timestamp), 9, 'i'), + (CAST('2023-03-10' AS date), CAST('2023-03-10 10:22:00' AS timestamp), 10, 'j'), + (CAST('2023-03-11' AS date), CAST('2023-03-11 11:22:00' AS timestamp), 11, 'k'), + (CAST('2023-03-12' AS date), CAST('2023-03-12 12:22:00' AS timestamp), 12, 'l'); + """ + ) + + # There is an issue with CREATE OR REPLACE + # https://github.com/apache/iceberg/issues/8756 + spark.sql(f"DROP TABLE IF EXISTS {catalog_name}.default.test_table_version") + + spark.sql( + f""" + CREATE TABLE {catalog_name}.default.test_table_version ( + dt date, + number integer, + letter string + ) + USING iceberg + TBLPROPERTIES ( + 'format-version'='1' + ); """ -DROP TABLE IF EXISTS default.test_table_version -""" -) + ) -spark.sql( + spark.sql( + f""" + CREATE TABLE {catalog_name}.default.test_table_sanitized_character ( + `letter/abc` string + ) + USING iceberg + TBLPROPERTIES ( + 'format-version'='1' + ); """ -CREATE TABLE default.test_table_version ( - dt date, - number integer, - letter string -) -USING iceberg -TBLPROPERTIES ( - 'format-version'='1' -); -""" -) - -spark.sql( + ) + + spark.sql( + f""" + INSERT INTO {catalog_name}.default.test_table_sanitized_character + VALUES + ('123') """ -CREATE TABLE default.test_table_sanitized_character ( - `letter/abc` string -) -USING iceberg -TBLPROPERTIES ( - 'format-version'='1' -); -""" -) - -spark.sql( + ) + + spark.sql( + f""" + INSERT INTO {catalog_name}.default.test_table_sanitized_character + VALUES + ('123') """ -INSERT INTO default.test_table_sanitized_character -VALUES - ('123') -""" -) + ) -spark.sql( + spark.sql( + f""" + CREATE TABLE {catalog_name}.default.test_table_add_column ( + a string + ) + USING iceberg """ -CREATE TABLE default.test_table_add_column ( - a string -) -USING iceberg -""" -) + ) -spark.sql("INSERT INTO default.test_table_add_column VALUES ('1')") + spark.sql(f"INSERT INTO {catalog_name}.default.test_table_add_column VALUES ('1')") -spark.sql( - """ -ALTER TABLE default.test_table_add_column ADD COLUMN b string -""" -) + spark.sql(f"ALTER TABLE {catalog_name}.default.test_table_add_column ADD COLUMN b string") -spark.sql("INSERT INTO default.test_table_add_column VALUES ('2', '2')") + spark.sql(f"INSERT INTO {catalog_name}.default.test_table_add_column VALUES ('2', '2')") diff --git a/dev/spark-defaults.conf b/dev/spark-defaults.conf index 56c345432a..2316336fea 100644 --- a/dev/spark-defaults.conf +++ b/dev/spark-defaults.conf @@ -16,13 +16,19 @@ # spark.sql.extensions org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions -spark.sql.catalog.demo org.apache.iceberg.spark.SparkCatalog -spark.sql.catalog.demo.type rest -spark.sql.catalog.demo.uri http://rest:8181 -spark.sql.catalog.demo.io-impl org.apache.iceberg.aws.s3.S3FileIO -spark.sql.catalog.demo.warehouse s3://warehouse/wh/ -spark.sql.catalog.demo.s3.endpoint http://minio:9000 -spark.sql.defaultCatalog demo +spark.sql.catalog.rest org.apache.iceberg.spark.SparkCatalog +spark.sql.catalog.rest.type rest +spark.sql.catalog.rest.uri http://rest:8181 +spark.sql.catalog.rest.io-impl org.apache.iceberg.aws.s3.S3FileIO +spark.sql.catalog.rest.warehouse s3://warehouse/rest/ +spark.sql.catalog.rest.s3.endpoint http://minio:9000 +spark.sql.catalog.hive org.apache.iceberg.spark.SparkCatalog +spark.sql.catalog.hive.type hive +spark.sql.catalog.hive.uri http://hive:9083 +spark.sql.catalog.hive.io-impl org.apache.iceberg.aws.s3.S3FileIO +spark.sql.catalog.hive.warehouse 
s3://warehouse/hive/ +spark.sql.catalog.hive.s3.endpoint http://minio:9000 +spark.sql.defaultCatalog rest spark.eventLog.enabled true spark.eventLog.dir /home/iceberg/spark-events spark.history.fs.logDirectory /home/iceberg/spark-events diff --git a/tests/integration/__init__.py b/tests/integration/__init__.py new file mode 100644 index 0000000000..13a83393a9 --- /dev/null +++ b/tests/integration/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/test_integration.py b/tests/integration/test_catalogs.py similarity index 70% rename from tests/test_integration.py rename to tests/integration/test_catalogs.py index 2a173be3b3..3fbdb69ebe 100644 --- a/tests/test_integration.py +++ b/tests/integration/test_catalogs.py @@ -25,6 +25,7 @@ from pyarrow.fs import S3FileSystem from pyiceberg.catalog import Catalog, load_catalog +from pyiceberg.catalog.hive import HiveCatalog from pyiceberg.exceptions import NoSuchTableError from pyiceberg.expressions import ( And, @@ -50,7 +51,7 @@ @pytest.fixture() -def catalog() -> Catalog: +def catalog_rest() -> Catalog: return load_catalog( "local", **{ @@ -64,40 +65,23 @@ def catalog() -> Catalog: @pytest.fixture() -def table_test_null_nan(catalog: Catalog) -> Table: - return catalog.load_table("default.test_null_nan") - - -@pytest.fixture() -def table_test_null_nan_rewritten(catalog: Catalog) -> Table: - return catalog.load_table("default.test_null_nan_rewritten") - - -@pytest.fixture() -def table_test_limit(catalog: Catalog) -> Table: - return catalog.load_table("default.test_limit") - - -@pytest.fixture() -def table_test_all_types(catalog: Catalog) -> Table: - return catalog.load_table("default.test_all_types") - - -@pytest.fixture() -def table_test_table_version(catalog: Catalog) -> Table: - return catalog.load_table("default.test_table_version") - - -@pytest.fixture() -def table_test_table_sanitized_character(catalog: Catalog) -> Table: - return catalog.load_table("default.test_table_sanitized_character") +def catalog_hive() -> Catalog: + return load_catalog( + "local", + **{ + "type": "hive", + "uri": "http://localhost:9083", + "s3.endpoint": "http://localhost:9000", + "s3.access-key-id": "admin", + "s3.secret-access-key": "password", + }, + ) TABLE_NAME = ("default", "t1") -@pytest.fixture() -def table(catalog: Catalog) -> Table: +def create_table(catalog: Catalog) -> Table: try: catalog.drop_table(TABLE_NAME) except NoSuchTableError: @@ -115,7 +99,12 @@ def table(catalog: Catalog) -> Table: @pytest.mark.integration -def test_table_properties(table: Table) -> None: +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_table_properties(catalog: Catalog) -> None: + if isinstance(catalog, HiveCatalog): + 
pytest.skip("Not yet implemented: https://github.com/apache/iceberg-python/issues/275") + table = create_table(catalog) + assert table.properties == DEFAULT_PROPERTIES with table.transaction() as transaction: @@ -137,26 +126,10 @@ def test_table_properties(table: Table) -> None: assert table.properties == DEFAULT_PROPERTIES -@pytest.fixture() -def test_positional_mor_deletes(catalog: Catalog) -> Table: - """Table that has positional deletes""" - return catalog.load_table("default.test_positional_mor_deletes") - - -@pytest.fixture() -def test_table_add_column(catalog: Catalog) -> Table: - """Table that has a new column""" - return catalog.load_table("default.test_table_add_column") - - -@pytest.fixture() -def test_positional_mor_double_deletes(catalog: Catalog) -> Table: - """Table that has multiple positional deletes""" - return catalog.load_table("default.test_positional_mor_double_deletes") - - @pytest.mark.integration -def test_pyarrow_nan(table_test_null_nan: Table) -> None: +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_pyarrow_nan(catalog: Catalog) -> None: + table_test_null_nan = catalog.load_table("default.test_null_nan") arrow_table = table_test_null_nan.scan(row_filter=IsNaN("col_numeric"), selected_fields=("idx", "col_numeric")).to_arrow() assert len(arrow_table) == 1 assert arrow_table["idx"][0].as_py() == 1 @@ -164,7 +137,9 @@ def test_pyarrow_nan(table_test_null_nan: Table) -> None: @pytest.mark.integration -def test_pyarrow_nan_rewritten(table_test_null_nan_rewritten: Table) -> None: +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_pyarrow_nan_rewritten(catalog: Catalog) -> None: + table_test_null_nan_rewritten = catalog.load_table("default.test_null_nan_rewritten") arrow_table = table_test_null_nan_rewritten.scan( row_filter=IsNaN("col_numeric"), selected_fields=("idx", "col_numeric") ).to_arrow() @@ -174,14 +149,18 @@ def test_pyarrow_nan_rewritten(table_test_null_nan_rewritten: Table) -> None: @pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) @pytest.mark.skip(reason="Fixing issues with NaN's: https://github.com/apache/arrow/issues/34162") -def test_pyarrow_not_nan_count(table_test_null_nan: Table) -> None: +def test_pyarrow_not_nan_count(catalog: Catalog) -> None: + table_test_null_nan = catalog.load_table("default.test_null_nan") not_nan = table_test_null_nan.scan(row_filter=NotNaN("col_numeric"), selected_fields=("idx",)).to_arrow() assert len(not_nan) == 2 @pytest.mark.integration -def test_duckdb_nan(table_test_null_nan_rewritten: Table) -> None: +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_duckdb_nan(catalog: Catalog) -> None: + table_test_null_nan_rewritten = catalog.load_table("default.test_null_nan_rewritten") con = table_test_null_nan_rewritten.scan().to_duckdb("table_test_null_nan") result = con.query("SELECT idx, col_numeric FROM table_test_null_nan WHERE isnan(col_numeric)").fetchone() assert result[0] == 1 @@ -189,7 +168,9 @@ def test_duckdb_nan(table_test_null_nan_rewritten: Table) -> None: @pytest.mark.integration -def test_pyarrow_limit(table_test_limit: Table) -> None: +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_pyarrow_limit(catalog: Catalog) -> None: + 
table_test_limit = catalog.load_table("default.test_limit") limited_result = table_test_limit.scan(selected_fields=("idx",), limit=1).to_arrow() assert len(limited_result) == 1 @@ -200,16 +181,20 @@ def test_pyarrow_limit(table_test_limit: Table) -> None: assert len(full_result) == 10 -@pytest.mark.filterwarnings("ignore") @pytest.mark.integration -def test_ray_nan(table_test_null_nan_rewritten: Table) -> None: +@pytest.mark.filterwarnings("ignore") +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_ray_nan(catalog: Catalog) -> None: + table_test_null_nan_rewritten = catalog.load_table("default.test_null_nan_rewritten") ray_dataset = table_test_null_nan_rewritten.scan().to_ray() assert ray_dataset.count() == 3 assert math.isnan(ray_dataset.take()[0]["col_numeric"]) @pytest.mark.integration -def test_ray_nan_rewritten(table_test_null_nan_rewritten: Table) -> None: +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_ray_nan_rewritten(catalog: Catalog) -> None: + table_test_null_nan_rewritten = catalog.load_table("default.test_null_nan_rewritten") ray_dataset = table_test_null_nan_rewritten.scan( row_filter=IsNaN("col_numeric"), selected_fields=("idx", "col_numeric") ).to_ray() @@ -219,15 +204,19 @@ def test_ray_nan_rewritten(table_test_null_nan_rewritten: Table) -> None: @pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) @pytest.mark.skip(reason="Fixing issues with NaN's: https://github.com/apache/arrow/issues/34162") -def test_ray_not_nan_count(table_test_null_nan_rewritten: Table) -> None: +def test_ray_not_nan_count(catalog: Catalog) -> None: + table_test_null_nan_rewritten = catalog.load_table("default.test_null_nan_rewritten") ray_dataset = table_test_null_nan_rewritten.scan(row_filter=NotNaN("col_numeric"), selected_fields=("idx",)).to_ray() print(ray_dataset.take()) assert ray_dataset.count() == 2 @pytest.mark.integration -def test_ray_all_types(table_test_all_types: Table) -> None: +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_ray_all_types(catalog: Catalog) -> None: + table_test_all_types = catalog.load_table("default.test_all_types") ray_dataset = table_test_all_types.scan().to_ray() pandas_dataframe = table_test_all_types.scan().to_pandas() assert ray_dataset.count() == pandas_dataframe.shape[0] @@ -235,7 +224,9 @@ def test_ray_all_types(table_test_all_types: Table) -> None: @pytest.mark.integration -def test_pyarrow_to_iceberg_all_types(table_test_all_types: Table) -> None: +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_pyarrow_to_iceberg_all_types(catalog: Catalog) -> None: + table_test_all_types = catalog.load_table("default.test_all_types") fs = S3FileSystem(endpoint_override="http://localhost:9000", access_key="admin", secret_key="password") data_file_paths = [task.file.file_path for task in table_test_all_types.scan().plan_files()] for data_file_path in data_file_paths: @@ -248,7 +239,8 @@ def test_pyarrow_to_iceberg_all_types(table_test_all_types: Table) -> None: @pytest.mark.integration -def test_pyarrow_deletes(test_positional_mor_deletes: Table) -> None: +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def 
test_pyarrow_deletes(catalog: Catalog) -> None: # number, letter # (1, 'a'), # (2, 'b'), @@ -262,6 +254,7 @@ def test_pyarrow_deletes(test_positional_mor_deletes: Table) -> None: # (10, 'j'), # (11, 'k'), # (12, 'l') + test_positional_mor_deletes = catalog.load_table("default.test_positional_mor_deletes") arrow_table = test_positional_mor_deletes.scan().to_arrow() assert arrow_table["number"].to_pylist() == [1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12] @@ -283,7 +276,8 @@ def test_pyarrow_deletes(test_positional_mor_deletes: Table) -> None: @pytest.mark.integration -def test_pyarrow_deletes_double(test_positional_mor_double_deletes: Table) -> None: +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_pyarrow_deletes_double(catalog: Catalog) -> None: # number, letter # (1, 'a'), # (2, 'b'), @@ -297,6 +291,7 @@ def test_pyarrow_deletes_double(test_positional_mor_double_deletes: Table) -> No # (10, 'j'), # (11, 'k'), # (12, 'l') + test_positional_mor_double_deletes = catalog.load_table("default.test_positional_mor_double_deletes") arrow_table = test_positional_mor_double_deletes.scan().to_arrow() assert arrow_table["number"].to_pylist() == [1, 2, 3, 4, 5, 7, 8, 10, 11, 12] @@ -318,6 +313,7 @@ def test_pyarrow_deletes_double(test_positional_mor_double_deletes: Table) -> No @pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) def test_partitioned_tables(catalog: Catalog) -> None: for table_name, predicate in [ ("test_partitioned_by_identity", "ts >= '2023-03-05T00:00:00+00:00'"), @@ -334,6 +330,7 @@ def test_partitioned_tables(catalog: Catalog) -> None: @pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) def test_unpartitioned_uuid_table(catalog: Catalog) -> None: unpartitioned_uuid = catalog.load_table("default.test_uuid_and_fixed_unpartitioned") arrow_table_eq = unpartitioned_uuid.scan(row_filter="uuid_col == '102cb62f-e6f8-4eb0-9973-d9b012ff0967'").to_arrow() @@ -350,6 +347,7 @@ def test_unpartitioned_uuid_table(catalog: Catalog) -> None: @pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) def test_unpartitioned_fixed_table(catalog: Catalog) -> None: fixed_table = catalog.load_table("default.test_uuid_and_fixed_unpartitioned") arrow_table_eq = fixed_table.scan(row_filter=EqualTo("fixed_col", b"1234567890123456789012345")).to_arrow() @@ -368,19 +366,25 @@ def test_unpartitioned_fixed_table(catalog: Catalog) -> None: @pytest.mark.integration -def test_scan_tag(test_positional_mor_deletes: Table) -> None: +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_scan_tag(catalog: Catalog) -> None: + test_positional_mor_deletes = catalog.load_table("default.test_positional_mor_deletes") arrow_table = test_positional_mor_deletes.scan().use_ref("tag_12").to_arrow() assert arrow_table["number"].to_pylist() == [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12] @pytest.mark.integration -def test_scan_branch(test_positional_mor_deletes: Table) -> None: +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_scan_branch(catalog: Catalog) -> None: + test_positional_mor_deletes = catalog.load_table("default.test_positional_mor_deletes") arrow_table = 
test_positional_mor_deletes.scan().use_ref("without_5").to_arrow() assert arrow_table["number"].to_pylist() == [1, 2, 3, 4, 6, 7, 8, 9, 10, 11, 12] @pytest.mark.integration -def test_filter_on_new_column(test_table_add_column: Table) -> None: +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_filter_on_new_column(catalog: Catalog) -> None: + test_table_add_column = catalog.load_table("default.test_table_add_column") arrow_table = test_table_add_column.scan(row_filter="b == '2'").to_arrow() assert arrow_table["b"].to_pylist() == ['2'] @@ -392,7 +396,12 @@ def test_filter_on_new_column(test_table_add_column: Table) -> None: @pytest.mark.integration -def test_upgrade_table_version(table_test_table_version: Table) -> None: +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_upgrade_table_version(catalog: Catalog) -> None: + if isinstance(catalog, HiveCatalog): + pytest.skip("Not yet implemented: https://github.com/apache/iceberg-python/issues/274") + table_test_table_version = catalog.load_table("default.test_table_version") + assert table_test_table_version.format_version == 1 with table_test_table_version.transaction() as transaction: @@ -417,7 +426,9 @@ def test_upgrade_table_version(table_test_table_version: Table) -> None: @pytest.mark.integration -def test_reproduce_issue(table_test_table_sanitized_character: Table) -> None: +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_sanitize_character(catalog: Catalog) -> None: + table_test_table_sanitized_character = catalog.load_table("default.test_table_sanitized_character") arrow_table = table_test_table_sanitized_character.scan().to_arrow() assert len(arrow_table.schema.names), 1 assert len(table_test_table_sanitized_character.schema().fields), 1 diff --git a/tests/test_integration_manifest.py b/tests/integration/test_rest_manifest.py similarity index 100% rename from tests/test_integration_manifest.py rename to tests/integration/test_rest_manifest.py diff --git a/tests/test_integration_schema.py b/tests/integration/test_rest_schema.py similarity index 100% rename from tests/test_integration_schema.py rename to tests/integration/test_rest_schema.py From 8b660f8fe93596bab6b1c34a6c969078b220419e Mon Sep 17 00:00:00 2001 From: Sung Yun <107272191+syun64@users.noreply.github.com> Date: Thu, 18 Jan 2024 00:51:24 -0500 Subject: [PATCH 026/169] Fix TruncateTransform for falsey values (#276) --- pyiceberg/transforms.py | 2 +- tests/test_transforms.py | 5 +++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/pyiceberg/transforms.py b/pyiceberg/transforms.py index 2c91f1c92b..4386547c55 100644 --- a/pyiceberg/transforms.py +++ b/pyiceberg/transforms.py @@ -613,7 +613,7 @@ def truncate_func(v: Any) -> Any: else: raise ValueError(f"Cannot truncate for type: {source}") - return lambda v: truncate_func(v) if v else None + return lambda v: truncate_func(v) if v is not None else None def satisfies_order_of(self, other: Transform[S, T]) -> bool: if self == other: diff --git a/tests/test_transforms.py b/tests/test_transforms.py index d7a1478bc4..d4910cc1a1 100644 --- a/tests/test_transforms.py +++ b/tests/test_transforms.py @@ -362,7 +362,7 @@ def test_identity_method(type_var: PrimitiveType) -> None: @pytest.mark.parametrize("type_var", [IntegerType(), LongType()]) @pytest.mark.parametrize( "input_var,expected", - [(1, 0), (5, 0), 
(9, 0), (10, 10), (11, 10), (-1, -10), (-10, -10), (-12, -20)], + [(1, 0), (5, 0), (9, 0), (10, 10), (11, 10), (-1, -10), (-10, -10), (-12, -20), (0, 0)], ) def test_truncate_integer(type_var: PrimitiveType, input_var: int, expected: int) -> None: trunc = TruncateTransform(10) # type: ignore @@ -377,6 +377,7 @@ def test_truncate_integer(type_var: PrimitiveType, input_var: int, expected: int (Decimal("12.29"), Decimal("12.20")), (Decimal("0.05"), Decimal("0.00")), (Decimal("-0.05"), Decimal("-0.10")), + (Decimal("0.0"), Decimal("0.0")), ], ) def test_truncate_decimal(input_var: Decimal, expected: Decimal) -> None: @@ -384,7 +385,7 @@ def test_truncate_decimal(input_var: Decimal, expected: Decimal) -> None: assert trunc.transform(DecimalType(9, 2))(input_var) == expected -@pytest.mark.parametrize("input_var,expected", [("abcdefg", "abcde"), ("abc", "abc")]) +@pytest.mark.parametrize("input_var,expected", [("abcdefg", "abcde"), ("abc", "abc"), ("", "")]) def test_truncate_string(input_var: str, expected: str) -> None: trunc = TruncateTransform(5) # type: ignore assert trunc.transform(StringType())(input_var) == expected From 8614ba08f57461607496bd34cb03541d0fc5ea2b Mon Sep 17 00:00:00 2001 From: Fokko Driesprong Date: Thu, 18 Jan 2024 06:59:55 +0100 Subject: [PATCH 027/169] Add small test on duplicate changes (#273) --- tests/catalog/test_sql.py | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/tests/catalog/test_sql.py b/tests/catalog/test_sql.py index 8bf921aa4d..cb39dfadd0 100644 --- a/tests/catalog/test_sql.py +++ b/tests/catalog/test_sql.py @@ -27,6 +27,7 @@ from pyiceberg.catalog import Identifier from pyiceberg.catalog.sql import SqlCatalog from pyiceberg.exceptions import ( + CommitFailedException, NamespaceAlreadyExistsError, NamespaceNotEmptyError, NoSuchNamespaceError, @@ -719,3 +720,26 @@ def test_commit_table(catalog: SqlCatalog, table_schema_nested: Schema, random_i assert new_schema assert new_schema == update._apply() assert new_schema.find_field("b").field_type == IntegerType() + + +@pytest.mark.parametrize( + 'catalog', + [ + lazy_fixture('catalog_memory'), + lazy_fixture('catalog_sqlite'), + lazy_fixture('catalog_sqlite_without_rowcount'), + ], +) +def test_concurrent_commit_table(catalog: SqlCatalog, table_schema_simple: Schema, random_identifier: Identifier) -> None: + database_name, _table_name = random_identifier + catalog.create_namespace(database_name) + table_a = catalog.create_table(random_identifier, table_schema_simple) + table_b = catalog.load_table(random_identifier) + + with table_a.update_schema() as update: + update.add_column(path="b", field_type=IntegerType()) + + with pytest.raises(CommitFailedException, match="Requirement failed: current schema id has changed: expected 0, found 1"): + # This one should fail since it already has been updated + with table_b.update_schema() as update: + update.add_column(path="c", field_type=IntegerType()) From 8f7927b840594adf44f74adaaea105c4cb241a42 Mon Sep 17 00:00:00 2001 From: Fokko Driesprong Date: Thu, 18 Jan 2024 11:19:17 +0100 Subject: [PATCH 028/169] Write support (#41) --- mkdocs/docs/api.md | 98 +++++ poetry.lock | 36 +- pyiceberg/io/pyarrow.py | 82 +++- pyiceberg/manifest.py | 100 +++-- pyiceberg/table/__init__.py | 335 ++++++++++++++- pyiceberg/table/snapshots.py | 36 +- pyproject.toml | 1 + .../{test_catalogs.py => test_reads.py} | 0 tests/integration/test_writes.py | 387 ++++++++++++++++++ tests/io/test_pyarrow_stats.py | 67 +-- tests/table/test_init.py | 6 +- 
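The new `(0, 0)`, `Decimal("0.0")`, and `("", "")` cases above pin down the one-character `TruncateTransform` fix from #276: the old `truncate_func(v) if v else None` guard silently mapped every falsey value, not just `None`, to `None`. A minimal sketch of the corrected behavior, using the same transform API the tests exercise:

```python
from pyiceberg.transforms import TruncateTransform
from pyiceberg.types import IntegerType, StringType

int_trunc = TruncateTransform(10).transform(IntegerType())
assert int_trunc(0) == 0        # pre-fix: `0 if 0 else None` returned None
assert int_trunc(-12) == -20    # truncation floors toward negative infinity
assert int_trunc(None) is None  # only true nulls map to None now

str_trunc = TruncateTransform(5).transform(StringType())
assert str_trunc("") == ""      # pre-fix: the empty string was swallowed to None
assert str_trunc("abcdefg") == "abcde"
```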
tests/table/test_snapshots.py | 14 +- tests/utils/test_manifest.py | 9 +- 13 files changed, 1035 insertions(+), 136 deletions(-) rename tests/integration/{test_catalogs.py => test_reads.py} (100%) create mode 100644 tests/integration/test_writes.py diff --git a/mkdocs/docs/api.md b/mkdocs/docs/api.md index 517e52f185..9d97d4f676 100644 --- a/mkdocs/docs/api.md +++ b/mkdocs/docs/api.md @@ -175,6 +175,104 @@ static_table = StaticTable.from_metadata( The static-table is considered read-only. +## Write support + +With PyIceberg 0.6.0, write support is added through Arrow. Let's consider an Arrow Table: + +```python +import pyarrow as pa + +df = pa.Table.from_pylist( + [ + {"city": "Amsterdam", "lat": 52.371807, "long": 4.896029}, + {"city": "San Francisco", "lat": 37.773972, "long": -122.431297}, + {"city": "Drachten", "lat": 53.11254, "long": 6.0989}, + {"city": "Paris", "lat": 48.864716, "long": 2.349014}, + ], +) +``` + +Next, create a table based on the schema: + +```python +from pyiceberg.catalog import load_catalog + +catalog = load_catalog("default") + +from pyiceberg.schema import Schema +from pyiceberg.types import NestedField, StringType, DoubleType + +schema = Schema( + NestedField(1, "city", StringType(), required=False), + NestedField(2, "lat", DoubleType(), required=False), + NestedField(3, "long", DoubleType(), required=False), +) + +tbl = catalog.create_table("default.cities", schema=schema) +``` + +Now write the data to the table: + + + +!!! note inline end "Fast append" + PyIceberg defaults to the [fast append](https://iceberg.apache.org/spec/#snapshots) to minimize the amount of data written. This enables quick writes, reducing the possibility of conflicts. The downside of the fast append is that it creates more metadata than a normal commit. [Compaction is planned](https://github.com/apache/iceberg-python/issues/270) and will automatically rewrite all the metadata when a threshold is hit, to maintain performant reads. + + + +```python +tbl.append(df) + +# or + +tbl.overwrite(df) +``` + +The data is written to the table, and when the table is read using `tbl.scan().to_arrow()`: + +``` +pyarrow.Table +city: string +lat: double +long: double +---- +city: [["Amsterdam","San Francisco","Drachten","Paris"]] +lat: [[52.371807,37.773972,53.11254,48.864716]] +long: [[4.896029,-122.431297,6.0989,2.349014]] +``` + +You can use either `append(df)` or `overwrite(df)` since there is no data yet. If we want to add more data, we can use `.append()` again: + +```python +df = pa.Table.from_pylist( + [{"city": "Groningen", "lat": 53.21917, "long": 6.56667}], +) + +tbl.append(df) +``` + +When reading the table with `tbl.scan().to_arrow()`, you can see that `Groningen` is now also part of the table: + +``` +pyarrow.Table +city: string +lat: double +long: double +---- +city: [["Amsterdam","San Francisco","Drachten","Paris"],["Groningen"]] +lat: [[52.371807,37.773972,53.11254,48.864716],[53.21917]] +long: [[4.896029,-122.431297,6.0989,2.349014],[6.56667]] +``` + +The nested lists indicate the different Arrow buffers: the first write results in one buffer, and the second append lands in a separate buffer. This is expected, since the scan reads two Parquet files. + + + +!!! example "Under development" + Writing using PyIceberg is still under development. Support for [partial overwrites](https://github.com/apache/iceberg-python/issues/268) and writing to [partitioned tables](https://github.com/apache/iceberg-python/issues/208) is planned and being worked on.
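Worth noting next to the example above: the 0.6.0 write path fails fast on tables it cannot handle yet. A short sketch, reusing `catalog` and `df` from the snippets above and assuming a hypothetical partitioned table `default.cities_partitioned` exists:

```python
partitioned = catalog.load_table("default.cities_partitioned")  # hypothetical table

try:
    partitioned.append(df)
except ValueError as err:
    # Table.append and Table.overwrite both reject partitioned tables and
    # tables with a sort order until those write paths are implemented.
    print(err)  # -> Cannot write to partitioned tables
```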
+ + +## Schema evolution PyIceberg supports full schema evolution through the Python API. It takes care of setting the field-IDs and makes sure that only non-breaking changes are applied (can be overridden). diff --git a/poetry.lock b/poetry.lock index 3a22111bd7..8fc927ce95 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2566,7 +2566,6 @@ files = [ {file = "psycopg2_binary-2.9.9-cp311-cp311-win32.whl", hash = "sha256:dc4926288b2a3e9fd7b50dc6a1909a13bbdadfc67d93f3374d984e56f885579d"}, {file = "psycopg2_binary-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:b76bedd166805480ab069612119ea636f5ab8f8771e640ae103e05a4aae3e417"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:8532fd6e6e2dc57bcb3bc90b079c60de896d2128c5d9d6f24a63875a95a088cf"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b0605eaed3eb239e87df0d5e3c6489daae3f7388d455d0c0b4df899519c6a38d"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f8544b092a29a6ddd72f3556a9fcf249ec412e10ad28be6a0c0d948924f2212"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2d423c8d8a3c82d08fe8af900ad5b613ce3632a1249fd6a223941d0735fce493"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e5afae772c00980525f6d6ecf7cbca55676296b580c0e6abb407f15f3706996"}, @@ -2575,8 +2574,6 @@ files = [ {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:cb16c65dcb648d0a43a2521f2f0a2300f40639f6f8c1ecbc662141e4e3e1ee07"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:911dda9c487075abd54e644ccdf5e5c16773470a6a5d3826fda76699410066fb"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:57fede879f08d23c85140a360c6a77709113efd1c993923c59fde17aa27599fe"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-win32.whl", hash = "sha256:64cf30263844fa208851ebb13b0732ce674d8ec6a0c86a4e160495d299ba3c93"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-win_amd64.whl", hash = "sha256:81ff62668af011f9a48787564ab7eded4e9fb17a4a6a74af5ffa6a457400d2ab"}, {file = "psycopg2_binary-2.9.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2293b001e319ab0d869d660a704942c9e2cce19745262a8aba2115ef41a0a42a"}, {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03ef7df18daf2c4c07e2695e8cfd5ee7f748a1d54d802330985a78d2a5a6dca9"}, {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a602ea5aff39bb9fac6308e9c9d82b9a35c2bf288e184a816002c9fae930b77"}, @@ -2628,6 +2625,17 @@ files = [ [package.extras] dev = ["black (==22.6.0)", "flake8", "mypy", "pytest"] +[[package]] +name = "py4j" +version = "0.10.9.7" +description = "Enables Python programs to dynamically access arbitrary Java objects" +optional = false +python-versions = "*" +files = [ + {file = "py4j-0.10.9.7-py2.py3-none-any.whl", hash = "sha256:85defdfd2b2376eb3abf5ca6474b51ab7e0de341c75a02f46dc9b5976f5a5c1b"}, + {file = "py4j-0.10.9.7.tar.gz", hash = "sha256:0b6e5315bb3ada5cf62ac651d107bb2ebc02def3dee9d9548e3baac644ea8dbb"}, +] + [[package]] name = "pyarrow" version = "14.0.2" @@ -2910,6 +2918,26 @@ files = [ [package.dependencies] tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +[[package]] +name = "pyspark" +version = "3.4.2" +description = "Apache Spark Python API" +optional = false
+python-versions = ">=3.7" +files = [ + {file = "pyspark-3.4.2.tar.gz", hash = "sha256:088db1b8ff33a748b802f1710ff6f6dcef0e0f2cca7d69bbbe55b187a0d55c3f"}, +] + +[package.dependencies] +py4j = "0.10.9.7" + +[package.extras] +connect = ["googleapis-common-protos (>=1.56.4)", "grpcio (>=1.48.1)", "grpcio-status (>=1.48.1)", "numpy (>=1.15)", "pandas (>=1.0.5)", "pyarrow (>=1.0.0)"] +ml = ["numpy (>=1.15)"] +mllib = ["numpy (>=1.15)"] +pandas-on-spark = ["numpy (>=1.15)", "pandas (>=1.0.5)", "pyarrow (>=1.0.0)"] +sql = ["numpy (>=1.15)", "pandas (>=1.0.5)", "pyarrow (>=1.0.0)"] + [[package]] name = "pytest" version = "7.4.4" @@ -4189,4 +4217,4 @@ zstandard = ["zstandard"] [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "54d7c0ea7f06959ce2fb8c4c36b595881e56c59dc1e97bbc4a859bcc25dac542" +content-hash = "744b1c2e8e96c8626732849a1349f614ba7fdd01491a3488d850979b8192787c" diff --git a/pyiceberg/io/pyarrow.py b/pyiceberg/io/pyarrow.py index e116cd0a38..b4988c677a 100644 --- a/pyiceberg/io/pyarrow.py +++ b/pyiceberg/io/pyarrow.py @@ -105,7 +105,11 @@ OutputFile, OutputStream, ) -from pyiceberg.manifest import DataFile, FileFormat +from pyiceberg.manifest import ( + DataFile, + DataFileContent, + FileFormat, +) from pyiceberg.schema import ( PartnerAccessor, PreOrderSchemaVisitor, @@ -119,8 +123,9 @@ visit, visit_with_partner, ) +from pyiceberg.table import WriteTask from pyiceberg.transforms import TruncateTransform -from pyiceberg.typedef import EMPTY_DICT, Properties +from pyiceberg.typedef import EMPTY_DICT, Properties, Record from pyiceberg.types import ( BinaryType, BooleanType, @@ -1443,9 +1448,8 @@ def parquet_path_to_id_mapping( def fill_parquet_file_metadata( - df: DataFile, + data_file: DataFile, parquet_metadata: pq.FileMetaData, - file_size: int, stats_columns: Dict[int, StatisticsCollector], parquet_column_mapping: Dict[str, int], ) -> None: @@ -1453,8 +1457,6 @@ def fill_parquet_file_metadata( Compute and fill the following fields of the DataFile object. - file_format - - record_count - - file_size_in_bytes - column_sizes - value_counts - null_value_counts @@ -1464,11 +1466,8 @@ def fill_parquet_file_metadata( - split_offsets Args: - df (DataFile): A DataFile object representing the Parquet file for which metadata is to be filled. + data_file (DataFile): A DataFile object representing the Parquet file for which metadata is to be filled. parquet_metadata (pyarrow.parquet.FileMetaData): A pyarrow metadata object. - file_size (int): The total compressed file size cannot be retrieved from the metadata and hence has to - be passed here. Depending on the kind of file system and pyarrow library call used, different - ways to obtain this value might be appropriate. stats_columns (Dict[int, StatisticsCollector]): The statistics gathering plan. 
It is required to set the mode for column metrics collection """ @@ -1565,13 +1564,56 @@ def fill_parquet_file_metadata( del upper_bounds[field_id] del null_value_counts[field_id] - df.file_format = FileFormat.PARQUET - df.record_count = parquet_metadata.num_rows - df.file_size_in_bytes = file_size - df.column_sizes = column_sizes - df.value_counts = value_counts - df.null_value_counts = null_value_counts - df.nan_value_counts = nan_value_counts - df.lower_bounds = lower_bounds - df.upper_bounds = upper_bounds - df.split_offsets = split_offsets + data_file.record_count = parquet_metadata.num_rows + data_file.column_sizes = column_sizes + data_file.value_counts = value_counts + data_file.null_value_counts = null_value_counts + data_file.nan_value_counts = nan_value_counts + data_file.lower_bounds = lower_bounds + data_file.upper_bounds = upper_bounds + data_file.split_offsets = split_offsets + + +def write_file(table: Table, tasks: Iterator[WriteTask]) -> Iterator[DataFile]: + task = next(tasks) + + try: + _ = next(tasks) + # If there are more tasks, raise an exception + raise NotImplementedError("Only unpartitioned writes are supported: https://github.com/apache/iceberg-python/issues/208") + except StopIteration: + pass + + file_path = f'{table.location()}/data/{task.generate_data_file_filename("parquet")}' + file_schema = schema_to_pyarrow(table.schema()) + + collected_metrics: List[pq.FileMetaData] = [] + fo = table.io.new_output(file_path) + with fo.create(overwrite=True) as fos: + with pq.ParquetWriter(fos, schema=file_schema, version="1.0", metadata_collector=collected_metrics) as writer: + writer.write_table(task.df) + + data_file = DataFile( + content=DataFileContent.DATA, + file_path=file_path, + file_format=FileFormat.PARQUET, + partition=Record(), + file_size_in_bytes=len(fo), + sort_order_id=task.sort_order_id, + # Just copy these from the table for now + spec_id=table.spec().spec_id, + equality_ids=None, + key_metadata=None, + ) + + if len(collected_metrics) != 1: + # One file has been written + raise ValueError(f"Expected 1 entry, got: {collected_metrics}") + + fill_parquet_file_metadata( + data_file=data_file, + parquet_metadata=collected_metrics[0], + stats_columns=compute_statistics_plan(table.schema(), table.properties), + parquet_column_mapping=parquet_path_to_id_mapping(table.schema()), + ) + return iter([data_file]) diff --git a/pyiceberg/manifest.py b/pyiceberg/manifest.py index 0ddfcd28d5..0504626d07 100644 --- a/pyiceberg/manifest.py +++ b/pyiceberg/manifest.py @@ -37,7 +37,7 @@ from pyiceberg.io import FileIO, InputFile, OutputFile from pyiceberg.partitioning import PartitionSpec from pyiceberg.schema import Schema -from pyiceberg.typedef import Record +from pyiceberg.typedef import EMPTY_DICT, Record from pyiceberg.types import ( BinaryType, BooleanType, @@ -60,6 +60,8 @@ DEFAULT_BLOCK_SIZE = 67108864 # 64 * 1024 * 1024 DEFAULT_READ_VERSION: Literal[2] = 2 +INITIAL_SEQUENCE_NUMBER = 0 + class DataFileContent(int, Enum): DATA = 0 @@ -491,25 +493,42 @@ def construct_partition_summaries(spec: PartitionSpec, schema: Schema, partition return [field.to_summary() for field in field_stats] -MANIFEST_FILE_SCHEMA: Schema = Schema( - NestedField(500, "manifest_path", StringType(), required=True, doc="Location URI with FS scheme"), - NestedField(501, "manifest_length", LongType(), required=True), - NestedField(502, "partition_spec_id", IntegerType(), required=True), - NestedField(517, "content", IntegerType(), required=False, initial_default=ManifestContent.DATA), - 
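`write_file` above gets its column metrics almost for free: `pyarrow.parquet.ParquetWriter` accepts a `metadata_collector` list and appends the finished file's `FileMetaData` to it, which is exactly what `fill_parquet_file_metadata` consumes. A standalone sketch of just that mechanism (the output path is arbitrary):

```python
import pyarrow as pa
import pyarrow.parquet as pq

table = pa.table({"idx": [1, 2, 3]})
collected = []  # receives one pq.FileMetaData per written file
with pq.ParquetWriter("/tmp/example.parquet", table.schema, metadata_collector=collected) as writer:
    writer.write_table(table)

# Row counts, row groups, and per-column statistics come from the
# collected metadata instead of re-reading the file from storage.
meta = collected[0]
print(meta.num_rows, meta.num_row_groups)
print(meta.row_group(0).column(0).statistics)
```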
NestedField(515, "sequence_number", LongType(), required=False, initial_default=0), - NestedField(516, "min_sequence_number", LongType(), required=False, initial_default=0), - NestedField(503, "added_snapshot_id", LongType(), required=False), - NestedField(504, "added_files_count", IntegerType(), required=False), - NestedField(505, "existing_files_count", IntegerType(), required=False), - NestedField(506, "deleted_files_count", IntegerType(), required=False), - NestedField(512, "added_rows_count", LongType(), required=False), - NestedField(513, "existing_rows_count", LongType(), required=False), - NestedField(514, "deleted_rows_count", LongType(), required=False), - NestedField(507, "partitions", ListType(508, PARTITION_FIELD_SUMMARY_TYPE, element_required=True), required=False), - NestedField(519, "key_metadata", BinaryType(), required=False), -) +MANIFEST_LIST_FILE_SCHEMAS: Dict[int, Schema] = { + 1: Schema( + NestedField(500, "manifest_path", StringType(), required=True, doc="Location URI with FS scheme"), + NestedField(501, "manifest_length", LongType(), required=True), + NestedField(502, "partition_spec_id", IntegerType(), required=True), + NestedField(503, "added_snapshot_id", LongType(), required=True), + NestedField(504, "added_files_count", IntegerType(), required=False), + NestedField(505, "existing_files_count", IntegerType(), required=False), + NestedField(506, "deleted_files_count", IntegerType(), required=False), + NestedField(512, "added_rows_count", LongType(), required=False), + NestedField(513, "existing_rows_count", LongType(), required=False), + NestedField(514, "deleted_rows_count", LongType(), required=False), + NestedField(507, "partitions", ListType(508, PARTITION_FIELD_SUMMARY_TYPE, element_required=True), required=False), + NestedField(519, "key_metadata", BinaryType(), required=False), + ), + 2: Schema( + NestedField(500, "manifest_path", StringType(), required=True, doc="Location URI with FS scheme"), + NestedField(501, "manifest_length", LongType(), required=True), + NestedField(502, "partition_spec_id", IntegerType(), required=True), + NestedField(517, "content", IntegerType(), required=True, initial_default=ManifestContent.DATA), + NestedField(515, "sequence_number", LongType(), required=True, initial_default=0), + NestedField(516, "min_sequence_number", LongType(), required=True, initial_default=0), + NestedField(503, "added_snapshot_id", LongType(), required=True), + NestedField(504, "added_files_count", IntegerType(), required=True), + NestedField(505, "existing_files_count", IntegerType(), required=True), + NestedField(506, "deleted_files_count", IntegerType(), required=True), + NestedField(512, "added_rows_count", LongType(), required=True), + NestedField(513, "existing_rows_count", LongType(), required=True), + NestedField(514, "deleted_rows_count", LongType(), required=True), + NestedField(507, "partitions", ListType(508, PARTITION_FIELD_SUMMARY_TYPE, element_required=True), required=False), + NestedField(519, "key_metadata", BinaryType(), required=False), + ), +} + +MANIFEST_LIST_FILE_STRUCTS = {format_version: schema.as_struct() for format_version, schema in MANIFEST_LIST_FILE_SCHEMAS.items()} -MANIFEST_FILE_SCHEMA_STRUCT = MANIFEST_FILE_SCHEMA.as_struct() POSITIONAL_DELETE_SCHEMA = Schema( NestedField(2147483546, "file_path", StringType()), NestedField(2147483545, "pos", IntegerType()) @@ -551,7 +570,7 @@ class ManifestFile(Record): key_metadata: Optional[bytes] def __init__(self, *data: Any, **named_data: Any) -> None: - super().__init__(*data, 
**{"struct": MANIFEST_FILE_SCHEMA_STRUCT, **named_data}) + super().__init__(*data, **{"struct": MANIFEST_LIST_FILE_STRUCTS[DEFAULT_READ_VERSION], **named_data}) def has_added_files(self) -> bool: return self.added_files_count is None or self.added_files_count > 0 @@ -596,7 +615,7 @@ def read_manifest_list(input_file: InputFile) -> Iterator[ManifestFile]: """ with AvroFile[ManifestFile]( input_file, - MANIFEST_FILE_SCHEMA, + MANIFEST_LIST_FILE_SCHEMAS[DEFAULT_READ_VERSION], read_types={-1: ManifestFile, 508: PartitionFieldSummary}, read_enums={517: ManifestContent}, ) as reader: @@ -659,7 +678,9 @@ class ManifestWriter(ABC): _min_data_sequence_number: Optional[int] _partitions: List[Record] - def __init__(self, spec: PartitionSpec, schema: Schema, output_file: OutputFile, snapshot_id: int, meta: Dict[str, str]): + def __init__( + self, spec: PartitionSpec, schema: Schema, output_file: OutputFile, snapshot_id: int, meta: Dict[str, str] = EMPTY_DICT + ) -> None: self.closed = False self._spec = spec self._schema = schema @@ -737,7 +758,7 @@ def to_manifest_file(self) -> ManifestFile: existing_rows_count=self._existing_rows, deleted_rows_count=self._deleted_rows, partitions=construct_partition_summaries(self._spec, self._schema, self._partitions), - key_metadatas=None, + key_metadata=None, ) def add_entry(self, entry: ManifestEntry) -> ManifestWriter: @@ -836,13 +857,15 @@ def write_manifest( class ManifestListWriter(ABC): + _format_version: Literal[1, 2] _output_file: OutputFile _meta: Dict[str, str] _manifest_files: List[ManifestFile] _commit_snapshot_id: int _writer: AvroOutputFile[ManifestFile] - def __init__(self, output_file: OutputFile, meta: Dict[str, str]): + def __init__(self, format_version: Literal[1, 2], output_file: OutputFile, meta: Dict[str, Any]): + self._format_version = format_version self._output_file = output_file self._meta = meta self._manifest_files = [] @@ -850,7 +873,11 @@ def __init__(self, output_file: OutputFile, meta: Dict[str, str]): def __enter__(self) -> ManifestListWriter: """Open the writer for writing.""" self._writer = AvroOutputFile[ManifestFile]( - output_file=self._output_file, file_schema=MANIFEST_FILE_SCHEMA, schema_name="manifest_file", metadata=self._meta + output_file=self._output_file, + record_schema=MANIFEST_LIST_FILE_SCHEMAS[DEFAULT_READ_VERSION], + file_schema=MANIFEST_LIST_FILE_SCHEMAS[self._format_version], + schema_name="manifest_file", + metadata=self._meta, ) self._writer.__enter__() return self @@ -874,9 +901,11 @@ def add_manifests(self, manifest_files: List[ManifestFile]) -> ManifestListWrite class ManifestListWriterV1(ManifestListWriter): - def __init__(self, output_file: OutputFile, snapshot_id: int, parent_snapshot_id: int): + def __init__(self, output_file: OutputFile, snapshot_id: int, parent_snapshot_id: Optional[int]): super().__init__( - output_file, {"snapshot-id": str(snapshot_id), "parent-snapshot-id": str(parent_snapshot_id), "format-version": "1"} + format_version=1, + output_file=output_file, + meta={"snapshot-id": str(snapshot_id), "parent-snapshot-id": str(parent_snapshot_id), "format-version": "1"}, ) def prepare_manifest(self, manifest_file: ManifestFile) -> ManifestFile: @@ -889,10 +918,11 @@ class ManifestListWriterV2(ManifestListWriter): _commit_snapshot_id: int _sequence_number: int - def __init__(self, output_file: OutputFile, snapshot_id: int, parent_snapshot_id: int, sequence_number: int): + def __init__(self, output_file: OutputFile, snapshot_id: int, parent_snapshot_id: Optional[int], sequence_number: int): 
super().__init__( - output_file, - { + format_version=2, + output_file=output_file, + meta={ "snapshot-id": str(snapshot_id), "parent-snapshot-id": str(parent_snapshot_id), "sequence-number": str(sequence_number), @@ -910,7 +940,7 @@ def prepare_manifest(self, manifest_file: ManifestFile) -> ManifestFile: # To validate this, check that the snapshot id matches the current commit if self._commit_snapshot_id != wrapped_manifest_file.added_snapshot_id: raise ValueError( - f"Found unassigned sequence number for a manifest from snapshot: {wrapped_manifest_file.added_snapshot_id}" + f"Found unassigned sequence number for a manifest from snapshot: {self._commit_snapshot_id} != {wrapped_manifest_file.added_snapshot_id}" ) wrapped_manifest_file.sequence_number = self._sequence_number @@ -926,11 +956,17 @@ def prepare_manifest(self, manifest_file: ManifestFile) -> ManifestFile: def write_manifest_list( - format_version: Literal[1, 2], output_file: OutputFile, snapshot_id: int, parent_snapshot_id: int, sequence_number: int + format_version: Literal[1, 2], + output_file: OutputFile, + snapshot_id: int, + parent_snapshot_id: Optional[int], + sequence_number: Optional[int], ) -> ManifestListWriter: if format_version == 1: return ManifestListWriterV1(output_file, snapshot_id, parent_snapshot_id) elif format_version == 2: + if sequence_number is None: + raise ValueError(f"Sequence-number is required for V2 tables: {sequence_number}") return ManifestListWriterV2(output_file, snapshot_id, parent_snapshot_id, sequence_number) else: raise ValueError(f"Cannot write manifest list for table version: {format_version}") diff --git a/pyiceberg/table/__init__.py b/pyiceberg/table/__init__.py index 5292c5182c..7c1b0bdecc 100644 --- a/pyiceberg/table/__init__.py +++ b/pyiceberg/table/__init__.py @@ -62,7 +62,10 @@ DataFileContent, ManifestContent, ManifestEntry, + ManifestEntryStatus, ManifestFile, + write_manifest, + write_manifest_list, ) from pyiceberg.partitioning import PartitionSpec from pyiceberg.schema import ( @@ -79,7 +82,14 @@ TableMetadataUtil, ) from pyiceberg.table.refs import MAIN_BRANCH, SnapshotRef -from pyiceberg.table.snapshots import Snapshot, SnapshotLogEntry +from pyiceberg.table.snapshots import ( + Operation, + Snapshot, + SnapshotLogEntry, + SnapshotSummaryCollector, + Summary, + update_snapshot_summaries, +) from pyiceberg.table.sorting import SortOrder from pyiceberg.typedef import ( EMPTY_DICT, @@ -111,6 +121,8 @@ ALWAYS_TRUE = AlwaysTrue() TABLE_ROOT_ID = -1 +_JAVA_LONG_MAX = 9223372036854775807 + class Transaction: _table: Table @@ -210,6 +222,47 @@ def set_properties(self, **updates: str) -> Transaction: """ return self._append_updates(SetPropertiesUpdate(updates=updates)) + def add_snapshot(self, snapshot: Snapshot) -> Transaction: + """Add a new snapshot to the table. + + Returns: + The transaction with the add-snapshot staged. + """ + self._append_updates(AddSnapshotUpdate(snapshot=snapshot)) + self._append_requirements(AssertTableUUID(uuid=self._table.metadata.table_uuid)) + + return self + + def set_ref_snapshot( + self, + snapshot_id: int, + parent_snapshot_id: Optional[int], + ref_name: str, + type: str, + max_age_ref_ms: Optional[int] = None, + max_snapshot_age_ms: Optional[int] = None, + min_snapshots_to_keep: Optional[int] = None, + ) -> Transaction: + """Update a ref to a snapshot. 
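The `write_manifest_list` dispatch above is the one place the V1/V2 split is enforced for manifest lists: V1 files carry no sequence numbers, so the writer only demands one for V2. A minimal sketch, assuming a local output path (the snapshot id and path are hypothetical):

```python
from pyiceberg.io.pyarrow import PyArrowFileIO
from pyiceberg.manifest import write_manifest_list

io = PyArrowFileIO()
output = io.new_output("/tmp/snap-4242-0-example.avro")  # hypothetical path

# V2 requires a sequence number; passing None raises ValueError.
with write_manifest_list(
    format_version=2,
    output_file=output,
    snapshot_id=4242,
    parent_snapshot_id=None,
    sequence_number=1,
) as writer:
    writer.add_manifests([])  # normally the ManifestFiles produced by a ManifestWriter
```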
+ + Returns: + The transaction with the set-snapshot-ref staged. + """ + self._append_updates( + SetSnapshotRefUpdate( + snapshot_id=snapshot_id, + parent_snapshot_id=parent_snapshot_id, + ref_name=ref_name, + type=type, + max_age_ref_ms=max_age_ref_ms, + max_snapshot_age_ms=max_snapshot_age_ms, + min_snapshots_to_keep=min_snapshots_to_keep, + ) + ) + + self._append_requirements(AssertRefSnapshotId(snapshot_id=parent_snapshot_id, ref="main")) + return self + def update_schema(self) -> UpdateSchema: """Create a new UpdateSchema to alter the columns of this table. @@ -609,7 +662,7 @@ class AssertRefSnapshotId(TableRequirement): """ type: Literal["assert-ref-snapshot-id"] = Field(default="assert-ref-snapshot-id") - ref: str + ref: str = Field(...) snapshot_id: Optional[int] = Field(default=None, alias="snapshot-id") def validate(self, base_metadata: Optional[TableMetadata]) -> None: @@ -822,8 +875,8 @@ def location(self) -> str: def last_sequence_number(self) -> int: return self.metadata.last_sequence_number - def _next_sequence_number(self) -> int: - return INITIAL_SEQUENCE_NUMBER if self.format_version == 1 else self.last_sequence_number + 1 + def next_sequence_number(self) -> int: + return self.last_sequence_number + 1 if self.metadata.format_version > 1 else INITIAL_SEQUENCE_NUMBER def new_snapshot_id(self) -> int: """Generate a new snapshot-id that's not in use.""" @@ -856,6 +909,55 @@ def history(self) -> List[SnapshotLogEntry]: def update_schema(self, allow_incompatible_changes: bool = False, case_sensitive: bool = True) -> UpdateSchema: return UpdateSchema(self, allow_incompatible_changes=allow_incompatible_changes, case_sensitive=case_sensitive) + def append(self, df: pa.Table) -> None: + """ + Append data to the table. + + Args: + df: The Arrow dataframe that will be appended to the table + """ + if len(self.spec().fields) > 0: + raise ValueError("Cannot write to partitioned tables") + + if len(self.sort_order().fields) > 0: + raise ValueError("Cannot write to tables with a sort-order") + + data_files = _dataframe_to_data_files(self, df=df) + merge = _MergingSnapshotProducer(operation=Operation.APPEND, table=self) + for data_file in data_files: + merge.append_data_file(data_file) + + merge.commit() + + def overwrite(self, df: pa.Table, overwrite_filter: BooleanExpression = ALWAYS_TRUE) -> None: + """ + Overwrite all the data in the table.
+ + Args: + df: The Arrow dataframe that will be used to overwrite the table + overwrite_filter: ALWAYS_TRUE when you overwrite all the data, + or a boolean expression in case of a partial overwrite + """ + if overwrite_filter != AlwaysTrue(): + raise NotImplementedError("Cannot overwrite a subset of a table") + + if len(self.spec().fields) > 0: + raise ValueError("Cannot write to partitioned tables") + + if len(self.sort_order().fields) > 0: + raise ValueError("Cannot write to tables with a sort-order") + + data_files = _dataframe_to_data_files(self, df=df) + merge = _MergingSnapshotProducer( + operation=Operation.OVERWRITE if self.current_snapshot() is not None else Operation.APPEND, + table=self, + ) + + for data_file in data_files: + merge.append_data_file(data_file) + + merge.commit() + def refs(self) -> Dict[str, SnapshotRef]: """Return the snapshot references in the table.""" return self.metadata.refs @@ -1068,7 +1170,7 @@ def _min_data_file_sequence_number(manifests: List[ManifestFile]) -> int: return INITIAL_SEQUENCE_NUMBER -def _match_deletes_to_datafile(data_entry: ManifestEntry, positional_delete_entries: SortedList[ManifestEntry]) -> Set[DataFile]: +def _match_deletes_to_data_file(data_entry: ManifestEntry, positional_delete_entries: SortedList[ManifestEntry]) -> Set[DataFile]: """Check if the delete file is relevant for the data file. Using the column metrics to see if the filename is in the lower and upper bound. @@ -1212,7 +1314,7 @@ def plan_files(self) -> Iterable[FileScanTask]: return [ FileScanTask( data_entry.data_file, - delete_files=_match_deletes_to_datafile( + delete_files=_match_deletes_to_data_file( data_entry, positional_delete_entries, ), @@ -1935,3 +2037,224 @@ def _generate_snapshot_id() -> int: snapshot_id = snapshot_id if snapshot_id >= 0 else snapshot_id * -1 return snapshot_id + + +@dataclass(frozen=True) +class WriteTask: + write_uuid: uuid.UUID + task_id: int + df: pa.Table + sort_order_id: Optional[int] = None + + # Later to be extended with partition information + + def generate_data_file_filename(self, extension: str) -> str: + # Mimics the behavior in the Java API: + # https://github.com/apache/iceberg/blob/a582968975dd30ff4917fbbe999f1be903efac02/core/src/main/java/org/apache/iceberg/io/OutputFileFactory.java#L92-L101 + return f"00000-{self.task_id}-{self.write_uuid}.{extension}" + + +def _new_manifest_path(location: str, num: int, commit_uuid: uuid.UUID) -> str: + return f'{location}/metadata/{commit_uuid}-m{num}.avro' + + +def _generate_manifest_list_path(location: str, snapshot_id: int, attempt: int, commit_uuid: uuid.UUID) -> str: + # Mimics the behavior in Java: + # https://github.com/apache/iceberg/blob/c862b9177af8e2d83122220764a056f3b96fd00c/core/src/main/java/org/apache/iceberg/SnapshotProducer.java#L491 + return f'{location}/metadata/snap-{snapshot_id}-{attempt}-{commit_uuid}.avro' + + +def _dataframe_to_data_files(table: Table, df: pa.Table) -> Iterable[DataFile]: + from pyiceberg.io.pyarrow import write_file + + if len(table.spec().fields) > 0: + raise ValueError("Cannot write to partitioned tables") + + if len(table.sort_order().fields) > 0: + raise ValueError("Cannot write to tables with a sort-order") + + write_uuid = uuid.uuid4() + counter = itertools.count(0) + + # This is an iter, so we don't have to materialize everything every time + # This will be more relevant when we start doing partitioned writes + yield from write_file(table, iter([WriteTask(write_uuid, next(counter), df)])) + + +class _MergingSnapshotProducer: + 
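The path helpers above fix the on-disk layout for a commit: data files under `data/`, manifests and manifest lists under `metadata/`, all keyed by UUIDs so concurrent writers cannot collide. A sketch of the names a single unpartitioned append would generate (the location and snapshot id are hypothetical):

```python
import uuid

location = "s3://warehouse/default/cities"  # hypothetical table location
write_uuid = uuid.uuid4()   # shared by all data files of one write (WriteTask)
commit_uuid = uuid.uuid4()  # shared by all metadata files of one commit
snapshot_id = 4242

data_file = f"{location}/data/00000-0-{write_uuid}.parquet"            # task_id 0
manifest = f"{location}/metadata/{commit_uuid}-m0.avro"                # _new_manifest_path
manifest_list = f"{location}/metadata/snap-{snapshot_id}-0-{commit_uuid}.avro"  # attempt 0
print(data_file, manifest, manifest_list, sep="\n")
```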
_operation: Operation + _table: Table + _snapshot_id: int + _parent_snapshot_id: Optional[int] + _added_data_files: List[DataFile] + _commit_uuid: uuid.UUID + + def __init__(self, operation: Operation, table: Table) -> None: + self._operation = operation + self._table = table + self._snapshot_id = table.new_snapshot_id() + # Since we only support the main branch for now + self._parent_snapshot_id = snapshot.snapshot_id if (snapshot := self._table.current_snapshot()) else None + self._added_data_files = [] + self._commit_uuid = uuid.uuid4() + + def append_data_file(self, data_file: DataFile) -> _MergingSnapshotProducer: + self._added_data_files.append(data_file) + return self + + def _deleted_entries(self) -> List[ManifestEntry]: + """To determine if we need to record any deleted entries. + + With partial overwrites we have to use the predicate to evaluate + which entries are affected. + """ + if self._operation == Operation.OVERWRITE: + if self._parent_snapshot_id is not None: + previous_snapshot = self._table.snapshot_by_id(self._parent_snapshot_id) + if previous_snapshot is None: + # This should never happen since you cannot overwrite an empty table + raise ValueError(f"Could not find the previous snapshot: {self._parent_snapshot_id}") + + executor = ExecutorFactory.get_or_create() + + def _get_entries(manifest: ManifestFile) -> List[ManifestEntry]: + return [ + ManifestEntry( + status=ManifestEntryStatus.DELETED, + snapshot_id=entry.snapshot_id, + data_sequence_number=entry.data_sequence_number, + file_sequence_number=entry.file_sequence_number, + data_file=entry.data_file, + ) + for entry in manifest.fetch_manifest_entry(self._table.io, discard_deleted=True) + if entry.data_file.content == DataFileContent.DATA + ] + + list_of_entries = executor.map(_get_entries, previous_snapshot.manifests(self._table.io)) + return list(chain(*list_of_entries)) + return [] + elif self._operation == Operation.APPEND: + return [] + else: + raise ValueError(f"Not implemented for: {self._operation}") + + def _manifests(self) -> List[ManifestFile]: + def _write_added_manifest() -> List[ManifestFile]: + if self._added_data_files: + output_file_location = _new_manifest_path(location=self._table.location(), num=0, commit_uuid=self._commit_uuid) + with write_manifest( + format_version=self._table.format_version, + spec=self._table.spec(), + schema=self._table.schema(), + output_file=self._table.io.new_output(output_file_location), + snapshot_id=self._snapshot_id, + ) as writer: + for data_file in self._added_data_files: + writer.add_entry( + ManifestEntry( + status=ManifestEntryStatus.ADDED, + snapshot_id=self._snapshot_id, + data_sequence_number=None, + file_sequence_number=None, + data_file=data_file, + ) + ) + return [writer.to_manifest_file()] + else: + return [] + + def _write_delete_manifest() -> List[ManifestFile]: + # Check if we need to mark the files as deleted + deleted_entries = self._deleted_entries() + if deleted_entries: + output_file_location = _new_manifest_path(location=self._table.location(), num=1, commit_uuid=self._commit_uuid) + with write_manifest( + format_version=self._table.format_version, + spec=self._table.spec(), + schema=self._table.schema(), + output_file=self._table.io.new_output(output_file_location), + snapshot_id=self._snapshot_id, + ) as writer: + for delete_entry in deleted_entries: + writer.add_entry(delete_entry) + return [writer.to_manifest_file()] + else: + return [] + + def _fetch_existing_manifests() -> List[ManifestFile]: + existing_manifests = [] + + # Add existing 
manifests + if self._operation == Operation.APPEND and self._parent_snapshot_id is not None: + # In case we want to append, just add the existing manifests + previous_snapshot = self._table.snapshot_by_id(self._parent_snapshot_id) + + if previous_snapshot is None: + raise ValueError(f"Snapshot could not be found: {self._parent_snapshot_id}") + + for manifest in previous_snapshot.manifests(io=self._table.io): + if ( + manifest.has_added_files() + or manifest.has_existing_files() + or manifest.added_snapshot_id == self._snapshot_id + ): + existing_manifests.append(manifest) + + return existing_manifests + + executor = ExecutorFactory.get_or_create() + + added_manifests = executor.submit(_write_added_manifest) + delete_manifests = executor.submit(_write_delete_manifest) + existing_manifests = executor.submit(_fetch_existing_manifests) + + return added_manifests.result() + delete_manifests.result() + existing_manifests.result() + + def _summary(self) -> Summary: + ssc = SnapshotSummaryCollector() + + for data_file in self._added_data_files: + ssc.add_file(data_file=data_file) + + previous_snapshot = self._table.snapshot_by_id(self._parent_snapshot_id) if self._parent_snapshot_id is not None else None + + return update_snapshot_summaries( + summary=Summary(operation=self._operation, **ssc.build()), + previous_summary=previous_snapshot.summary if previous_snapshot is not None else None, + truncate_full_table=self._operation == Operation.OVERWRITE, + ) + + def commit(self) -> Snapshot: + new_manifests = self._manifests() + next_sequence_number = self._table.next_sequence_number() + + summary = self._summary() + + manifest_list_file_path = _generate_manifest_list_path( + location=self._table.location(), snapshot_id=self._snapshot_id, attempt=0, commit_uuid=self._commit_uuid + ) + with write_manifest_list( + format_version=self._table.metadata.format_version, + output_file=self._table.io.new_output(manifest_list_file_path), + snapshot_id=self._snapshot_id, + parent_snapshot_id=self._parent_snapshot_id, + sequence_number=next_sequence_number, + ) as writer: + writer.add_manifests(new_manifests) + + snapshot = Snapshot( + snapshot_id=self._snapshot_id, + parent_snapshot_id=self._parent_snapshot_id, + manifest_list=manifest_list_file_path, + sequence_number=next_sequence_number, + summary=summary, + schema_id=self._table.schema().schema_id, + ) + + with self._table.transaction() as tx: + tx.add_snapshot(snapshot=snapshot) + tx.set_ref_snapshot( + snapshot_id=self._snapshot_id, parent_snapshot_id=self._parent_snapshot_id, ref_name="main", type="branch" + ) + + return snapshot diff --git a/pyiceberg/table/snapshots.py b/pyiceberg/table/snapshots.py index e22c95f8ee..a2f15d4405 100644 --- a/pyiceberg/table/snapshots.py +++ b/pyiceberg/table/snapshots.py @@ -14,6 +14,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
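The `commit` method above closes the loop: the new snapshot and the `main` branch pointer are staged on one transaction, each paired with a requirement the catalog re-checks at commit time (table UUID unchanged, `main` still at the expected parent). A condensed sketch of that final step, with `table` and `snapshot` as hypothetical stand-ins for the values built earlier in the method:

```python
# `table` is a loaded Table, `snapshot` a freshly built Snapshot (stand-ins).
with table.transaction() as tx:
    # Stages AddSnapshotUpdate plus an AssertTableUUID requirement.
    tx.add_snapshot(snapshot=snapshot)
    # Stages SetSnapshotRefUpdate plus AssertRefSnapshotId on "main",
    # so a concurrent commit to main fails instead of being clobbered.
    tx.set_ref_snapshot(
        snapshot_id=snapshot.snapshot_id,
        parent_snapshot_id=snapshot.parent_snapshot_id,
        ref_name="main",
        type="branch",
    )
```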
+import time from enum import Enum from typing import ( Any, @@ -138,7 +139,7 @@ class Snapshot(IcebergBaseModel): snapshot_id: int = Field(alias="snapshot-id") parent_snapshot_id: Optional[int] = Field(alias="parent-snapshot-id", default=None) sequence_number: Optional[int] = Field(alias="sequence-number", default=None) - timestamp_ms: int = Field(alias="timestamp-ms") + timestamp_ms: int = Field(alias="timestamp-ms", default_factory=lambda: int(time.time() * 1000)) manifest_list: Optional[str] = Field( alias="manifest-list", description="Location of the snapshot's manifest list file", default=None ) @@ -277,23 +278,30 @@ def _truncate_table_summary(summary: Summary, previous_summary: Mapping[str, str }: summary[prop] = '0' - if value := previous_summary.get(TOTAL_DATA_FILES): - summary[DELETED_DATA_FILES] = value - if value := previous_summary.get(TOTAL_DELETE_FILES): - summary[REMOVED_DELETE_FILES] = value - if value := previous_summary.get(TOTAL_RECORDS): - summary[DELETED_RECORDS] = value - if value := previous_summary.get(TOTAL_FILE_SIZE): - summary[REMOVED_FILE_SIZE] = value - if value := previous_summary.get(TOTAL_POSITION_DELETES): - summary[REMOVED_POSITION_DELETES] = value - if value := previous_summary.get(TOTAL_EQUALITY_DELETES): - summary[REMOVED_EQUALITY_DELETES] = value + def get_prop(prop: str) -> int: + value = previous_summary.get(prop) or '0' + try: + return int(value) + except ValueError as e: + raise ValueError(f"Could not parse summary property {prop} to an int: {value}") from e + + if value := get_prop(TOTAL_DATA_FILES): + summary[DELETED_DATA_FILES] = str(value) + if value := get_prop(TOTAL_DELETE_FILES): + summary[REMOVED_DELETE_FILES] = str(value) + if value := get_prop(TOTAL_RECORDS): + summary[DELETED_RECORDS] = str(value) + if value := get_prop(TOTAL_FILE_SIZE): + summary[REMOVED_FILE_SIZE] = str(value) + if value := get_prop(TOTAL_POSITION_DELETES): + summary[REMOVED_POSITION_DELETES] = str(value) + if value := get_prop(TOTAL_EQUALITY_DELETES): + summary[REMOVED_EQUALITY_DELETES] = str(value) return summary -def _update_snapshot_summaries( +def update_snapshot_summaries( summary: Summary, previous_summary: Optional[Mapping[str, str]] = None, truncate_full_table: bool = False ) -> Summary: if summary.operation not in {Operation.APPEND, Operation.OVERWRITE}: diff --git a/pyproject.toml b/pyproject.toml index 63299413d5..8766dfe281 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -82,6 +82,7 @@ requests-mock = "1.11.0" moto = { version = "^4.2.13", extras = ["server"] } typing-extensions = "4.9.0" pytest-mock = "3.12.0" +pyspark = "3.4.2" cython = "3.0.8" [[tool.mypy.overrides]] diff --git a/tests/integration/test_catalogs.py b/tests/integration/test_reads.py similarity index 100% rename from tests/integration/test_catalogs.py rename to tests/integration/test_reads.py diff --git a/tests/integration/test_writes.py b/tests/integration/test_writes.py new file mode 100644 index 0000000000..f8317e481d --- /dev/null +++ b/tests/integration/test_writes.py @@ -0,0 +1,387 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# pylint:disable=redefined-outer-name +import uuid +from datetime import date, datetime + +import pyarrow as pa +import pytest +from pyspark.sql import SparkSession + +from pyiceberg.catalog import Catalog, load_catalog +from pyiceberg.exceptions import NamespaceAlreadyExistsError, NoSuchTableError +from pyiceberg.schema import Schema +from pyiceberg.types import ( + BinaryType, + BooleanType, + DateType, + DoubleType, + FixedType, + FloatType, + IntegerType, + LongType, + NestedField, + StringType, + TimestampType, + TimestamptzType, +) + + +@pytest.fixture() +def catalog() -> Catalog: + catalog = load_catalog( + "local", + **{ + "type": "rest", + "uri": "http://localhost:8181", + "s3.endpoint": "http://localhost:9000", + "s3.access-key-id": "admin", + "s3.secret-access-key": "password", + }, + ) + + try: + catalog.create_namespace("default") + except NamespaceAlreadyExistsError: + pass + + return catalog + + +TEST_DATA_WITH_NULL = { + 'bool': [False, None, True], + 'string': ['a', None, 'z'], + # Go over the 16 bytes to kick in truncation + 'string_long': ['a' * 22, None, 'z' * 22], + 'int': [1, None, 9], + 'long': [1, None, 9], + 'float': [0.0, None, 0.9], + 'double': [0.0, None, 0.9], + 'timestamp': [datetime(2023, 1, 1, 19, 25, 00), None, datetime(2023, 3, 1, 19, 25, 00)], + 'timestamptz': [datetime(2023, 1, 1, 19, 25, 00), None, datetime(2023, 3, 1, 19, 25, 00)], + 'date': [date(2023, 1, 1), None, date(2023, 3, 1)], + # Not supported by Spark + # 'time': [time(1, 22, 0), None, time(19, 25, 0)], + # Not natively supported by Arrow + # 'uuid': [uuid.UUID('00000000-0000-0000-0000-000000000000').bytes, None, uuid.UUID('11111111-1111-1111-1111-111111111111').bytes], + 'binary': [b'\01', None, b'\22'], + 'fixed': [ + uuid.UUID('00000000-0000-0000-0000-000000000000').bytes, + None, + uuid.UUID('11111111-1111-1111-1111-111111111111').bytes, + ], +} + +TABLE_SCHEMA = Schema( + NestedField(field_id=1, name="bool", field_type=BooleanType(), required=False), + NestedField(field_id=2, name="string", field_type=StringType(), required=False), + NestedField(field_id=3, name="string_long", field_type=StringType(), required=False), + NestedField(field_id=4, name="int", field_type=IntegerType(), required=False), + NestedField(field_id=5, name="long", field_type=LongType(), required=False), + NestedField(field_id=6, name="float", field_type=FloatType(), required=False), + NestedField(field_id=7, name="double", field_type=DoubleType(), required=False), + NestedField(field_id=8, name="timestamp", field_type=TimestampType(), required=False), + NestedField(field_id=9, name="timestamptz", field_type=TimestamptzType(), required=False), + NestedField(field_id=10, name="date", field_type=DateType(), required=False), + # NestedField(field_id=11, name="time", field_type=TimeType(), required=False), + # NestedField(field_id=12, name="uuid", field_type=UuidType(), required=False), + NestedField(field_id=12, name="binary", field_type=BinaryType(), required=False), + NestedField(field_id=13, name="fixed", field_type=FixedType(16), required=False), +) + + +@pytest.fixture(scope="session") +def 
session_catalog() -> Catalog: + return load_catalog( + "local", + **{ + "type": "rest", + "uri": "http://localhost:8181", + "s3.endpoint": "http://localhost:9000", + "s3.access-key-id": "admin", + "s3.secret-access-key": "password", + }, + ) + + +@pytest.fixture(scope="session") +def arrow_table_with_null() -> pa.Table: + """PyArrow table with all kinds of columns""" + pa_schema = pa.schema([ + ("bool", pa.bool_()), + ("string", pa.string()), + ("string_long", pa.string()), + ("int", pa.int32()), + ("long", pa.int64()), + ("float", pa.float32()), + ("double", pa.float64()), + ("timestamp", pa.timestamp(unit="us")), + ("timestamptz", pa.timestamp(unit="us", tz="UTC")), + ("date", pa.date32()), + # Not supported by Spark + # ("time", pa.time64("us")), + # Not natively supported by Arrow + # ("uuid", pa.fixed(16)), + ("binary", pa.binary()), + ("fixed", pa.binary(16)), + ]) + return pa.Table.from_pydict(TEST_DATA_WITH_NULL, schema=pa_schema) + + +@pytest.fixture(scope="session", autouse=True) +def table_v1_with_null(session_catalog: Catalog, arrow_table_with_null: pa.Table) -> None: + identifier = "default.arrow_table_v1_with_null" + + try: + session_catalog.drop_table(identifier=identifier) + except NoSuchTableError: + pass + + tbl = session_catalog.create_table(identifier=identifier, schema=TABLE_SCHEMA, properties={'format-version': '1'}) + tbl.append(arrow_table_with_null) + + assert tbl.format_version == 1, f"Expected v1, got: v{tbl.format_version}" + + +@pytest.fixture(scope="session", autouse=True) +def table_v1_appended_with_null(session_catalog: Catalog, arrow_table_with_null: pa.Table) -> None: + identifier = "default.arrow_table_v1_appended_with_null" + + try: + session_catalog.drop_table(identifier=identifier) + except NoSuchTableError: + pass + + tbl = session_catalog.create_table(identifier=identifier, schema=TABLE_SCHEMA, properties={'format-version': '1'}) + + for _ in range(2): + tbl.append(arrow_table_with_null) + + assert tbl.format_version == 1, f"Expected v1, got: v{tbl.format_version}" + + +@pytest.fixture(scope="session", autouse=True) +def table_v2_with_null(session_catalog: Catalog, arrow_table_with_null: pa.Table) -> None: + identifier = "default.arrow_table_v2_with_null" + + try: + session_catalog.drop_table(identifier=identifier) + except NoSuchTableError: + pass + + tbl = session_catalog.create_table(identifier=identifier, schema=TABLE_SCHEMA, properties={'format-version': '2'}) + tbl.append(arrow_table_with_null) + + assert tbl.format_version == 2, f"Expected v2, got: v{tbl.format_version}" + + +@pytest.fixture(scope="session", autouse=True) +def table_v2_appended_with_null(session_catalog: Catalog, arrow_table_with_null: pa.Table) -> None: + identifier = "default.arrow_table_v2_appended_with_null" + + try: + session_catalog.drop_table(identifier=identifier) + except NoSuchTableError: + pass + + tbl = session_catalog.create_table(identifier=identifier, schema=TABLE_SCHEMA, properties={'format-version': '2'}) + + for _ in range(2): + tbl.append(arrow_table_with_null) + + assert tbl.format_version == 2, f"Expected v2, got: v{tbl.format_version}" + + +@pytest.fixture(scope="session", autouse=True) +def table_v1_v2_appended_with_null(session_catalog: Catalog, arrow_table_with_null: pa.Table) -> None: + identifier = "default.arrow_table_v1_v2_appended_with_null" + + try: + session_catalog.drop_table(identifier=identifier) + except NoSuchTableError: + pass + + tbl = session_catalog.create_table(identifier=identifier, schema=TABLE_SCHEMA, properties={'format-version': 
'1'}) + tbl.append(arrow_table_with_null) + + assert tbl.format_version == 1, f"Expected v1, got: v{tbl.format_version}" + + with tbl.transaction() as tx: + tx.upgrade_table_version(format_version=2) + + tbl.append(arrow_table_with_null) + + assert tbl.format_version == 2, f"Expected v2, got: v{tbl.format_version}" + + +@pytest.fixture(scope="session") +def spark() -> SparkSession: + import os + + os.environ["PYSPARK_SUBMIT_ARGS"] = ( + "--packages org.apache.iceberg:iceberg-spark-runtime-3.4_2.12:1.4.0,org.apache.iceberg:iceberg-aws-bundle:1.4.0 pyspark-shell" + ) + os.environ["AWS_REGION"] = "us-east-1" + os.environ["AWS_ACCESS_KEY_ID"] = "admin" + os.environ["AWS_SECRET_ACCESS_KEY"] = "password" + + spark = ( + SparkSession.builder.appName("PyIceberg integration test") + .config("spark.sql.extensions", "org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions") + .config("spark.sql.catalog.integration", "org.apache.iceberg.spark.SparkCatalog") + .config("spark.sql.catalog.integration.catalog-impl", "org.apache.iceberg.rest.RESTCatalog") + .config("spark.sql.catalog.integration.uri", "http://localhost:8181") + .config("spark.sql.catalog.integration.io-impl", "org.apache.iceberg.aws.s3.S3FileIO") + .config("spark.sql.catalog.integration.warehouse", "s3://warehouse/wh/") + .config("spark.sql.catalog.integration.s3.endpoint", "http://localhost:9000") + .config("spark.sql.catalog.integration.s3.path-style-access", "true") + .config("spark.sql.defaultCatalog", "integration") + .getOrCreate() + ) + + return spark + + +@pytest.mark.integration +@pytest.mark.parametrize("format_version", [1, 2]) +def test_query_count(spark: SparkSession, format_version: int) -> None: + df = spark.table(f"default.arrow_table_v{format_version}_with_null") + assert df.count() == 3, "Expected 3 rows" + + +@pytest.mark.integration +@pytest.mark.parametrize("col", TEST_DATA_WITH_NULL.keys()) +@pytest.mark.parametrize("format_version", [1, 2]) +def test_query_filter_null(spark: SparkSession, col: str, format_version: int) -> None: + identifier = f"default.arrow_table_v{format_version}_with_null" + df = spark.table(identifier) + assert df.where(f"{col} is null").count() == 1, f"Expected 1 row for {col}" + assert df.where(f"{col} is not null").count() == 2, f"Expected 2 rows for {col}" + + +@pytest.mark.integration +@pytest.mark.parametrize("col", TEST_DATA_WITH_NULL.keys()) +@pytest.mark.parametrize("format_version", [1, 2]) +def test_query_filter_appended_null(spark: SparkSession, col: str, format_version: int) -> None: + identifier = f"default.arrow_table_v{format_version}_appended_with_null" + df = spark.table(identifier) + assert df.where(f"{col} is null").count() == 2, f"Expected 2 rows for {col}" + assert df.where(f"{col} is not null").count() == 4, f"Expected 4 rows for {col}" + + +@pytest.mark.integration +@pytest.mark.parametrize("col", TEST_DATA_WITH_NULL.keys()) +def test_query_filter_v1_v2_append_null(spark: SparkSession, col: str) -> None: + identifier = "default.arrow_table_v1_v2_appended_with_null" + df = spark.table(identifier) + assert df.where(f"{col} is null").count() == 2, f"Expected 2 rows for {col}" + assert df.where(f"{col} is not null").count() == 4, f"Expected 4 rows for {col}" + + +@pytest.mark.integration +def test_summaries(spark: SparkSession, session_catalog: Catalog, arrow_table_with_null: pa.Table) -> None: + identifier = "default.arrow_table_summaries" + + try: + session_catalog.drop_table(identifier=identifier) + except NoSuchTableError: + pass + tbl = 
session_catalog.create_table(identifier=identifier, schema=TABLE_SCHEMA, properties={'format-version': '1'}) + + tbl.append(arrow_table_with_null) + tbl.append(arrow_table_with_null) + tbl.overwrite(arrow_table_with_null) + + rows = spark.sql( + f""" + SELECT operation, summary + FROM {identifier}.snapshots + ORDER BY committed_at ASC + """ + ).collect() + + operations = [row.operation for row in rows] + assert operations == ['append', 'append', 'overwrite'] + + summaries = [row.summary for row in rows] + + assert summaries[0] == { + 'added-data-files': '1', + 'added-files-size': '5283', + 'added-records': '3', + 'total-data-files': '1', + 'total-delete-files': '0', + 'total-equality-deletes': '0', + 'total-files-size': '5283', + 'total-position-deletes': '0', + 'total-records': '3', + } + + assert summaries[1] == { + 'added-data-files': '1', + 'added-files-size': '5283', + 'added-records': '3', + 'total-data-files': '2', + 'total-delete-files': '0', + 'total-equality-deletes': '0', + 'total-files-size': '10566', + 'total-position-deletes': '0', + 'total-records': '6', + } + + assert summaries[2] == { + 'added-data-files': '1', + 'added-files-size': '5283', + 'added-records': '3', + 'deleted-data-files': '2', + 'deleted-records': '6', + 'removed-files-size': '10566', + 'total-data-files': '1', + 'total-delete-files': '0', + 'total-equality-deletes': '0', + 'total-files-size': '5283', + 'total-position-deletes': '0', + 'total-records': '3', + } + + +@pytest.mark.integration +def test_data_files(spark: SparkSession, session_catalog: Catalog, arrow_table_with_null: pa.Table) -> None: + identifier = "default.arrow_data_files" + + try: + session_catalog.drop_table(identifier=identifier) + except NoSuchTableError: + pass + tbl = session_catalog.create_table(identifier=identifier, schema=TABLE_SCHEMA, properties={'format-version': '1'}) + + tbl.overwrite(arrow_table_with_null) + # should produce a DELETE entry + tbl.overwrite(arrow_table_with_null) + # Since we don't rewrite, this should produce a new manifest with an ADDED entry + tbl.append(arrow_table_with_null) + + rows = spark.sql( + f""" + SELECT added_data_files_count, existing_data_files_count, deleted_data_files_count + FROM {identifier}.all_manifests + """ + ).collect() + + assert [row.added_data_files_count for row in rows] == [1, 1, 0, 1, 1] + assert [row.existing_data_files_count for row in rows] == [0, 0, 0, 0, 0] + assert [row.deleted_data_files_count for row in rows] == [0, 0, 1, 0, 0] diff --git a/tests/io/test_pyarrow_stats.py b/tests/io/test_pyarrow_stats.py index 6f00061174..01b844a43e 100644 --- a/tests/io/test_pyarrow_stats.py +++ b/tests/io/test_pyarrow_stats.py @@ -81,7 +81,7 @@ class TestStruct: y: Optional[float] -def construct_test_table() -> Tuple[Any, Any, Union[TableMetadataV1, TableMetadataV2]]: +def construct_test_table() -> Tuple[pq.FileMetaData, Union[TableMetadataV1, TableMetadataV2]]: table_metadata = { "format-version": 2, "location": "s3://bucket/test/location", @@ -172,7 +172,7 @@ def construct_test_table() -> Tuple[Any, Any, Union[TableMetadataV1, TableMetada with pq.ParquetWriter(f, table.schema, metadata_collector=metadata_collector) as writer: writer.write_table(table) - return f.getvalue(), metadata_collector[0], table_metadata + return metadata_collector[0], table_metadata def get_current_schema( @@ -182,45 +182,27 @@ def get_current_schema( def test_record_count() -> None: - (file_bytes, metadata, table_metadata) = construct_test_table() + metadata, table_metadata = construct_test_table() schema = 
get_current_schema(table_metadata) datafile = DataFile() fill_parquet_file_metadata( datafile, metadata, - len(file_bytes), compute_statistics_plan(schema, table_metadata.properties), parquet_path_to_id_mapping(schema), ) assert datafile.record_count == 4 -def test_file_size() -> None: - (file_bytes, metadata, table_metadata) = construct_test_table() - - schema = get_current_schema(table_metadata) - datafile = DataFile() - fill_parquet_file_metadata( - datafile, - metadata, - len(file_bytes), - compute_statistics_plan(schema, table_metadata.properties), - parquet_path_to_id_mapping(schema), - ) - - assert datafile.file_size_in_bytes == len(file_bytes) - - def test_value_counts() -> None: - (file_bytes, metadata, table_metadata) = construct_test_table() + metadata, table_metadata = construct_test_table() schema = get_current_schema(table_metadata) datafile = DataFile() fill_parquet_file_metadata( datafile, metadata, - len(file_bytes), compute_statistics_plan(schema, table_metadata.properties), parquet_path_to_id_mapping(schema), ) @@ -236,14 +218,13 @@ def test_value_counts() -> None: def test_column_sizes() -> None: - (file_bytes, metadata, table_metadata) = construct_test_table() + metadata, table_metadata = construct_test_table() schema = get_current_schema(table_metadata) datafile = DataFile() fill_parquet_file_metadata( datafile, metadata, - len(file_bytes), compute_statistics_plan(schema, table_metadata.properties), parquet_path_to_id_mapping(schema), ) @@ -258,14 +239,13 @@ def test_column_sizes() -> None: def test_null_and_nan_counts() -> None: - (file_bytes, metadata, table_metadata) = construct_test_table() + metadata, table_metadata = construct_test_table() schema = get_current_schema(table_metadata) datafile = DataFile() fill_parquet_file_metadata( datafile, metadata, - len(file_bytes), compute_statistics_plan(schema, table_metadata.properties), parquet_path_to_id_mapping(schema), ) @@ -287,14 +267,13 @@ def test_null_and_nan_counts() -> None: def test_bounds() -> None: - (file_bytes, metadata, table_metadata) = construct_test_table() + metadata, table_metadata = construct_test_table() schema = get_current_schema(table_metadata) datafile = DataFile() fill_parquet_file_metadata( datafile, metadata, - len(file_bytes), compute_statistics_plan(schema, table_metadata.properties), parquet_path_to_id_mapping(schema), ) @@ -332,7 +311,7 @@ def test_metrics_mode_parsing() -> None: def test_metrics_mode_none() -> None: - (file_bytes, metadata, table_metadata) = construct_test_table() + metadata, table_metadata = construct_test_table() schema = get_current_schema(table_metadata) datafile = DataFile() @@ -340,7 +319,6 @@ def test_metrics_mode_none() -> None: fill_parquet_file_metadata( datafile, metadata, - len(file_bytes), compute_statistics_plan(schema, table_metadata.properties), parquet_path_to_id_mapping(schema), ) @@ -353,7 +331,7 @@ def test_metrics_mode_none() -> None: def test_metrics_mode_counts() -> None: - (file_bytes, metadata, table_metadata) = construct_test_table() + metadata, table_metadata = construct_test_table() schema = get_current_schema(table_metadata) datafile = DataFile() @@ -361,7 +339,6 @@ def test_metrics_mode_counts() -> None: fill_parquet_file_metadata( datafile, metadata, - len(file_bytes), compute_statistics_plan(schema, table_metadata.properties), parquet_path_to_id_mapping(schema), ) @@ -374,7 +351,7 @@ def test_metrics_mode_counts() -> None: def test_metrics_mode_full() -> None: - (file_bytes, metadata, table_metadata) = construct_test_table() + metadata, 
table_metadata = construct_test_table() schema = get_current_schema(table_metadata) datafile = DataFile() @@ -382,7 +359,6 @@ def test_metrics_mode_full() -> None: fill_parquet_file_metadata( datafile, metadata, - len(file_bytes), compute_statistics_plan(schema, table_metadata.properties), parquet_path_to_id_mapping(schema), ) @@ -401,7 +377,7 @@ def test_metrics_mode_full() -> None: def test_metrics_mode_non_default_trunc() -> None: - (file_bytes, metadata, table_metadata) = construct_test_table() + metadata, table_metadata = construct_test_table() schema = get_current_schema(table_metadata) datafile = DataFile() @@ -409,7 +385,6 @@ def test_metrics_mode_non_default_trunc() -> None: fill_parquet_file_metadata( datafile, metadata, - len(file_bytes), compute_statistics_plan(schema, table_metadata.properties), parquet_path_to_id_mapping(schema), ) @@ -428,7 +403,7 @@ def test_metrics_mode_non_default_trunc() -> None: def test_column_metrics_mode() -> None: - (file_bytes, metadata, table_metadata) = construct_test_table() + metadata, table_metadata = construct_test_table() schema = get_current_schema(table_metadata) datafile = DataFile() @@ -437,7 +412,6 @@ def test_column_metrics_mode() -> None: fill_parquet_file_metadata( datafile, metadata, - len(file_bytes), compute_statistics_plan(schema, table_metadata.properties), parquet_path_to_id_mapping(schema), ) @@ -455,7 +429,7 @@ def test_column_metrics_mode() -> None: assert 1 not in datafile.upper_bounds -def construct_test_table_primitive_types() -> Tuple[Any, Any, Union[TableMetadataV1, TableMetadataV2]]: +def construct_test_table_primitive_types() -> Tuple[pq.FileMetaData, Union[TableMetadataV1, TableMetadataV2]]: table_metadata = { "format-version": 2, "location": "s3://bucket/test/location", @@ -527,11 +501,11 @@ def construct_test_table_primitive_types() -> Tuple[Any, Any, Union[TableMetadat with pq.ParquetWriter(f, table.schema, metadata_collector=metadata_collector) as writer: writer.write_table(table) - return f.getvalue(), metadata_collector[0], table_metadata + return metadata_collector[0], table_metadata def test_metrics_primitive_types() -> None: - (file_bytes, metadata, table_metadata) = construct_test_table_primitive_types() + metadata, table_metadata = construct_test_table_primitive_types() schema = get_current_schema(table_metadata) datafile = DataFile() @@ -539,7 +513,6 @@ def test_metrics_primitive_types() -> None: fill_parquet_file_metadata( datafile, metadata, - len(file_bytes), compute_statistics_plan(schema, table_metadata.properties), parquet_path_to_id_mapping(schema), ) @@ -579,7 +552,7 @@ def test_metrics_primitive_types() -> None: assert datafile.upper_bounds[12] == b"wp" -def construct_test_table_invalid_upper_bound() -> Tuple[Any, Any, Union[TableMetadataV1, TableMetadataV2]]: +def construct_test_table_invalid_upper_bound() -> Tuple[pq.FileMetaData, Union[TableMetadataV1, TableMetadataV2]]: table_metadata = { "format-version": 2, "location": "s3://bucket/test/location", @@ -627,11 +600,11 @@ def construct_test_table_invalid_upper_bound() -> Tuple[Any, Any, Union[TableMet with pq.ParquetWriter(f, table.schema, metadata_collector=metadata_collector) as writer: writer.write_table(table) - return f.getvalue(), metadata_collector[0], table_metadata + return metadata_collector[0], table_metadata def test_metrics_invalid_upper_bound() -> None: - (file_bytes, metadata, table_metadata) = construct_test_table_invalid_upper_bound() + metadata, table_metadata = construct_test_table_invalid_upper_bound() schema = 
get_current_schema(table_metadata) datafile = DataFile() @@ -639,7 +612,6 @@ def test_metrics_invalid_upper_bound() -> None: fill_parquet_file_metadata( datafile, metadata, - len(file_bytes), compute_statistics_plan(schema, table_metadata.properties), parquet_path_to_id_mapping(schema), ) @@ -660,14 +632,13 @@ def test_metrics_invalid_upper_bound() -> None: def test_offsets() -> None: - (file_bytes, metadata, table_metadata) = construct_test_table() + metadata, table_metadata = construct_test_table() schema = get_current_schema(table_metadata) datafile = DataFile() fill_parquet_file_metadata( datafile, metadata, - len(file_bytes), compute_statistics_plan(schema, table_metadata.properties), parquet_path_to_id_mapping(schema), ) diff --git a/tests/table/test_init.py b/tests/table/test_init.py index d3bbe418c4..efee43b192 100644 --- a/tests/table/test_init.py +++ b/tests/table/test_init.py @@ -59,7 +59,7 @@ UpdateSchema, _apply_table_update, _generate_snapshot_id, - _match_deletes_to_datafile, + _match_deletes_to_data_file, _TableMetadataUpdateContext, update_table_metadata, ) @@ -358,7 +358,7 @@ def test_match_deletes_to_datafile() -> None: upper_bounds={}, ), ) - assert _match_deletes_to_datafile( + assert _match_deletes_to_data_file( data_entry, SortedList(iterable=[delete_entry_1, delete_entry_2], key=lambda entry: entry.sequence_number or INITIAL_SEQUENCE_NUMBER), ) == { @@ -415,7 +415,7 @@ def test_match_deletes_to_datafile_duplicate_number() -> None: upper_bounds={}, ), ) - assert _match_deletes_to_datafile( + assert _match_deletes_to_data_file( data_entry, SortedList(iterable=[delete_entry_1, delete_entry_2], key=lambda entry: entry.sequence_number or INITIAL_SEQUENCE_NUMBER), ) == { diff --git a/tests/table/test_snapshots.py b/tests/table/test_snapshots.py index 124c513022..3591847ad6 100644 --- a/tests/table/test_snapshots.py +++ b/tests/table/test_snapshots.py @@ -18,7 +18,7 @@ import pytest from pyiceberg.manifest import DataFile, DataFileContent, ManifestContent, ManifestFile -from pyiceberg.table.snapshots import Operation, Snapshot, SnapshotSummaryCollector, Summary, _update_snapshot_summaries +from pyiceberg.table.snapshots import Operation, Snapshot, SnapshotSummaryCollector, Summary, update_snapshot_summaries @pytest.fixture @@ -161,7 +161,7 @@ def test_snapshot_summary_collector(data_file: DataFile) -> None: def test_merge_snapshot_summaries_empty() -> None: - assert _update_snapshot_summaries(Summary(Operation.APPEND)) == Summary( + assert update_snapshot_summaries(Summary(Operation.APPEND)) == Summary( operation=Operation.APPEND, **{ 'total-data-files': '0', @@ -175,7 +175,7 @@ def test_merge_snapshot_summaries_empty() -> None: def test_merge_snapshot_summaries_new_summary() -> None: - actual = _update_snapshot_summaries( + actual = update_snapshot_summaries( summary=Summary( operation=Operation.APPEND, **{ @@ -211,7 +211,7 @@ def test_merge_snapshot_summaries_new_summary() -> None: def test_merge_snapshot_summaries_overwrite_summary() -> None: - actual = _update_snapshot_summaries( + actual = update_snapshot_summaries( summary=Summary( operation=Operation.OVERWRITE, **{ @@ -260,17 +260,17 @@ def test_merge_snapshot_summaries_overwrite_summary() -> None: def test_invalid_operation() -> None: with pytest.raises(ValueError) as e: - _update_snapshot_summaries(summary=Summary(Operation.REPLACE)) + update_snapshot_summaries(summary=Summary(Operation.REPLACE)) assert "Operation not implemented: Operation.REPLACE" in str(e.value) with pytest.raises(ValueError) as e: - 
_update_snapshot_summaries(summary=Summary(Operation.DELETE)) + update_snapshot_summaries(summary=Summary(Operation.DELETE)) assert "Operation not implemented: Operation.DELETE" in str(e.value) def test_invalid_type() -> None: with pytest.raises(ValueError) as e: - _update_snapshot_summaries( + update_snapshot_summaries( summary=Summary( operation=Operation.OVERWRITE, **{ diff --git a/tests/utils/test_manifest.py b/tests/utils/test_manifest.py index 08906b68ad..6ef11a47ea 100644 --- a/tests/utils/test_manifest.py +++ b/tests/utils/test_manifest.py @@ -55,14 +55,19 @@ def _verify_metadata_with_fastavro(avro_file: str, expected_metadata: Dict[str, def test_read_manifest_entry(generated_manifest_entry_file: str) -> None: manifest = ManifestFile( - manifest_path=generated_manifest_entry_file, manifest_length=0, partition_spec_id=0, sequence_number=None, partitions=[] + manifest_path=generated_manifest_entry_file, + manifest_length=0, + partition_spec_id=0, + added_snapshot_id=0, + sequence_number=0, + partitions=[], ) manifest_entries = manifest.fetch_manifest_entry(PyArrowFileIO()) manifest_entry = manifest_entries[0] assert manifest_entry.status == ManifestEntryStatus.ADDED assert manifest_entry.snapshot_id == 8744736658442914487 - assert manifest_entry.data_sequence_number is None + assert manifest_entry.data_sequence_number == 0 assert isinstance(manifest_entry.data_file, DataFile) data_file = manifest_entry.data_file From d796878e911a70ce71bad5f65e68e1da60e9e487 Mon Sep 17 00:00:00 2001 From: waifairer <98724840+waifairer@users.noreply.github.com> Date: Thu, 18 Jan 2024 03:56:30 -0700 Subject: [PATCH 029/169] Implement pre-existing session support for dynamodb catalog (#104) --- mkdocs/docs/configuration.md | 13 +++++++++++++ mkdocs/docs/contributing.md | 4 +++- pyiceberg/catalog/dynamodb.py | 10 +++++++++- tests/catalog/test_dynamodb.py | 28 ++++++++++++++++++++++++++-- 4 files changed, 51 insertions(+), 4 deletions(-) diff --git a/mkdocs/docs/configuration.md b/mkdocs/docs/configuration.md index 12bb35181b..bfe1e62fac 100644 --- a/mkdocs/docs/configuration.md +++ b/mkdocs/docs/configuration.md @@ -218,6 +218,19 @@ catalog: table-name: iceberg ``` +If you prefer to pass the credentials explicitly to the client instead of relying on environment variables, you can set them in the catalog configuration: + +```yaml +catalog: + default: + type: dynamodb + table-name: iceberg + aws_access_key_id: + aws_secret_access_key: + aws_session_token: + region_name: +``` + # Concurrency PyIceberg uses multiple threads to parallelize operations. The number of workers can be configured by supplying a `max-workers` entry in the configuration file, or by setting the `PYICEBERG_MAX_WORKERS` environment variable. The default value depends on the system hardware and Python version. See [the Python documentation](https://docs.python.org/3/library/concurrent.futures.html#threadpoolexecutor) for more details. diff --git a/mkdocs/docs/contributing.md b/mkdocs/docs/contributing.md index 3973b763a0..8ec6dcb2d2 100644 --- a/mkdocs/docs/contributing.md +++ b/mkdocs/docs/contributing.md @@ -30,10 +30,12 @@ For the development, Poetry is used for packing and dependency management. You c pip install poetry ``` -If you have an older version of pip and virtualenv you need to update these: +Make sure you're using an up-to-date environment from a fresh venv: ```bash pip install --upgrade virtualenv pip +python -m venv ./venv +source ./venv/bin/activate ``` To get started, you can run `make install`, which installs Poetry and all the dependencies of the Iceberg library. 
This also installs the development dependencies. If you don't want to install the development dependencies, you need to install using `poetry install --no-dev`. diff --git a/pyiceberg/catalog/dynamodb.py b/pyiceberg/catalog/dynamodb.py index 3eee95da3c..6c3f931bd8 100644 --- a/pyiceberg/catalog/dynamodb.py +++ b/pyiceberg/catalog/dynamodb.py @@ -80,7 +80,15 @@ class DynamoDbCatalog(Catalog): def __init__(self, name: str, **properties: str): super().__init__(name, **properties) - self.dynamodb = boto3.client(DYNAMODB_CLIENT) + session = boto3.Session( + profile_name=properties.get("profile_name"), + region_name=properties.get("region_name"), + botocore_session=properties.get("botocore_session"), + aws_access_key_id=properties.get("aws_access_key_id"), + aws_secret_access_key=properties.get("aws_secret_access_key"), + aws_session_token=properties.get("aws_session_token"), + ) + self.dynamodb = session.client(DYNAMODB_CLIENT) self.dynamodb_table_name = self.properties.get(DYNAMODB_TABLE_NAME, DYNAMODB_TABLE_NAME_DEFAULT) self._ensure_catalog_table_exists_or_create() diff --git a/tests/catalog/test_dynamodb.py b/tests/catalog/test_dynamodb.py index 917a5d2937..fb4eaaa575 100644 --- a/tests/catalog/test_dynamodb.py +++ b/tests/catalog/test_dynamodb.py @@ -14,11 +14,12 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -from typing import List +from typing import Any, Dict, List import boto3 import pytest from moto import mock_dynamodb +from unittest import mock from pyiceberg.catalog import METADATA_LOCATION, TABLE_TYPE from pyiceberg.catalog.dynamodb import ( @@ -26,6 +27,7 @@ DYNAMODB_COL_IDENTIFIER, DYNAMODB_COL_NAMESPACE, DYNAMODB_TABLE_NAME_DEFAULT, + ACTIVE, DynamoDbCatalog, _add_property_prefix, ) @@ -47,12 +49,13 @@ def test_create_dynamodb_catalog_with_table_name(_dynamodb, _bucket_initialize: DynamoDbCatalog("test_ddb_catalog") response = _dynamodb.describe_table(TableName=DYNAMODB_TABLE_NAME_DEFAULT) assert response["Table"]["TableName"] == DYNAMODB_TABLE_NAME_DEFAULT + assert response["Table"]["TableStatus"] == ACTIVE custom_table_name = "custom_table_name" DynamoDbCatalog("test_ddb_catalog", **{"table-name": custom_table_name}) response = _dynamodb.describe_table(TableName=custom_table_name) assert response["Table"]["TableName"] == custom_table_name - + assert response["Table"]["TableStatus"] == ACTIVE @mock_dynamodb def test_create_table_with_database_location( @@ -506,3 +509,24 @@ def test_update_namespace_properties_overlap_update_removal(_bucket_initialize: test_catalog.update_namespace_properties(database_name, removals, updates) # should not modify the properties assert test_catalog.load_namespace_properties(database_name) == test_properties + +def test_passing_provided_profile() -> None: + catalog_name = "test_ddb_catalog" + session_props = { + "aws_access_key_id": "abc", + "aws_secret_access_key": "def", + "aws_session_token": "ghi", + "region_name": "eu-central-1", + "botocore_session": None, + "profile_name": None + } + props = {"py-io-impl": "pyiceberg.io.fsspec.FsspecFileIO"} + props.update(session_props) + with mock.patch('boto3.Session', return_value=mock.Mock()) as mock_session: + mock_client = mock.Mock() + mock_session.return_value.client.return_value = mock_client + mock_client.describe_table.return_value = {'Table': {'TableStatus': 'ACTIVE'}} + test_catalog = DynamoDbCatalog(catalog_name, **props) + assert test_catalog.dynamodb is mock_client + 
mock_session.assert_called_with(**session_props) + assert test_catalog.dynamodb is mock_session().client() From f44fc965995555afc4203d46fb24738331521533 Mon Sep 17 00:00:00 2001 From: Fokko Driesprong Date: Thu, 18 Jan 2024 21:43:26 +0100 Subject: [PATCH 030/169] Fix the CI (#279) * Fix the CI * Fix CI --- tests/catalog/test_dynamodb.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/tests/catalog/test_dynamodb.py b/tests/catalog/test_dynamodb.py index fb4eaaa575..5af89ef3be 100644 --- a/tests/catalog/test_dynamodb.py +++ b/tests/catalog/test_dynamodb.py @@ -14,20 +14,20 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -from typing import Any, Dict, List +from typing import List +from unittest import mock import boto3 import pytest from moto import mock_dynamodb -from unittest import mock from pyiceberg.catalog import METADATA_LOCATION, TABLE_TYPE from pyiceberg.catalog.dynamodb import ( + ACTIVE, DYNAMODB_COL_CREATED_AT, DYNAMODB_COL_IDENTIFIER, DYNAMODB_COL_NAMESPACE, DYNAMODB_TABLE_NAME_DEFAULT, - ACTIVE, DynamoDbCatalog, _add_property_prefix, ) @@ -57,6 +57,7 @@ def test_create_dynamodb_catalog_with_table_name(_dynamodb, _bucket_initialize: assert response["Table"]["TableName"] == custom_table_name assert response["Table"]["TableStatus"] == ACTIVE + @mock_dynamodb def test_create_table_with_database_location( _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str @@ -510,6 +511,7 @@ def test_update_namespace_properties_overlap_update_removal(_bucket_initialize: # should not modify the properties assert test_catalog.load_namespace_properties(database_name) == test_properties + def test_passing_provided_profile() -> None: catalog_name = "test_ddb_catalog" session_props = { @@ -518,10 +520,10 @@ def test_passing_provided_profile() -> None: "aws_session_token": "ghi", "region_name": "eu-central-1", "botocore_session": None, - "profile_name": None + "profile_name": None, } props = {"py-io-impl": "pyiceberg.io.fsspec.FsspecFileIO"} - props.update(session_props) + props.update(session_props) # type: ignore with mock.patch('boto3.Session', return_value=mock.Mock()) as mock_session: mock_client = mock.Mock() mock_session.return_value.client.return_value = mock_client From e1018e5005eb777cf0741e190b69f72d3d4256f1 Mon Sep 17 00:00:00 2001 From: Sung Yun <107272191+syun64@users.noreply.github.com> Date: Thu, 18 Jan 2024 20:26:00 -0500 Subject: [PATCH 031/169] fix all falsey transforms (#280) --- pyiceberg/transforms.py | 6 +++--- tests/test_transforms.py | 20 ++++++++++++++++++++ 2 files changed, 23 insertions(+), 3 deletions(-) diff --git a/pyiceberg/transforms.py b/pyiceberg/transforms.py index 4386547c55..9f499a3dd4 100644 --- a/pyiceberg/transforms.py +++ b/pyiceberg/transforms.py @@ -382,7 +382,7 @@ def month_func(v: Any) -> int: else: raise ValueError(f"Cannot apply month transform for type: {source}") - return lambda v: month_func(v) if v else None + return lambda v: month_func(v) if v is not None else None def can_transform(self, source: IcebergType) -> bool: return isinstance(source, (DateType, TimestampType, TimestamptzType)) @@ -424,7 +424,7 @@ def day_func(v: Any) -> int: else: raise ValueError(f"Cannot apply day transform for type: {source}") - return lambda v: day_func(v) if v else None + return lambda v: day_func(v) if v is not None else None def can_transform(self, source: IcebergType) -> bool: return 
isinstance(source, (DateType, TimestampType, TimestamptzType)) @@ -464,7 +464,7 @@ def hour_func(v: Any) -> int: else: raise ValueError(f"Cannot apply hour transform for type: {source}") - return lambda v: hour_func(v) if v else None + return lambda v: hour_func(v) if v is not None else None def can_transform(self, source: IcebergType) -> bool: return isinstance(source, (TimestampType, TimestamptzType)) diff --git a/tests/test_transforms.py b/tests/test_transforms.py index d4910cc1a1..4fea7739d1 100644 --- a/tests/test_transforms.py +++ b/tests/test_transforms.py @@ -236,6 +236,19 @@ def test_negative_value_to_human_string(negative_value: int, time_transform: Tim assert time_transform.to_human_string(TimestampType(), negative_value) == expected +@pytest.mark.parametrize( + "zero_value,time_transform,expected", + [ + (0, YearTransform(), "1970"), + (0, MonthTransform(), "1970-01"), + (0, DayTransform(), "1970-01-01"), + (0, HourTransform(), "1970-01-01-00"), + ], +) +def test_zero_value_to_human_string(zero_value: int, time_transform: TimeTransform[Any], expected: str) -> None: + assert time_transform.to_human_string(TimestampType(), zero_value) == expected + + @pytest.mark.parametrize( "type_var", [ @@ -274,6 +287,12 @@ def test_time_methods(type_var: PrimitiveType) -> None: (MonthTransform(), TimestamptzType(), -1, -1), (DayTransform(), TimestampType(), 1512151975038194, 17501), (DayTransform(), TimestampType(), -1, -1), + (YearTransform(), DateType(), 0, 0), + (MonthTransform(), DateType(), 0, 0), + (DayTransform(), DateType(), 0, 0), + (YearTransform(), TimestampType(), 0, 0), + (MonthTransform(), TimestampType(), 0, 0), + (DayTransform(), TimestampType(), 0, 0), ], ) def test_time_apply_method(transform: TimeTransform[Any], type_var: PrimitiveType, value: int, expected: int) -> None: @@ -291,6 +310,7 @@ def test_hour_method(type_var: PrimitiveType) -> None: assert HourTransform().can_transform(type_var) assert HourTransform().result_type(type_var) == IntegerType() assert HourTransform().transform(type_var)(1512151975038194) == 420042 # type: ignore + assert HourTransform().transform(type_var)(0) == 0 # type: ignore assert HourTransform().dedup_name == "time" From 70972d9436f24fd899a7a2c8600c96bb4d282e12 Mon Sep 17 00:00:00 2001 From: Sung Yun <107272191+syun64@users.noreply.github.com> Date: Thu, 18 Jan 2024 20:30:01 -0500 Subject: [PATCH 032/169] Apply Name mapping (#219) --- pyiceberg/io/pyarrow.py | 229 ++++++++++++++++------- pyiceberg/table/__init__.py | 13 ++ pyiceberg/table/name_mapping.py | 2 + tests/io/test_pyarrow_visitor.py | 305 +++++++++++++++++++++++++++++++ 4 files changed, 484 insertions(+), 65 deletions(-) diff --git a/pyiceberg/io/pyarrow.py b/pyiceberg/io/pyarrow.py index b4988c677a..035f5e8031 100644 --- a/pyiceberg/io/pyarrow.py +++ b/pyiceberg/io/pyarrow.py @@ -124,6 +124,7 @@ visit_with_partner, ) from pyiceberg.table import WriteTask +from pyiceberg.table.name_mapping import NameMapping from pyiceberg.transforms import TruncateTransform from pyiceberg.typedef import EMPTY_DICT, Properties, Record from pyiceberg.types import ( @@ -164,6 +165,9 @@ # The PARQUET: in front means that it is Parquet specific, in this case the field_id PYARROW_PARQUET_FIELD_ID_KEY = b"PARQUET:field_id" PYARROW_FIELD_DOC_KEY = b"doc" +LIST_ELEMENT_NAME = "element" +MAP_KEY_NAME = "key" +MAP_VALUE_NAME = "value" T = TypeVar("T") @@ -631,8 +635,16 @@ def _combine_positional_deletes(positional_deletes: List[pa.ChunkedArray], rows: return np.setdiff1d(np.arange(rows), all_chunks, 
assume_unique=False) -def pyarrow_to_schema(schema: pa.Schema) -> Schema: - visitor = _ConvertToIceberg() +def pyarrow_to_schema(schema: pa.Schema, name_mapping: Optional[NameMapping] = None) -> Schema: + has_ids = visit_pyarrow(schema, _HasIds()) + if has_ids: + visitor = _ConvertToIceberg() + elif name_mapping is not None: + visitor = _ConvertToIceberg(name_mapping=name_mapping) + else: + raise ValueError( + "Parquet file does not have field-ids and the Iceberg table does not have 'schema.name-mapping.default' defined" + ) return visit_pyarrow(schema, visitor) @@ -653,50 +665,47 @@ def visit_pyarrow(obj: Union[pa.DataType, pa.Schema], visitor: PyArrowSchemaVisi @visit_pyarrow.register(pa.Schema) -def _(obj: pa.Schema, visitor: PyArrowSchemaVisitor[T]) -> Optional[T]: - struct_results: List[Optional[T]] = [] - for field in obj: - visitor.before_field(field) - struct_result = visit_pyarrow(field.type, visitor) - visitor.after_field(field) - struct_results.append(struct_result) - - return visitor.schema(obj, struct_results) +def _(obj: pa.Schema, visitor: PyArrowSchemaVisitor[T]) -> T: + return visitor.schema(obj, visit_pyarrow(pa.struct(obj), visitor)) @visit_pyarrow.register(pa.StructType) -def _(obj: pa.StructType, visitor: PyArrowSchemaVisitor[T]) -> Optional[T]: - struct_results: List[Optional[T]] = [] +def _(obj: pa.StructType, visitor: PyArrowSchemaVisitor[T]) -> T: + results = [] + for field in obj: visitor.before_field(field) - struct_result = visit_pyarrow(field.type, visitor) + result = visit_pyarrow(field.type, visitor) + results.append(visitor.field(field, result)) visitor.after_field(field) - struct_results.append(struct_result) - return visitor.struct(obj, struct_results) + return visitor.struct(obj, results) @visit_pyarrow.register(pa.ListType) -def _(obj: pa.ListType, visitor: PyArrowSchemaVisitor[T]) -> Optional[T]: - visitor.before_field(obj.value_field) - list_result = visit_pyarrow(obj.value_field.type, visitor) - visitor.after_field(obj.value_field) - return visitor.list(obj, list_result) +def _(obj: pa.ListType, visitor: PyArrowSchemaVisitor[T]) -> T: + visitor.before_list_element(obj.value_field) + result = visit_pyarrow(obj.value_type, visitor) + visitor.after_list_element(obj.value_field) + + return visitor.list(obj, result) @visit_pyarrow.register(pa.MapType) -def _(obj: pa.MapType, visitor: PyArrowSchemaVisitor[T]) -> Optional[T]: - visitor.before_field(obj.key_field) - key_result = visit_pyarrow(obj.key_field.type, visitor) - visitor.after_field(obj.key_field) - visitor.before_field(obj.item_field) - value_result = visit_pyarrow(obj.item_field.type, visitor) - visitor.after_field(obj.item_field) +def _(obj: pa.MapType, visitor: PyArrowSchemaVisitor[T]) -> T: + visitor.before_map_key(obj.key_field) + key_result = visit_pyarrow(obj.key_type, visitor) + visitor.after_map_key(obj.key_field) + + visitor.before_map_value(obj.item_field) + value_result = visit_pyarrow(obj.item_type, visitor) + visitor.after_map_value(obj.item_field) + return visitor.map(obj, key_result, value_result) @visit_pyarrow.register(pa.DataType) -def _(obj: pa.DataType, visitor: PyArrowSchemaVisitor[T]) -> Optional[T]: +def _(obj: pa.DataType, visitor: PyArrowSchemaVisitor[T]) -> T: if pa.types.is_nested(obj): raise TypeError(f"Expected primitive type, got: {type(obj)}") return visitor.primitive(obj) @@ -709,24 +718,46 @@ def before_field(self, field: pa.Field) -> None: def after_field(self, field: pa.Field) -> None: """Override this method to perform an action immediately after visiting a 
field.""" + def before_list_element(self, element: pa.Field) -> None: + """Override this method to perform an action immediately before visiting an element within a ListType.""" + + def after_list_element(self, element: pa.Field) -> None: + """Override this method to perform an action immediately after visiting an element within a ListType.""" + + def before_map_key(self, key: pa.Field) -> None: + """Override this method to perform an action immediately before visiting a key within a MapType.""" + + def after_map_key(self, key: pa.Field) -> None: + """Override this method to perform an action immediately after visiting a key within a MapType.""" + + def before_map_value(self, value: pa.Field) -> None: + """Override this method to perform an action immediately before visiting a value within a MapType.""" + + def after_map_value(self, value: pa.Field) -> None: + """Override this method to perform an action immediately after visiting a value within a MapType.""" + @abstractmethod - def schema(self, schema: pa.Schema, field_results: List[Optional[T]]) -> Optional[T]: + def schema(self, schema: pa.Schema, struct_result: T) -> T: """Visit a schema.""" @abstractmethod - def struct(self, struct: pa.StructType, field_results: List[Optional[T]]) -> Optional[T]: + def struct(self, struct: pa.StructType, field_results: List[T]) -> T: """Visit a struct.""" @abstractmethod - def list(self, list_type: pa.ListType, element_result: Optional[T]) -> Optional[T]: + def field(self, field: pa.Field, field_result: T) -> T: + """Visit a field.""" + + @abstractmethod + def list(self, list_type: pa.ListType, element_result: T) -> T: """Visit a list.""" @abstractmethod - def map(self, map_type: pa.MapType, key_result: Optional[T], value_result: Optional[T]) -> Optional[T]: + def map(self, map_type: pa.MapType, key_result: T, value_result: T) -> T: """Visit a map.""" @abstractmethod - def primitive(self, primitive: pa.DataType) -> Optional[T]: + def primitive(self, primitive: pa.DataType) -> T: """Visit a primitive type.""" @@ -738,42 +769,84 @@ def _get_field_id(field: pa.Field) -> Optional[int]: ) -class _ConvertToIceberg(PyArrowSchemaVisitor[Union[IcebergType, Schema]]): - def _convert_fields(self, arrow_fields: Iterable[pa.Field], field_results: List[Optional[IcebergType]]) -> List[NestedField]: - fields = [] - for i, field in enumerate(arrow_fields): - field_id = _get_field_id(field) - field_doc = doc_str.decode() if (field.metadata and (doc_str := field.metadata.get(PYARROW_FIELD_DOC_KEY))) else None - field_type = field_results[i] - if field_type is not None and field_id is not None: - fields.append(NestedField(field_id, field.name, field_type, required=not field.nullable, doc=field_doc)) - return fields - - def schema(self, schema: pa.Schema, field_results: List[Optional[IcebergType]]) -> Schema: - return Schema(*self._convert_fields(schema, field_results)) - - def struct(self, struct: pa.StructType, field_results: List[Optional[IcebergType]]) -> IcebergType: - return StructType(*self._convert_fields(struct, field_results)) - - def list(self, list_type: pa.ListType, element_result: Optional[IcebergType]) -> Optional[IcebergType]: +class _HasIds(PyArrowSchemaVisitor[bool]): + def schema(self, schema: pa.Schema, struct_result: bool) -> bool: + return struct_result + + def struct(self, struct: pa.StructType, field_results: List[bool]) -> bool: + return all(field_results) + + def field(self, field: pa.Field, field_result: bool) -> bool: + return all([_get_field_id(field) is not None, field_result]) + + def 
list(self, list_type: pa.ListType, element_result: bool) -> bool: element_field = list_type.value_field element_id = _get_field_id(element_field) - if element_result is not None and element_id is not None: - return ListType(element_id, element_result, element_required=not element_field.nullable) - return None + return element_result and element_id is not None - def map( - self, map_type: pa.MapType, key_result: Optional[IcebergType], value_result: Optional[IcebergType] - ) -> Optional[IcebergType]: + def map(self, map_type: pa.MapType, key_result: bool, value_result: bool) -> bool: key_field = map_type.key_field key_id = _get_field_id(key_field) value_field = map_type.item_field value_id = _get_field_id(value_field) - if key_result is not None and value_result is not None and key_id is not None and value_id is not None: - return MapType(key_id, key_result, value_id, value_result, value_required=not value_field.nullable) - return None + return all([key_id is not None, value_id is not None, key_result, value_result]) + + def primitive(self, primitive: pa.DataType) -> bool: + return True - def primitive(self, primitive: pa.DataType) -> IcebergType: + +class _ConvertToIceberg(PyArrowSchemaVisitor[Union[IcebergType, Schema]]): + """Converts PyArrowSchema to Iceberg Schema. Applies the IDs from name_mapping if provided.""" + + _field_names: List[str] + _name_mapping: Optional[NameMapping] + + def __init__(self, name_mapping: Optional[NameMapping] = None) -> None: + self._field_names = [] + self._name_mapping = name_mapping + + def _current_path(self) -> str: + return ".".join(self._field_names) + + def _field_id(self, field: pa.Field) -> int: + if self._name_mapping: + return self._name_mapping.find(self._current_path()).field_id + elif (field_id := _get_field_id(field)) is not None: + return field_id + else: + raise ValueError(f"Cannot convert {field} to Iceberg Field as field_id is empty.") + + def schema(self, schema: pa.Schema, struct_result: StructType) -> Schema: + return Schema(*struct_result.fields) + + def struct(self, struct: pa.StructType, field_results: List[NestedField]) -> StructType: + return StructType(*field_results) + + def field(self, field: pa.Field, field_result: IcebergType) -> NestedField: + field_id = self._field_id(field) + field_doc = doc_str.decode() if (field.metadata and (doc_str := field.metadata.get(PYARROW_FIELD_DOC_KEY))) else None + field_type = field_result + return NestedField(field_id, field.name, field_type, required=not field.nullable, doc=field_doc) + + def list(self, list_type: pa.ListType, element_result: IcebergType) -> ListType: + element_field = list_type.value_field + self._field_names.append(LIST_ELEMENT_NAME) + element_id = self._field_id(element_field) + self._field_names.pop() + return ListType(element_id, element_result, element_required=not element_field.nullable) + + def map(self, map_type: pa.MapType, key_result: IcebergType, value_result: IcebergType) -> MapType: + key_field = map_type.key_field + self._field_names.append(MAP_KEY_NAME) + key_id = self._field_id(key_field) + self._field_names.pop() + value_field = map_type.item_field + self._field_names.append(MAP_VALUE_NAME) + value_id = self._field_id(value_field) + self._field_names.pop() + return MapType(key_id, key_result, value_id, value_result, value_required=not value_field.nullable) + + def primitive(self, primitive: pa.DataType) -> PrimitiveType: if pa.types.is_boolean(primitive): return BooleanType() elif pa.types.is_int32(primitive): @@ -808,6 +881,30 @@ def primitive(self, 
primitive: pa.DataType) -> IcebergType: raise TypeError(f"Unsupported type: {primitive}") + def before_field(self, field: pa.Field) -> None: + self._field_names.append(field.name) + + def after_field(self, field: pa.Field) -> None: + self._field_names.pop() + + def before_list_element(self, element: pa.Field) -> None: + self._field_names.append(LIST_ELEMENT_NAME) + + def after_list_element(self, element: pa.Field) -> None: + self._field_names.pop() + + def before_map_key(self, key: pa.Field) -> None: + self._field_names.append(MAP_KEY_NAME) + + def after_map_key(self, element: pa.Field) -> None: + self._field_names.pop() + + def before_map_value(self, value: pa.Field) -> None: + self._field_names.append(MAP_VALUE_NAME) + + def after_map_value(self, element: pa.Field) -> None: + self._field_names.pop() + def _task_to_table( fs: FileSystem, @@ -819,6 +916,7 @@ def _task_to_table( case_sensitive: bool, row_counts: List[int], limit: Optional[int] = None, + name_mapping: Optional[NameMapping] = None, ) -> Optional[pa.Table]: if limit and sum(row_counts) >= limit: return None @@ -831,9 +929,9 @@ def _task_to_table( schema_raw = None if metadata := physical_schema.metadata: schema_raw = metadata.get(ICEBERG_SCHEMA) - # TODO: if field_ids are not present, Name Mapping should be implemented to look them up in the table schema, - # see https://github.com/apache/iceberg/issues/7451 - file_schema = Schema.model_validate_json(schema_raw) if schema_raw is not None else pyarrow_to_schema(physical_schema) + file_schema = ( + Schema.model_validate_json(schema_raw) if schema_raw is not None else pyarrow_to_schema(physical_schema, name_mapping) + ) pyarrow_filter = None if bound_row_filter is not AlwaysTrue(): @@ -970,6 +1068,7 @@ def project_table( case_sensitive, row_counts, limit, + table.name_mapping(), ) for task in tasks ] diff --git a/pyiceberg/table/__init__.py b/pyiceberg/table/__init__.py index 7c1b0bdecc..057dd8427c 100644 --- a/pyiceberg/table/__init__.py +++ b/pyiceberg/table/__init__.py @@ -81,6 +81,12 @@ TableMetadata, TableMetadataUtil, ) +from pyiceberg.table.name_mapping import ( + SCHEMA_NAME_MAPPING_DEFAULT, + NameMapping, + create_mapping_from_schema, + parse_mapping_from_json, +) from pyiceberg.table.refs import MAIN_BRANCH, SnapshotRef from pyiceberg.table.snapshots import ( Operation, @@ -909,6 +915,13 @@ def history(self) -> List[SnapshotLogEntry]: def update_schema(self, allow_incompatible_changes: bool = False, case_sensitive: bool = True) -> UpdateSchema: return UpdateSchema(self, allow_incompatible_changes=allow_incompatible_changes, case_sensitive=case_sensitive) + def name_mapping(self) -> NameMapping: + """Return the table's field-id NameMapping.""" + if name_mapping_json := self.properties.get(SCHEMA_NAME_MAPPING_DEFAULT): + return parse_mapping_from_json(name_mapping_json) + else: + return create_mapping_from_schema(self.schema()) + def append(self, df: pa.Table) -> None: """ Append data to the table. 
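The `name_mapping()` accessor added above prefers an explicit `schema.name-mapping.default` table property and only falls back to deriving a mapping from the current schema. A minimal sketch of that fallback path, using only APIs touched by this patch (the one-column schema and field name are illustrative):

```python
from pyiceberg.schema import Schema
from pyiceberg.table.name_mapping import create_mapping_from_schema
from pyiceberg.types import NestedField, StringType

# Derive a field-id mapping straight from the schema, as name_mapping()
# does when 'schema.name-mapping.default' is not set on the table.
schema = Schema(NestedField(field_id=1, name="city", field_type=StringType(), required=False))
mapping = create_mapping_from_schema(schema)
assert mapping.find("city").field_id == 1
```

This is the mapping `_task_to_table` now hands to `pyarrow_to_schema`, so Parquet files without embedded field-ids can still resolve column names to the table schema's ids.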
diff --git a/pyiceberg/table/name_mapping.py b/pyiceberg/table/name_mapping.py index ffe96359a8..84a295f5e4 100644 --- a/pyiceberg/table/name_mapping.py +++ b/pyiceberg/table/name_mapping.py @@ -34,6 +34,8 @@ from pyiceberg.typedef import IcebergBaseModel, IcebergRootModel from pyiceberg.types import ListType, MapType, NestedField, PrimitiveType, StructType +SCHEMA_NAME_MAPPING_DEFAULT = "schema.name-mapping.default" + class MappedField(IcebergBaseModel): field_id: int = Field(alias="field-id") diff --git a/tests/io/test_pyarrow_visitor.py b/tests/io/test_pyarrow_visitor.py index c307440352..0986eac409 100644 --- a/tests/io/test_pyarrow_visitor.py +++ b/tests/io/test_pyarrow_visitor.py @@ -23,11 +23,13 @@ from pyiceberg.io.pyarrow import ( _ConvertToArrowSchema, _ConvertToIceberg, + _HasIds, pyarrow_to_schema, schema_to_pyarrow, visit_pyarrow, ) from pyiceberg.schema import Schema, visit +from pyiceberg.table.name_mapping import MappedField, NameMapping from pyiceberg.types import ( BinaryType, BooleanType, @@ -49,6 +51,104 @@ ) +@pytest.fixture(scope="module") +def pyarrow_schema_simple_without_ids() -> pa.Schema: + return pa.schema([pa.field('some_int', pa.int32(), nullable=True), pa.field('some_string', pa.string(), nullable=False)]) + + +@pytest.fixture(scope="module") +def pyarrow_schema_nested_without_ids() -> pa.Schema: + return pa.schema([ + pa.field('foo', pa.string(), nullable=False), + pa.field('bar', pa.int32(), nullable=False), + pa.field('baz', pa.bool_(), nullable=True), + pa.field('qux', pa.list_(pa.string()), nullable=False), + pa.field( + 'quux', + pa.map_( + pa.string(), + pa.map_(pa.string(), pa.int32()), + ), + nullable=False, + ), + pa.field( + 'location', + pa.list_( + pa.struct([ + pa.field('latitude', pa.float32(), nullable=False), + pa.field('longitude', pa.float32(), nullable=False), + ]), + ), + nullable=False, + ), + pa.field( + 'person', + pa.struct([ + pa.field('name', pa.string(), nullable=True), + pa.field('age', pa.int32(), nullable=False), + ]), + nullable=True, + ), + ]) + + +@pytest.fixture(scope="module") +def iceberg_schema_simple() -> Schema: + return Schema( + NestedField(field_id=1, name="some_int", field_type=IntegerType(), required=False), + NestedField(field_id=2, name="some_string", field_type=StringType(), required=True), + ) + + +@pytest.fixture(scope="module") +def iceberg_schema_nested() -> Schema: + return Schema( + NestedField(field_id=1, name="foo", field_type=StringType(), required=True), + NestedField(field_id=2, name="bar", field_type=IntegerType(), required=True), + NestedField(field_id=3, name="baz", field_type=BooleanType(), required=False), + NestedField( + field_id=4, + name="qux", + field_type=ListType(element_id=5, element_type=StringType(), element_required=False), + required=True, + ), + NestedField( + field_id=6, + name="quux", + field_type=MapType( + key_id=7, + key_type=StringType(), + value_id=8, + value_type=MapType(key_id=9, key_type=StringType(), value_id=10, value_type=IntegerType(), value_required=False), + value_required=False, + ), + required=True, + ), + NestedField( + field_id=11, + name="location", + field_type=ListType( + element_id=12, + element_type=StructType( + NestedField(field_id=13, name="latitude", field_type=FloatType(), required=True), + NestedField(field_id=14, name="longitude", field_type=FloatType(), required=True), + ), + element_required=False, + ), + required=True, + ), + NestedField( + field_id=15, + name="person", + field_type=StructType( + NestedField(field_id=16, name="name", 
field_type=StringType(), required=False), + NestedField(field_id=17, name="age", field_type=IntegerType(), required=True), + ), + required=False, + ), + ) + + def test_pyarrow_binary_to_iceberg() -> None: length = 23 pyarrow_type = pa.binary(length) @@ -267,3 +367,208 @@ def test_round_schema_conversion_nested(table_schema_nested: Schema) -> None: 15: person: optional struct<16: name: optional string, 17: age: required int> }""" assert actual == expected + + +def test_simple_schema_has_missing_ids() -> None: + schema = pa.schema([ + pa.field('foo', pa.string(), nullable=False), + ]) + visitor = _HasIds() + has_ids = visit_pyarrow(schema, visitor) + assert not has_ids + + +def test_simple_schema_has_missing_ids_partial() -> None: + schema = pa.schema([ + pa.field('foo', pa.string(), nullable=False, metadata={"PARQUET:field_id": "1", "doc": "foo doc"}), + pa.field('bar', pa.int32(), nullable=False), + ]) + visitor = _HasIds() + has_ids = visit_pyarrow(schema, visitor) + assert not has_ids + + +def test_nested_schema_has_missing_ids() -> None: + schema = pa.schema([ + pa.field('foo', pa.string(), nullable=False), + pa.field( + 'quux', + pa.map_( + pa.string(), + pa.map_(pa.string(), pa.int32()), + ), + nullable=False, + ), + ]) + visitor = _HasIds() + has_ids = visit_pyarrow(schema, visitor) + assert not has_ids + + +def test_nested_schema_has_ids() -> None: + schema = pa.schema([ + pa.field('foo', pa.string(), nullable=False, metadata={"PARQUET:field_id": "1", "doc": "foo doc"}), + pa.field( + 'quux', + pa.map_( + pa.field("key", pa.string(), nullable=False, metadata={"PARQUET:field_id": "7"}), + pa.field( + "value", + pa.map_( + pa.field('key', pa.string(), nullable=False, metadata={"PARQUET:field_id": "9"}), + pa.field('value', pa.int32(), metadata={"PARQUET:field_id": "10"}), + ), + nullable=False, + metadata={"PARQUET:field_id": "8"}, + ), + ), + nullable=False, + metadata={"PARQUET:field_id": "6", "doc": "quux doc"}, + ), + ]) + visitor = _HasIds() + has_ids = visit_pyarrow(schema, visitor) + assert has_ids + + +def test_nested_schema_has_partial_missing_ids() -> None: + schema = pa.schema([ + pa.field('foo', pa.string(), nullable=False, metadata={"PARQUET:field_id": "1", "doc": "foo doc"}), + pa.field( + 'quux', + pa.map_( + pa.field("key", pa.string(), nullable=False, metadata={"PARQUET:field_id": "7"}), + pa.field( + "value", + pa.map_(pa.field('key', pa.string(), nullable=False), pa.field('value', pa.int32())), + nullable=False, + ), + ), + nullable=False, + metadata={"PARQUET:field_id": "6", "doc": "quux doc"}, + ), + ]) + visitor = _HasIds() + has_ids = visit_pyarrow(schema, visitor) + assert not has_ids + + +def test_pyarrow_schema_to_schema_missing_ids_and_name_mapping(pyarrow_schema_simple_without_ids: pa.Schema) -> None: + schema = pyarrow_schema_simple_without_ids + with pytest.raises(ValueError) as exc_info: + _ = pyarrow_to_schema(schema) + assert ( + "Parquet file does not have field-ids and the Iceberg table does not have 'schema.name-mapping.default' defined" + in str(exc_info.value) + ) + + +def test_simple_pyarrow_schema_to_schema_missing_ids_using_name_mapping( + pyarrow_schema_simple_without_ids: pa.Schema, iceberg_schema_simple: Schema +) -> None: + schema = pyarrow_schema_simple_without_ids + name_mapping = NameMapping([ + MappedField(field_id=1, names=['some_int']), + MappedField(field_id=2, names=['some_string']), + ]) + + assert pyarrow_to_schema(schema, name_mapping) == iceberg_schema_simple + + +def 
test_simple_pyarrow_schema_to_schema_missing_ids_using_name_mapping_partial_exception( + pyarrow_schema_simple_without_ids: pa.Schema, +) -> None: + schema = pyarrow_schema_simple_without_ids + name_mapping = NameMapping([ + MappedField(field_id=1, names=['some_string']), + ]) + with pytest.raises(ValueError) as exc_info: + _ = pyarrow_to_schema(schema, name_mapping) + assert "Could not find field with name: some_int" in str(exc_info.value) + + +def test_nested_pyarrow_schema_to_schema_missing_ids_using_name_mapping( + pyarrow_schema_nested_without_ids: pa.Schema, iceberg_schema_nested: Schema +) -> None: + schema = pyarrow_schema_nested_without_ids + + name_mapping = NameMapping([ + MappedField(field_id=1, names=['foo']), + MappedField(field_id=2, names=['bar']), + MappedField(field_id=3, names=['baz']), + MappedField(field_id=4, names=['qux'], fields=[MappedField(field_id=5, names=['element'])]), + MappedField( + field_id=6, + names=['quux'], + fields=[ + MappedField(field_id=7, names=['key']), + MappedField( + field_id=8, + names=['value'], + fields=[ + MappedField(field_id=9, names=['key']), + MappedField(field_id=10, names=['value']), + ], + ), + ], + ), + MappedField( + field_id=11, + names=['location'], + fields=[ + MappedField( + field_id=12, + names=['element'], + fields=[ + MappedField(field_id=13, names=['latitude']), + MappedField(field_id=14, names=['longitude']), + ], + ) + ], + ), + MappedField( + field_id=15, + names=['person'], + fields=[ + MappedField(field_id=16, names=['name']), + MappedField(field_id=17, names=['age']), + ], + ), + ]) + + assert pyarrow_to_schema(schema, name_mapping) == iceberg_schema_nested + + +def test_pyarrow_schema_to_schema_missing_ids_using_name_mapping_nested_missing_id() -> None: + schema = pa.schema([ + pa.field('foo', pa.string(), nullable=False), + pa.field( + 'quux', + pa.map_( + pa.string(), + pa.map_(pa.string(), pa.int32()), + ), + nullable=False, + ), + ]) + + name_mapping = NameMapping([ + MappedField(field_id=1, names=['foo']), + MappedField( + field_id=6, + names=['quux'], + fields=[ + MappedField(field_id=7, names=['key']), + MappedField( + field_id=8, + names=['value'], + fields=[ + MappedField(field_id=10, names=['value']), + ], + ), + ], + ), + ]) + with pytest.raises(ValueError) as exc_info: + _ = pyarrow_to_schema(schema, name_mapping) + assert "Could not find field with name: quux.value.key" in str(exc_info.value) From 94d7821cbc6b31b791e18d4f91c0991684616076 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 18 Jan 2024 18:45:01 -0800 Subject: [PATCH 033/169] Build: Bump griffe from 0.39.0 to 0.39.1 (#282) --- mkdocs/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mkdocs/requirements.txt b/mkdocs/requirements.txt index cac7424d82..52ac9bef14 100644 --- a/mkdocs/requirements.txt +++ b/mkdocs/requirements.txt @@ -16,7 +16,7 @@ # under the License. 
mkdocs==1.5.3 -griffe==0.39.0 +griffe==0.39.1 jinja2==3.1.3 mkdocstrings==0.24.0 mkdocstrings-python==1.8.0 From 1befad70bb72e649ec07431a43972aa2cd0cc0da Mon Sep 17 00:00:00 2001 From: Kevin Liu Date: Sun, 21 Jan 2024 16:16:57 -0800 Subject: [PATCH 034/169] Fix moto server port conflict (#292) - Change moto server port to 5001 - Throw a meaningful exception when port conflict detected --- tests/conftest.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/tests/conftest.py b/tests/conftest.py index 07beba07e0..9c53301776 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -27,6 +27,7 @@ import os import re +import socket import string import uuid from datetime import datetime @@ -1587,7 +1588,7 @@ def fixture_aws_credentials() -> Generator[None, None, None]: os.environ.pop("AWS_DEFAULT_REGION") -MOTO_SERVER = ThreadedMotoServer(ip_address="localhost", port=5000) +MOTO_SERVER = ThreadedMotoServer(ip_address="localhost", port=5001) def pytest_sessionfinish( @@ -1600,10 +1601,17 @@ def pytest_sessionfinish( @pytest.fixture(scope="session") def moto_server() -> ThreadedMotoServer: + # this will throw an exception if the port is already in use + is_port_in_use(MOTO_SERVER._ip_address, MOTO_SERVER._port) MOTO_SERVER.start() return MOTO_SERVER +def is_port_in_use(ip_address: str, port: int) -> None: + with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: + s.bind((ip_address, port)) + + @pytest.fixture(scope="session") def moto_endpoint_url(moto_server: ThreadedMotoServer) -> str: _url = f"http://{moto_server._ip_address}:{moto_server._port}" From acfa5bc4e1e0ab167b7d3652438288e9d295f9b6 Mon Sep 17 00:00:00 2001 From: Kevin Liu Date: Mon, 22 Jan 2024 00:12:03 -0800 Subject: [PATCH 035/169] Implement Hive catalog `_commit_table` (#294) * implement hive catalog _commit_table * also works for table version upgrade --- pyiceberg/catalog/hive.py | 42 ++++++++++++++++++++++++++++++--- tests/catalog/test_hive.py | 2 +- tests/integration/test_reads.py | 5 ---- 3 files changed, 40 insertions(+), 9 deletions(-) diff --git a/pyiceberg/catalog/hive.py b/pyiceberg/catalog/hive.py index ffc9c5333c..331b9ca80d 100644 --- a/pyiceberg/catalog/hive.py +++ b/pyiceberg/catalog/hive.py @@ -66,7 +66,7 @@ from pyiceberg.partitioning import UNPARTITIONED_PARTITION_SPEC, PartitionSpec from pyiceberg.schema import Schema, SchemaVisitor, visit from pyiceberg.serializers import FromInputFile -from pyiceberg.table import CommitTableRequest, CommitTableResponse, Table +from pyiceberg.table import CommitTableRequest, CommitTableResponse, Table, update_table_metadata from pyiceberg.table.metadata import new_table_metadata from pyiceberg.table.sorting import UNSORTED_SORT_ORDER, SortOrder from pyiceberg.typedef import EMPTY_DICT @@ -150,6 +150,7 @@ def _construct_hive_storage_descriptor(schema: Schema, location: Optional[str]) PROP_TABLE_TYPE = "table_type" PROP_METADATA_LOCATION = "metadata_location" PROP_PREVIOUS_METADATA_LOCATION = "previous_metadata_location" +DEFAULT_PROPERTIES = {'write.parquet.compression-codec': 'zstd'} def _construct_parameters(metadata_location: str, previous_metadata_location: Optional[str] = None) -> Dict[str, Any]: @@ -272,6 +273,7 @@ def create_table( AlreadyExistsError: If a table with the name already exists. ValueError: If the identifier is invalid. 
""" + properties = {**DEFAULT_PROPERTIES, **properties} database_name, table_name = self.identifier_to_database_and_table(identifier) current_time_millis = int(time.time() * 1000) @@ -279,7 +281,11 @@ def create_table( metadata_location = self._get_metadata_location(location=location) metadata = new_table_metadata( - location=location, schema=schema, partition_spec=partition_spec, sort_order=sort_order, properties=properties + location=location, + schema=schema, + partition_spec=partition_spec, + sort_order=sort_order, + properties=properties, ) io = load_file_io({**self.properties, **properties}, location=location) self._write_metadata(metadata, io, metadata_location) @@ -330,7 +336,37 @@ def _commit_table(self, table_request: CommitTableRequest) -> CommitTableRespons Raises: NoSuchTableError: If a table with the given identifier does not exist. """ - raise NotImplementedError + identifier_tuple = self.identifier_to_tuple_without_catalog( + tuple(table_request.identifier.namespace.root + [table_request.identifier.name]) + ) + current_table = self.load_table(identifier_tuple) + database_name, table_name = self.identifier_to_database_and_table(identifier_tuple, NoSuchTableError) + base_metadata = current_table.metadata + for requirement in table_request.requirements: + requirement.validate(base_metadata) + + updated_metadata = update_table_metadata(base_metadata, table_request.updates) + if updated_metadata == base_metadata: + # no changes, do nothing + return CommitTableResponse(metadata=base_metadata, metadata_location=current_table.metadata_location) + + # write new metadata + new_metadata_version = self._parse_metadata_version(current_table.metadata_location) + 1 + new_metadata_location = self._get_metadata_location(current_table.metadata.location, new_metadata_version) + self._write_metadata(updated_metadata, current_table.io, new_metadata_location) + + # commit to hive + try: + with self._client as open_client: + tbl = open_client.get_table(dbname=database_name, tbl_name=table_name) + tbl.parameters = _construct_parameters( + metadata_location=new_metadata_location, previous_metadata_location=current_table.metadata_location + ) + open_client.alter_table(dbname=database_name, tbl_name=table_name, new_tbl=tbl) + except NoSuchObjectException as e: + raise NoSuchTableError(f"Table does not exist: {table_name}") from e + + return CommitTableResponse(metadata=updated_metadata, metadata_location=new_metadata_location) def load_table(self, identifier: Union[str, Identifier]) -> Table: """Load the table's metadata and return the table instance. 
diff --git a/tests/catalog/test_hive.py b/tests/catalog/test_hive.py index 54336786f9..f42962f0f3 100644 --- a/tests/catalog/test_hive.py +++ b/tests/catalog/test_hive.py @@ -278,7 +278,7 @@ def test_create_table(table_schema_simple: Schema, hive_database: HiveDatabase, ], current_schema_id=0, last_partition_id=1000, - properties={"owner": "javaberg"}, + properties={"owner": "javaberg", 'write.parquet.compression-codec': 'zstd'}, partition_specs=[PartitionSpec()], default_spec_id=0, current_snapshot_id=None, diff --git a/tests/integration/test_reads.py b/tests/integration/test_reads.py index 3fbdb69ebe..e7c8b74cf0 100644 --- a/tests/integration/test_reads.py +++ b/tests/integration/test_reads.py @@ -25,7 +25,6 @@ from pyarrow.fs import S3FileSystem from pyiceberg.catalog import Catalog, load_catalog -from pyiceberg.catalog.hive import HiveCatalog from pyiceberg.exceptions import NoSuchTableError from pyiceberg.expressions import ( And, @@ -101,8 +100,6 @@ def create_table(catalog: Catalog) -> Table: @pytest.mark.integration @pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) def test_table_properties(catalog: Catalog) -> None: - if isinstance(catalog, HiveCatalog): - pytest.skip("Not yet implemented: https://github.com/apache/iceberg-python/issues/275") table = create_table(catalog) assert table.properties == DEFAULT_PROPERTIES @@ -398,8 +395,6 @@ def test_filter_on_new_column(catalog: Catalog) -> None: @pytest.mark.integration @pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) def test_upgrade_table_version(catalog: Catalog) -> None: - if isinstance(catalog, HiveCatalog): - pytest.skip("Not yet implemented: https://github.com/apache/iceberg-python/issues/274") table_test_table_version = catalog.load_table("default.test_table_version") assert table_test_table_version.format_version == 1 From 46b25be424d1ef0f28778b0873c4d91bf117a2a7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 23 Jan 2024 14:18:23 +0100 Subject: [PATCH 036/169] Build: Bump pyarrow from 14.0.2 to 15.0.0 (#295) Bumps [pyarrow](https://github.com/apache/arrow) from 14.0.2 to 15.0.0. - [Commits](https://github.com/apache/arrow/compare/go/v14.0.2...go/v15.0.0) --- updated-dependencies: - dependency-name: pyarrow dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 82 ++++++++++++++++++++++++++------------------------ pyproject.toml | 2 +- 2 files changed, 44 insertions(+), 40 deletions(-) diff --git a/poetry.lock b/poetry.lock index 8fc927ce95..d905245a08 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2566,6 +2566,7 @@ files = [ {file = "psycopg2_binary-2.9.9-cp311-cp311-win32.whl", hash = "sha256:dc4926288b2a3e9fd7b50dc6a1909a13bbdadfc67d93f3374d984e56f885579d"}, {file = "psycopg2_binary-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:b76bedd166805480ab069612119ea636f5ab8f8771e640ae103e05a4aae3e417"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:8532fd6e6e2dc57bcb3bc90b079c60de896d2128c5d9d6f24a63875a95a088cf"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b0605eaed3eb239e87df0d5e3c6489daae3f7388d455d0c0b4df899519c6a38d"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f8544b092a29a6ddd72f3556a9fcf249ec412e10ad28be6a0c0d948924f2212"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2d423c8d8a3c82d08fe8af900ad5b613ce3632a1249fd6a223941d0735fce493"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e5afae772c00980525f6d6ecf7cbca55676296b580c0e6abb407f15f3706996"}, @@ -2574,6 +2575,8 @@ files = [ {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:cb16c65dcb648d0a43a2521f2f0a2300f40639f6f8c1ecbc662141e4e3e1ee07"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:911dda9c487075abd54e644ccdf5e5c16773470a6a5d3826fda76699410066fb"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:57fede879f08d23c85140a360c6a77709113efd1c993923c59fde17aa27599fe"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-win32.whl", hash = "sha256:64cf30263844fa208851ebb13b0732ce674d8ec6a0c86a4e160495d299ba3c93"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-win_amd64.whl", hash = "sha256:81ff62668af011f9a48787564ab7eded4e9fb17a4a6a74af5ffa6a457400d2ab"}, {file = "psycopg2_binary-2.9.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2293b001e319ab0d869d660a704942c9e2cce19745262a8aba2115ef41a0a42a"}, {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03ef7df18daf2c4c07e2695e8cfd5ee7f748a1d54d802330985a78d2a5a6dca9"}, {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a602ea5aff39bb9fac6308e9c9d82b9a35c2bf288e184a816002c9fae930b77"}, @@ -2638,51 +2641,51 @@ files = [ [[package]] name = "pyarrow" -version = "14.0.2" +version = "15.0.0" description = "Python library for Apache Arrow" optional = true python-versions = ">=3.8" files = [ - {file = "pyarrow-14.0.2-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:ba9fe808596c5dbd08b3aeffe901e5f81095baaa28e7d5118e01354c64f22807"}, - {file = "pyarrow-14.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:22a768987a16bb46220cef490c56c671993fbee8fd0475febac0b3e16b00a10e"}, - {file = "pyarrow-14.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2dbba05e98f247f17e64303eb876f4a80fcd32f73c7e9ad975a83834d81f3fda"}, - {file = "pyarrow-14.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:a898d134d00b1eca04998e9d286e19653f9d0fcb99587310cd10270907452a6b"}, - {file = "pyarrow-14.0.2-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:87e879323f256cb04267bb365add7208f302df942eb943c93a9dfeb8f44840b1"}, - {file = "pyarrow-14.0.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:76fc257559404ea5f1306ea9a3ff0541bf996ff3f7b9209fc517b5e83811fa8e"}, - {file = "pyarrow-14.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0c4a18e00f3a32398a7f31da47fefcd7a927545b396e1f15d0c85c2f2c778cd"}, - {file = "pyarrow-14.0.2-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:87482af32e5a0c0cce2d12eb3c039dd1d853bd905b04f3f953f147c7a196915b"}, - {file = "pyarrow-14.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:059bd8f12a70519e46cd64e1ba40e97eae55e0cbe1695edd95384653d7626b23"}, - {file = "pyarrow-14.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f16111f9ab27e60b391c5f6d197510e3ad6654e73857b4e394861fc79c37200"}, - {file = "pyarrow-14.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06ff1264fe4448e8d02073f5ce45a9f934c0f3db0a04460d0b01ff28befc3696"}, - {file = "pyarrow-14.0.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:6dd4f4b472ccf4042f1eab77e6c8bce574543f54d2135c7e396f413046397d5a"}, - {file = "pyarrow-14.0.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:32356bfb58b36059773f49e4e214996888eeea3a08893e7dbde44753799b2a02"}, - {file = "pyarrow-14.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:52809ee69d4dbf2241c0e4366d949ba035cbcf48409bf404f071f624ed313a2b"}, - {file = "pyarrow-14.0.2-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:c87824a5ac52be210d32906c715f4ed7053d0180c1060ae3ff9b7e560f53f944"}, - {file = "pyarrow-14.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a25eb2421a58e861f6ca91f43339d215476f4fe159eca603c55950c14f378cc5"}, - {file = "pyarrow-14.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c1da70d668af5620b8ba0a23f229030a4cd6c5f24a616a146f30d2386fec422"}, - {file = "pyarrow-14.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2cc61593c8e66194c7cdfae594503e91b926a228fba40b5cf25cc593563bcd07"}, - {file = "pyarrow-14.0.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:78ea56f62fb7c0ae8ecb9afdd7893e3a7dbeb0b04106f5c08dbb23f9c0157591"}, - {file = "pyarrow-14.0.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:37c233ddbce0c67a76c0985612fef27c0c92aef9413cf5aa56952f359fcb7379"}, - {file = "pyarrow-14.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:e4b123ad0f6add92de898214d404e488167b87b5dd86e9a434126bc2b7a5578d"}, - {file = "pyarrow-14.0.2-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:e354fba8490de258be7687f341bc04aba181fc8aa1f71e4584f9890d9cb2dec2"}, - {file = "pyarrow-14.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:20e003a23a13da963f43e2b432483fdd8c38dc8882cd145f09f21792e1cf22a1"}, - {file = "pyarrow-14.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc0de7575e841f1595ac07e5bc631084fd06ca8b03c0f2ecece733d23cd5102a"}, - {file = "pyarrow-14.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66e986dc859712acb0bd45601229021f3ffcdfc49044b64c6d071aaf4fa49e98"}, - {file = "pyarrow-14.0.2-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:f7d029f20ef56673a9730766023459ece397a05001f4e4d13805111d7c2108c0"}, - {file = "pyarrow-14.0.2-cp38-cp38-manylinux_2_28_x86_64.whl", hash = 
"sha256:209bac546942b0d8edc8debda248364f7f668e4aad4741bae58e67d40e5fcf75"}, - {file = "pyarrow-14.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:1e6987c5274fb87d66bb36816afb6f65707546b3c45c44c28e3c4133c010a881"}, - {file = "pyarrow-14.0.2-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:a01d0052d2a294a5f56cc1862933014e696aa08cc7b620e8c0cce5a5d362e976"}, - {file = "pyarrow-14.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a51fee3a7db4d37f8cda3ea96f32530620d43b0489d169b285d774da48ca9785"}, - {file = "pyarrow-14.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64df2bf1ef2ef14cee531e2dfe03dd924017650ffaa6f9513d7a1bb291e59c15"}, - {file = "pyarrow-14.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c0fa3bfdb0305ffe09810f9d3e2e50a2787e3a07063001dcd7adae0cee3601a"}, - {file = "pyarrow-14.0.2-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:c65bf4fd06584f058420238bc47a316e80dda01ec0dfb3044594128a6c2db794"}, - {file = "pyarrow-14.0.2-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:63ac901baec9369d6aae1cbe6cca11178fb018a8d45068aaf5bb54f94804a866"}, - {file = "pyarrow-14.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:75ee0efe7a87a687ae303d63037d08a48ef9ea0127064df18267252cfe2e9541"}, - {file = "pyarrow-14.0.2.tar.gz", hash = "sha256:36cef6ba12b499d864d1def3e990f97949e0b79400d08b7cf74504ffbd3eb025"}, + {file = "pyarrow-15.0.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:0a524532fd6dd482edaa563b686d754c70417c2f72742a8c990b322d4c03a15d"}, + {file = "pyarrow-15.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:60a6bdb314affa9c2e0d5dddf3d9cbb9ef4a8dddaa68669975287d47ece67642"}, + {file = "pyarrow-15.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:66958fd1771a4d4b754cd385835e66a3ef6b12611e001d4e5edfcef5f30391e2"}, + {file = "pyarrow-15.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f500956a49aadd907eaa21d4fff75f73954605eaa41f61cb94fb008cf2e00c6"}, + {file = "pyarrow-15.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:6f87d9c4f09e049c2cade559643424da84c43a35068f2a1c4653dc5b1408a929"}, + {file = "pyarrow-15.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:85239b9f93278e130d86c0e6bb455dcb66fc3fd891398b9d45ace8799a871a1e"}, + {file = "pyarrow-15.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:5b8d43e31ca16aa6e12402fcb1e14352d0d809de70edd185c7650fe80e0769e3"}, + {file = "pyarrow-15.0.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:fa7cd198280dbd0c988df525e50e35b5d16873e2cdae2aaaa6363cdb64e3eec5"}, + {file = "pyarrow-15.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8780b1a29d3c8b21ba6b191305a2a607de2e30dab399776ff0aa09131e266340"}, + {file = "pyarrow-15.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe0ec198ccc680f6c92723fadcb97b74f07c45ff3fdec9dd765deb04955ccf19"}, + {file = "pyarrow-15.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:036a7209c235588c2f07477fe75c07e6caced9b7b61bb897c8d4e52c4b5f9555"}, + {file = "pyarrow-15.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:2bd8a0e5296797faf9a3294e9fa2dc67aa7f10ae2207920dbebb785c77e9dbe5"}, + {file = "pyarrow-15.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:e8ebed6053dbe76883a822d4e8da36860f479d55a762bd9e70d8494aed87113e"}, + {file = "pyarrow-15.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:17d53a9d1b2b5bd7d5e4cd84d018e2a45bc9baaa68f7e6e3ebed45649900ba99"}, + {file = 
"pyarrow-15.0.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:9950a9c9df24090d3d558b43b97753b8f5867fb8e521f29876aa021c52fda351"}, + {file = "pyarrow-15.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:003d680b5e422d0204e7287bb3fa775b332b3fce2996aa69e9adea23f5c8f970"}, + {file = "pyarrow-15.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f75fce89dad10c95f4bf590b765e3ae98bcc5ba9f6ce75adb828a334e26a3d40"}, + {file = "pyarrow-15.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca9cb0039923bec49b4fe23803807e4ef39576a2bec59c32b11296464623dc2"}, + {file = "pyarrow-15.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:9ed5a78ed29d171d0acc26a305a4b7f83c122d54ff5270810ac23c75813585e4"}, + {file = "pyarrow-15.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:6eda9e117f0402dfcd3cd6ec9bfee89ac5071c48fc83a84f3075b60efa96747f"}, + {file = "pyarrow-15.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:9a3a6180c0e8f2727e6f1b1c87c72d3254cac909e609f35f22532e4115461177"}, + {file = "pyarrow-15.0.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:19a8918045993349b207de72d4576af0191beef03ea655d8bdb13762f0cd6eac"}, + {file = "pyarrow-15.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d0ec076b32bacb6666e8813a22e6e5a7ef1314c8069d4ff345efa6246bc38593"}, + {file = "pyarrow-15.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5db1769e5d0a77eb92344c7382d6543bea1164cca3704f84aa44e26c67e320fb"}, + {file = "pyarrow-15.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2617e3bf9df2a00020dd1c1c6dce5cc343d979efe10bc401c0632b0eef6ef5b"}, + {file = "pyarrow-15.0.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:d31c1d45060180131caf10f0f698e3a782db333a422038bf7fe01dace18b3a31"}, + {file = "pyarrow-15.0.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:c8c287d1d479de8269398b34282e206844abb3208224dbdd7166d580804674b7"}, + {file = "pyarrow-15.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:07eb7f07dc9ecbb8dace0f58f009d3a29ee58682fcdc91337dfeb51ea618a75b"}, + {file = "pyarrow-15.0.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:47af7036f64fce990bb8a5948c04722e4e3ea3e13b1007ef52dfe0aa8f23cf7f"}, + {file = "pyarrow-15.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:93768ccfff85cf044c418bfeeafce9a8bb0cee091bd8fd19011aff91e58de540"}, + {file = "pyarrow-15.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6ee87fd6892700960d90abb7b17a72a5abb3b64ee0fe8db6c782bcc2d0dc0b4"}, + {file = "pyarrow-15.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:001fca027738c5f6be0b7a3159cc7ba16a5c52486db18160909a0831b063c4e4"}, + {file = "pyarrow-15.0.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:d1c48648f64aec09accf44140dccb92f4f94394b8d79976c426a5b79b11d4fa7"}, + {file = "pyarrow-15.0.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:972a0141be402bb18e3201448c8ae62958c9c7923dfaa3b3d4530c835ac81aed"}, + {file = "pyarrow-15.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:f01fc5cf49081426429127aa2d427d9d98e1cb94a32cb961d583a70b7c4504e6"}, + {file = "pyarrow-15.0.0.tar.gz", hash = "sha256:876858f549d540898f927eba4ef77cd549ad8d24baa3207cf1b72e5788b50e83"}, ] [package.dependencies] -numpy = ">=1.16.6" +numpy = ">=1.16.6,<2" [[package]] name = "pyasn1" @@ -3163,6 +3166,7 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, 
{file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, @@ -4217,4 +4221,4 @@ zstandard = ["zstandard"] [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "744b1c2e8e96c8626732849a1349f614ba7fdd01491a3488d850979b8192787c" +content-hash = "bdd73f282497b994c64112a042d7818d6ce1728f69e407970d98750e2cc243b7" diff --git a/pyproject.toml b/pyproject.toml index 8766dfe281..c652298926 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -57,7 +57,7 @@ sortedcontainers = "2.4.0" fsspec = ">=2023.1.0,<2024.1.0" pyparsing = ">=3.1.0,<4.0.0" zstandard = ">=0.13.0,<1.0.0" -pyarrow = { version = ">=9.0.0,<15.0.0", optional = true } +pyarrow = { version = ">=9.0.0,<16.0.0", optional = true } pandas = { version = ">=1.0.0,<3.0.0", optional = true } duckdb = { version = ">=0.5.0,<1.0.0", optional = true } ray = { version = ">=2.0.0,<3.0.0", optional = true } From 46d668b403ce94e38e63a44bd56d6f2419cf3d6d Mon Sep 17 00:00:00 2001 From: Kevin Liu Date: Tue, 23 Jan 2024 13:43:44 -0800 Subject: [PATCH 037/169] Add install extra for sqlite (#297) * add sql-sqlite * make lint * poetry lock --no-update --- Makefile | 2 +- poetry.lock | 172 ++++++++++++++++++++++++++++++++-- pyiceberg/catalog/__init__.py | 4 +- pyproject.toml | 1 + tests/catalog/test_sql.py | 2 +- 5 files changed, 171 insertions(+), 10 deletions(-) diff --git a/Makefile b/Makefile index 04c57b30e3..4226614bd0 100644 --- a/Makefile +++ b/Makefile @@ -19,7 +19,7 @@ install-poetry: pip install poetry==1.7.1 install-dependencies: - poetry install -E pyarrow -E hive -E s3fs -E glue -E adlfs -E duckdb -E ray -E sql-postgres -E gcsfs + poetry install -E pyarrow -E hive -E s3fs -E glue -E adlfs -E duckdb -E ray -E sql-postgres -E gcsfs -E sql-sqlite install: | install-poetry install-dependencies diff --git a/poetry.lock b/poetry.lock index d905245a08..7d166db4d6 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,9 +1,10 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.4.2 and should not be changed by hand. 
[[package]] name = "adlfs" version = "2023.10.0" description = "Access Azure Datalake Gen1 with fsspec and dask" +category = "main" optional = true python-versions = ">=3.8" files = [ @@ -26,6 +27,7 @@ docs = ["furo", "myst-parser", "numpydoc", "sphinx"] name = "aiobotocore" version = "2.5.4" description = "Async client for aws services using botocore and aiohttp" +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -47,6 +49,7 @@ boto3 = ["boto3 (>=1.28.17,<1.28.18)"] name = "aiohttp" version = "3.9.0" description = "Async http client/server framework (asyncio)" +category = "main" optional = true python-versions = ">=3.8" files = [ @@ -143,6 +146,7 @@ speedups = ["Brotli", "aiodns", "brotlicffi"] name = "aioitertools" version = "0.11.0" description = "itertools and builtins for AsyncIO and mixed iterables" +category = "main" optional = true python-versions = ">=3.6" files = [ @@ -157,6 +161,7 @@ typing_extensions = {version = ">=4.0", markers = "python_version < \"3.10\""} name = "aiosignal" version = "1.3.1" description = "aiosignal: a list of registered asynchronous callbacks" +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -171,6 +176,7 @@ frozenlist = ">=1.1.0" name = "annotated-types" version = "0.5.0" description = "Reusable constraint types to use with typing.Annotated" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -185,6 +191,7 @@ typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.9\""} name = "async-timeout" version = "4.0.3" description = "Timeout context manager for asyncio programs" +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -196,6 +203,7 @@ files = [ name = "attrs" version = "23.1.0" description = "Classes Without Boilerplate" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -214,6 +222,7 @@ tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pyte name = "aws-sam-translator" version = "1.82.0" description = "AWS SAM Translator is a library that transform SAM templates into AWS CloudFormation templates" +category = "dev" optional = false python-versions = ">=3.7, <=4.0, !=4.0" files = [ @@ -222,18 +231,19 @@ files = [ ] [package.dependencies] -boto3 = ">=1.19.5,<2.dev0" +boto3 = ">=1.19.5,<2.0.0" jsonschema = ">=3.2,<5" pydantic = ">=1.8,<3" typing-extensions = ">=4.4,<5" [package.extras] -dev = ["black (==23.3.0)", "boto3 (>=1.23,<2)", "boto3-stubs[appconfig,serverlessrepo] (>=1.19.5,<2.dev0)", "coverage (>=5.3,<8)", "dateparser (>=1.1,<2.0)", "importlib-metadata", "mypy (>=1.3.0,<1.4.0)", "parameterized (>=0.7,<1.0)", "pytest (>=6.2,<8)", "pytest-cov (>=2.10,<5)", "pytest-env (>=0.6,<1)", "pytest-rerunfailures (>=9.1,<12)", "pytest-xdist (>=2.5,<4)", "pyyaml (>=6.0,<7.0)", "requests (>=2.28,<3.0)", "ruamel.yaml (==0.17.21)", "ruff (==0.0.284)", "tenacity (>=8.0,<9.0)", "types-PyYAML (>=6.0,<7.0)", "types-jsonschema (>=3.2,<4.0)"] +dev = ["black (==23.3.0)", "boto3 (>=1.23,<2)", "boto3-stubs[appconfig,serverlessrepo] (>=1.19.5,<2.0.0)", "coverage (>=5.3,<8)", "dateparser (>=1.1,<2.0)", "importlib-metadata", "mypy (>=1.3.0,<1.4.0)", "parameterized (>=0.7,<1.0)", "pytest (>=6.2,<8)", "pytest-cov (>=2.10,<5)", "pytest-env (>=0.6,<1)", "pytest-rerunfailures (>=9.1,<12)", "pytest-xdist (>=2.5,<4)", "pyyaml (>=6.0,<7.0)", "requests (>=2.28,<3.0)", "ruamel.yaml (==0.17.21)", "ruff (==0.0.284)", "tenacity (>=8.0,<9.0)", "types-PyYAML (>=6.0,<7.0)", "types-jsonschema (>=3.2,<4.0)"] [[package]] name = 
"aws-xray-sdk" version = "2.12.1" description = "The AWS X-Ray SDK for Python (the SDK) enables Python developers to record and emit information from within their applications to the AWS X-Ray service." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -249,6 +259,7 @@ wrapt = "*" name = "azure-core" version = "1.29.4" description = "Microsoft Azure Core Library for Python" +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -268,6 +279,7 @@ aio = ["aiohttp (>=3.0)"] name = "azure-datalake-store" version = "0.0.53" description = "Azure Data Lake Store Filesystem Client Library for Python" +category = "main" optional = true python-versions = "*" files = [ @@ -284,6 +296,7 @@ requests = ">=2.20.0" name = "azure-identity" version = "1.14.0" description = "Microsoft Azure Identity Library for Python" +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -301,6 +314,7 @@ msal-extensions = ">=0.3.0,<2.0.0" name = "azure-storage-blob" version = "12.18.1" description = "Microsoft Azure Blob Storage Client Library for Python" +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -321,6 +335,7 @@ aio = ["azure-core[aio] (>=1.28.0,<2.0.0)"] name = "blinker" version = "1.7.0" description = "Fast, simple object-to-object and broadcast signaling" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -332,6 +347,7 @@ files = [ name = "boto3" version = "1.28.17" description = "The AWS SDK for Python" +category = "main" optional = false python-versions = ">= 3.7" files = [ @@ -351,6 +367,7 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] name = "botocore" version = "1.31.17" description = "Low-level, data-driven core of boto 3." +category = "main" optional = false python-versions = ">= 3.7" files = [ @@ -370,6 +387,7 @@ crt = ["awscrt (==0.16.26)"] name = "build" version = "1.0.3" description = "A simple, correct Python build frontend" +category = "dev" optional = false python-versions = ">= 3.7" files = [ @@ -394,6 +412,7 @@ virtualenv = ["virtualenv (>=20.0.35)"] name = "cachetools" version = "5.3.1" description = "Extensible memoizing collections and decorators" +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -405,6 +424,7 @@ files = [ name = "certifi" version = "2023.7.22" description = "Python package for providing Mozilla's CA Bundle." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -416,6 +436,7 @@ files = [ name = "cffi" version = "1.15.1" description = "Foreign Function Interface for Python calling C code." +category = "main" optional = false python-versions = "*" files = [ @@ -492,6 +513,7 @@ pycparser = "*" name = "cfgv" version = "3.4.0" description = "Validate configuration and produce human readable error messages." +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -503,6 +525,7 @@ files = [ name = "cfn-lint" version = "0.83.6" description = "Checks CloudFormation templates for practices and behaviour that could potentially be improved" +category = "dev" optional = false python-versions = ">=3.7, <=4.0, !=4.0" files = [ @@ -526,6 +549,7 @@ sympy = ">=1.0.0" name = "charset-normalizer" version = "3.2.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -610,6 +634,7 @@ files = [ name = "click" version = "8.1.7" description = "Composable command line interface toolkit" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -624,6 +649,7 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." +category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -635,6 +661,7 @@ files = [ name = "coverage" version = "7.4.0" description = "Code coverage measurement for Python" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -702,6 +729,7 @@ toml = ["tomli"] name = "cryptography" version = "41.0.6" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -747,6 +775,7 @@ test-randomorder = ["pytest-randomly"] name = "cython" version = "3.0.8" description = "The Cython compiler for writing C extensions in the Python language." +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -814,6 +843,7 @@ files = [ name = "decorator" version = "5.1.1" description = "Decorators for Humans" +category = "main" optional = true python-versions = ">=3.5" files = [ @@ -825,6 +855,7 @@ files = [ name = "distlib" version = "0.3.7" description = "Distribution utilities" +category = "dev" optional = false python-versions = "*" files = [ @@ -836,6 +867,7 @@ files = [ name = "docker" version = "7.0.0" description = "A Python library for the Docker Engine API." +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -857,6 +889,7 @@ websockets = ["websocket-client (>=1.3.0)"] name = "docutils" version = "0.20.1" description = "Docutils -- Python Documentation Utilities" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -868,6 +901,7 @@ files = [ name = "duckdb" version = "0.9.2" description = "DuckDB embedded database" +category = "main" optional = true python-versions = ">=3.7.0" files = [ @@ -916,6 +950,7 @@ files = [ name = "ecdsa" version = "0.18.0" description = "ECDSA cryptographic signature library (pure python)" +category = "dev" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -934,6 +969,7 @@ gmpy2 = ["gmpy2"] name = "exceptiongroup" version = "1.1.3" description = "Backport of PEP 654 (exception groups)" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -948,6 +984,7 @@ test = ["pytest (>=6)"] name = "fastavro" version = "1.9.3" description = "Fast read/write of AVRO files" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -994,6 +1031,7 @@ zstandard = ["zstandard"] name = "filelock" version = "3.12.4" description = "A platform independent file lock." +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1010,6 +1048,7 @@ typing = ["typing-extensions (>=4.7.1)"] name = "flask" version = "3.0.0" description = "A simple framework for building complex web applications." 
+category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1033,6 +1072,7 @@ dotenv = ["python-dotenv"] name = "flask-cors" version = "4.0.0" description = "A Flask extension adding a decorator for CORS support" +category = "dev" optional = false python-versions = "*" files = [ @@ -1047,6 +1087,7 @@ Flask = ">=0.9" name = "frozenlist" version = "1.4.0" description = "A list-like structure which implements collections.abc.MutableSequence" +category = "main" optional = true python-versions = ">=3.8" files = [ @@ -1117,6 +1158,7 @@ files = [ name = "fsspec" version = "2023.9.1" description = "File-system specification" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1152,6 +1194,7 @@ tqdm = ["tqdm"] name = "gcsfs" version = "2023.9.1" description = "Convenient Filesystem interface over GCS" +category = "main" optional = true python-versions = ">=3.8" files = [ @@ -1176,6 +1219,7 @@ gcsfuse = ["fusepy"] name = "google-api-core" version = "2.11.1" description = "Google API client core library" +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -1198,6 +1242,7 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] name = "google-auth" version = "2.23.0" description = "Google Authentication Library" +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -1222,6 +1267,7 @@ requests = ["requests (>=2.20.0,<3.0.0.dev0)"] name = "google-auth-oauthlib" version = "1.1.0" description = "Google Authentication Library" +category = "main" optional = true python-versions = ">=3.6" files = [ @@ -1240,6 +1286,7 @@ tool = ["click (>=6.0.0)"] name = "google-cloud-core" version = "2.3.3" description = "Google Cloud API client core library" +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -1248,7 +1295,7 @@ files = [ ] [package.dependencies] -google-api-core = ">=1.31.6,<2.0.dev0 || >2.3.0,<3.0.0dev" +google-api-core = ">=1.31.6,<2.0.0 || >2.3.0,<3.0.0dev" google-auth = ">=1.25.0,<3.0dev" [package.extras] @@ -1258,6 +1305,7 @@ grpc = ["grpcio (>=1.38.0,<2.0dev)"] name = "google-cloud-storage" version = "2.11.0" description = "Google Cloud Storage API client library" +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -1266,7 +1314,7 @@ files = [ ] [package.dependencies] -google-api-core = ">=1.31.5,<2.0.dev0 || >2.3.0,<3.0.0dev" +google-api-core = ">=1.31.5,<2.0.0 || >2.3.0,<3.0.0dev" google-auth = ">=1.25.0,<3.0dev" google-cloud-core = ">=2.3.0,<3.0dev" google-resumable-media = ">=2.6.0" @@ -1279,6 +1327,7 @@ protobuf = ["protobuf (<5.0.0dev)"] name = "google-crc32c" version = "1.5.0" description = "A python wrapper of the C library 'Google CRC32C'" +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -1359,6 +1408,7 @@ testing = ["pytest"] name = "google-resumable-media" version = "2.6.0" description = "Utilities for Google Media Downloads and Resumable Uploads" +category = "main" optional = true python-versions = ">= 3.7" files = [ @@ -1377,6 +1427,7 @@ requests = ["requests (>=2.18.0,<3.0.0dev)"] name = "googleapis-common-protos" version = "1.60.0" description = "Common protobufs used in Google APIs" +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -1394,6 +1445,7 @@ grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] name = "graphql-core" version = "3.2.3" description = "GraphQL implementation for Python, a port of GraphQL.js, the JavaScript reference implementation for GraphQL." 
+category = "dev" optional = false python-versions = ">=3.6,<4" files = [ @@ -1405,6 +1457,7 @@ files = [ name = "greenlet" version = "2.0.2" description = "Lightweight in-process concurrent programming" +category = "main" optional = true python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" files = [ @@ -1482,6 +1535,7 @@ test = ["objgraph", "psutil"] name = "identify" version = "2.5.29" description = "File identification library for Python" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1496,6 +1550,7 @@ license = ["ukkonen"] name = "idna" version = "3.4" description = "Internationalized Domain Names in Applications (IDNA)" +category = "main" optional = false python-versions = ">=3.5" files = [ @@ -1507,6 +1562,7 @@ files = [ name = "importlib-metadata" version = "6.8.0" description = "Read metadata from Python packages" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1526,6 +1582,7 @@ testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs name = "importlib-resources" version = "6.1.0" description = "Read resources from Python packages" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1544,6 +1601,7 @@ testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1555,6 +1613,7 @@ files = [ name = "isodate" version = "0.6.1" description = "An ISO 8601 date/time/duration parser and formatter" +category = "main" optional = true python-versions = "*" files = [ @@ -1569,6 +1628,7 @@ six = "*" name = "itsdangerous" version = "2.1.2" description = "Safely pass data to untrusted environments and back." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1580,6 +1640,7 @@ files = [ name = "jinja2" version = "3.1.2" description = "A very fast and expressive template engine." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1597,6 +1658,7 @@ i18n = ["Babel (>=2.7)"] name = "jmespath" version = "1.0.1" description = "JSON Matching Expressions" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1608,6 +1670,7 @@ files = [ name = "jschema-to-python" version = "1.2.3" description = "Generate source code for Python classes from a JSON schema." 
+category = "dev" optional = false python-versions = ">= 2.7" files = [ @@ -1624,6 +1687,7 @@ pbr = "*" name = "jsondiff" version = "2.0.0" description = "Diff JSON and JSON-like structures in Python" +category = "dev" optional = false python-versions = "*" files = [ @@ -1635,6 +1699,7 @@ files = [ name = "jsonpatch" version = "1.33" description = "Apply JSON-Patches (RFC 6902)" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" files = [ @@ -1649,6 +1714,7 @@ jsonpointer = ">=1.9" name = "jsonpickle" version = "3.0.2" description = "Python library for serializing any arbitrary object graph into JSON" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1665,6 +1731,7 @@ testing-libs = ["simplejson", "ujson"] name = "jsonpointer" version = "2.4" description = "Identify specific nodes in a JSON document (RFC 6901)" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" files = [ @@ -1676,6 +1743,7 @@ files = [ name = "jsonschema" version = "4.19.1" description = "An implementation of JSON Schema validation for Python" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1699,6 +1767,7 @@ format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339- name = "jsonschema-path" version = "0.3.2" description = "JSONSchema Spec with object-oriented paths" +category = "dev" optional = false python-versions = ">=3.8.0,<4.0.0" files = [ @@ -1716,6 +1785,7 @@ requests = ">=2.31.0,<3.0.0" name = "jsonschema-specifications" version = "2023.7.1" description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1731,6 +1801,7 @@ referencing = ">=0.28.0" name = "junit-xml" version = "1.9" description = "Creates JUnit XML test result documents that can be read by tools such as Jenkins" +category = "dev" optional = false python-versions = "*" files = [ @@ -1745,6 +1816,7 @@ six = "*" name = "lazy-object-proxy" version = "1.10.0" description = "A fast and thorough lazy object proxy." +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1791,6 +1863,7 @@ files = [ name = "markdown-it-py" version = "3.0.0" description = "Python port of markdown-it. Markdown parsing, done right!" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1815,6 +1888,7 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] name = "markupsafe" version = "2.1.3" description = "Safely add untrusted strings to HTML/XML markup." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1884,6 +1958,7 @@ files = [ name = "mdurl" version = "0.1.2" description = "Markdown URL utilities" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1895,6 +1970,7 @@ files = [ name = "mmhash3" version = "3.0.1" description = "Python wrapper for MurmurHash (MurmurHash3), a set of fast and robust hash functions." 
+category = "main" optional = false python-versions = "*" files = [ @@ -1938,6 +2014,7 @@ files = [ name = "moto" version = "4.2.13" description = "" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2002,6 +2079,7 @@ xray = ["aws-xray-sdk (>=0.93,!=0.96)", "setuptools"] name = "mpmath" version = "1.3.0" description = "Python library for arbitrary-precision floating-point arithmetic" +category = "dev" optional = false python-versions = "*" files = [ @@ -2019,6 +2097,7 @@ tests = ["pytest (>=4.6)"] name = "msal" version = "1.24.0" description = "The Microsoft Authentication Library (MSAL) for Python library" +category = "main" optional = true python-versions = ">=2.7" files = [ @@ -2038,6 +2117,7 @@ broker = ["pymsalruntime (>=0.13.2,<0.14)"] name = "msal-extensions" version = "1.0.0" description = "Microsoft Authentication Library extensions (MSAL EX) provides a persistence API that can save your data on disk, encrypted on Windows, macOS and Linux. Concurrent data access will be coordinated by a file lock mechanism." +category = "main" optional = true python-versions = "*" files = [ @@ -2056,6 +2136,7 @@ portalocker = [ name = "msgpack" version = "1.0.6" description = "MessagePack serializer" +category = "main" optional = true python-versions = ">=3.8" files = [ @@ -2121,6 +2202,7 @@ files = [ name = "multidict" version = "6.0.4" description = "multidict implementation" +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -2204,6 +2286,7 @@ files = [ name = "mypy-boto3-glue" version = "1.34.7" description = "Type annotations for boto3.Glue 1.34.7 service generated with mypy-boto3-builder 7.23.0" +category = "main" optional = true python-versions = ">=3.8" files = [ @@ -2218,6 +2301,7 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} name = "networkx" version = "3.1" description = "Python package for creating and manipulating graphs and networks" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2236,6 +2320,7 @@ test = ["codecov (>=2.1)", "pytest (>=7.2)", "pytest-cov (>=4.0)"] name = "nodeenv" version = "1.8.0" description = "Node.js virtual environment builder" +category = "dev" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" files = [ @@ -2250,6 +2335,7 @@ setuptools = "*" name = "numpy" version = "1.24.4" description = "Fundamental package for array computing in Python" +category = "main" optional = true python-versions = ">=3.8" files = [ @@ -2287,6 +2373,7 @@ files = [ name = "oauthlib" version = "3.2.2" description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" +category = "main" optional = true python-versions = ">=3.6" files = [ @@ -2303,6 +2390,7 @@ signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] name = "openapi-schema-validator" version = "0.6.2" description = "OpenAPI schema validation for Python" +category = "dev" optional = false python-versions = ">=3.8.0,<4.0.0" files = [ @@ -2319,6 +2407,7 @@ rfc3339-validator = "*" name = "openapi-spec-validator" version = "0.7.1" description = "OpenAPI 2.0 (aka Swagger) and OpenAPI 3 spec validator" +category = "dev" optional = false python-versions = ">=3.8.0,<4.0.0" files = [ @@ -2337,6 +2426,7 @@ openapi-schema-validator = ">=0.6.0,<0.7.0" name = "packaging" version = "23.1" description = "Core utilities for Python packages" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2348,6 +2438,7 @@ files = 
[ name = "pandas" version = "2.0.3" description = "Powerful data structures for data analysis, time series, and statistics" +category = "main" optional = true python-versions = ">=3.8" files = [ @@ -2381,8 +2472,8 @@ files = [ [package.dependencies] numpy = [ {version = ">=1.20.3", markers = "python_version < \"3.10\""}, + {version = ">=1.21.0", markers = "python_version >= \"3.10\""}, {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, - {version = ">=1.21.0", markers = "python_version >= \"3.10\" and python_version < \"3.11\""}, ] python-dateutil = ">=2.8.2" pytz = ">=2020.1" @@ -2415,6 +2506,7 @@ xml = ["lxml (>=4.6.3)"] name = "pathable" version = "0.4.3" description = "Object-oriented paths" +category = "dev" optional = false python-versions = ">=3.7.0,<4.0.0" files = [ @@ -2426,6 +2518,7 @@ files = [ name = "pbr" version = "6.0.0" description = "Python Build Reasonableness" +category = "dev" optional = false python-versions = ">=2.6" files = [ @@ -2437,6 +2530,7 @@ files = [ name = "pkgutil-resolve-name" version = "1.3.10" description = "Resolve a name to an object." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2448,6 +2542,7 @@ files = [ name = "platformdirs" version = "3.10.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2463,6 +2558,7 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-co name = "pluggy" version = "1.3.0" description = "plugin and hook calling mechanisms for python" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2478,6 +2574,7 @@ testing = ["pytest", "pytest-benchmark"] name = "portalocker" version = "2.8.2" description = "Wraps the portalocker recipe for easy usage" +category = "main" optional = true python-versions = ">=3.8" files = [ @@ -2497,6 +2594,7 @@ tests = ["pytest (>=5.4.1)", "pytest-cov (>=2.8.1)", "pytest-mypy (>=0.8.0)", "p name = "pre-commit" version = "3.5.0" description = "A framework for managing and maintaining multi-language pre-commit hooks." 
+category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2515,6 +2613,7 @@ virtualenv = ">=20.10.0" name = "protobuf" version = "4.24.3" description = "" +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -2537,6 +2636,7 @@ files = [ name = "psycopg2-binary" version = "2.9.9" description = "psycopg2 - Python-PostgreSQL Database Adapter" +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -2618,6 +2718,7 @@ files = [ name = "py-partiql-parser" version = "0.5.0" description = "Pure Python PartiQL Parser" +category = "dev" optional = false python-versions = "*" files = [ @@ -2632,6 +2733,7 @@ dev = ["black (==22.6.0)", "flake8", "mypy", "pytest"] name = "py4j" version = "0.10.9.7" description = "Enables Python programs to dynamically access arbitrary Java objects" +category = "dev" optional = false python-versions = "*" files = [ @@ -2643,6 +2745,7 @@ files = [ name = "pyarrow" version = "15.0.0" description = "Python library for Apache Arrow" +category = "main" optional = true python-versions = ">=3.8" files = [ @@ -2691,6 +2794,7 @@ numpy = ">=1.16.6,<2" name = "pyasn1" version = "0.5.0" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" +category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ @@ -2702,6 +2806,7 @@ files = [ name = "pyasn1-modules" version = "0.3.0" description = "A collection of ASN.1-based protocols modules" +category = "main" optional = true python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ @@ -2716,6 +2821,7 @@ pyasn1 = ">=0.4.6,<0.6.0" name = "pycparser" version = "2.21" description = "C parser in Python" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -2727,6 +2833,7 @@ files = [ name = "pydantic" version = "2.5.3" description = "Data validation using Python type hints" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2746,6 +2853,7 @@ email = ["email-validator (>=2.0.0)"] name = "pydantic-core" version = "2.14.6" description = "" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2863,6 +2971,7 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" name = "pygments" version = "2.16.1" description = "Pygments is a syntax highlighting package written in Python." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2877,6 +2986,7 @@ plugins = ["importlib-metadata"] name = "pyjwt" version = "2.8.0" description = "JSON Web Token implementation in Python" +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -2897,6 +3007,7 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] name = "pyparsing" version = "3.1.1" description = "pyparsing module - Classes and methods to define and execute parsing grammars" +category = "main" optional = false python-versions = ">=3.6.8" files = [ @@ -2911,6 +3022,7 @@ diagrams = ["jinja2", "railroad-diagrams"] name = "pyproject-hooks" version = "1.0.0" description = "Wrappers to call pyproject.toml-based build backend hooks." 
+category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2925,6 +3037,7 @@ tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} name = "pyspark" version = "3.4.2" description = "Apache Spark Python API" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2945,6 +3058,7 @@ sql = ["numpy (>=1.15)", "pandas (>=1.0.5)", "pyarrow (>=1.0.0)"] name = "pytest" version = "7.4.4" description = "pytest: simple powerful testing with Python" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2967,6 +3081,7 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no name = "pytest-checkdocs" version = "2.10.1" description = "check the README when running tests" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2987,6 +3102,7 @@ testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", name = "pytest-lazy-fixture" version = "0.6.3" description = "It helps to use fixtures in pytest.mark.parametrize" +category = "dev" optional = false python-versions = "*" files = [ @@ -3001,6 +3117,7 @@ pytest = ">=3.2.5" name = "pytest-mock" version = "3.12.0" description = "Thin-wrapper around the mock package for easier use with pytest" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -3018,6 +3135,7 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] name = "python-dateutil" version = "2.8.2" description = "Extensions to the standard Python datetime module" +category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ @@ -3032,6 +3150,7 @@ six = ">=1.5" name = "python-jose" version = "3.3.0" description = "JOSE implementation in Python" +category = "dev" optional = false python-versions = "*" files = [ @@ -3054,6 +3173,7 @@ pycryptodome = ["pyasn1", "pycryptodome (>=3.3.1,<4.0.0)"] name = "python-snappy" version = "0.6.1" description = "Python library for the snappy compression library from Google" +category = "main" optional = true python-versions = "*" files = [ @@ -3111,6 +3231,7 @@ files = [ name = "pytz" version = "2023.3.post1" description = "World timezone definitions, modern and historical" +category = "main" optional = true python-versions = "*" files = [ @@ -3122,6 +3243,7 @@ files = [ name = "pywin32" version = "306" description = "Python for Window Extensions" +category = "dev" optional = false python-versions = "*" files = [ @@ -3145,6 +3267,7 @@ files = [ name = "pyyaml" version = "6.0.1" description = "YAML parser and emitter for Python" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3205,6 +3328,7 @@ files = [ name = "ray" version = "2.7.1" description = "Ray provides a simple, universal API for building distributed applications." +category = "main" optional = true python-versions = "*" files = [ @@ -3268,6 +3392,7 @@ tune = ["fsspec", "pandas", "pyarrow (>=6.0.1)", "requests", "tensorboardX (>=1. name = "referencing" version = "0.30.2" description = "JSON Referencing + Python" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3283,6 +3408,7 @@ rpds-py = ">=0.7.0" name = "regex" version = "2023.10.3" description = "Alternative regular expression module, to replace re." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3380,6 +3506,7 @@ files = [ name = "requests" version = "2.31.0" description = "Python HTTP for Humans." 
+category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3401,6 +3528,7 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "requests-mock" version = "1.11.0" description = "Mock out responses from the requests package" +category = "dev" optional = false python-versions = "*" files = [ @@ -3420,6 +3548,7 @@ test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "tes name = "requests-oauthlib" version = "1.3.1" description = "OAuthlib authentication support for Requests." +category = "main" optional = true python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -3438,6 +3567,7 @@ rsa = ["oauthlib[signedtoken] (>=3.0.0)"] name = "responses" version = "0.23.3" description = "A utility library for mocking out the `requests` Python library." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3458,6 +3588,7 @@ tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asy name = "rfc3339-validator" version = "0.1.4" description = "A pure python RFC3339 validator" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -3472,6 +3603,7 @@ six = "*" name = "rich" version = "13.7.0" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -3491,6 +3623,7 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] name = "rpds-py" version = "0.10.3" description = "Python bindings to Rust's persistent data structures (rpds)" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3597,6 +3730,7 @@ files = [ name = "rsa" version = "4.9" description = "Pure-Python RSA implementation" +category = "main" optional = false python-versions = ">=3.6,<4" files = [ @@ -3611,6 +3745,7 @@ pyasn1 = ">=0.1.3" name = "s3fs" version = "2023.9.1" description = "Convenient Filesystem interface over S3" +category = "main" optional = true python-versions = ">= 3.8" files = [ @@ -3631,6 +3766,7 @@ boto3 = ["aiobotocore[boto3] (>=2.5.4,<2.6.0)"] name = "s3transfer" version = "0.6.2" description = "An Amazon S3 Transfer Manager" +category = "main" optional = false python-versions = ">= 3.7" files = [ @@ -3648,6 +3784,7 @@ crt = ["botocore[crt] (>=1.20.29,<2.0a.0)"] name = "sarif-om" version = "1.0.4" description = "Classes implementing the SARIF 2.1.0 object model." 
+category = "dev" optional = false python-versions = ">= 2.7" files = [ @@ -3663,6 +3800,7 @@ pbr = "*" name = "setuptools" version = "68.2.2" description = "Easily download, build, install, upgrade, and uninstall Python packages" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -3679,6 +3817,7 @@ testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jar name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -3690,6 +3829,7 @@ files = [ name = "sortedcontainers" version = "2.4.0" description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set" +category = "main" optional = false python-versions = "*" files = [ @@ -3701,6 +3841,7 @@ files = [ name = "sqlalchemy" version = "2.0.25" description = "Database Abstraction Library" +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -3788,6 +3929,7 @@ sqlcipher = ["sqlcipher3_binary"] name = "sshpubkeys" version = "3.3.1" description = "SSH public key parser" +category = "dev" optional = false python-versions = ">=3" files = [ @@ -3806,6 +3948,7 @@ dev = ["twine", "wheel", "yapf"] name = "strictyaml" version = "1.7.3" description = "Strict, typed YAML parser" +category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -3820,6 +3963,7 @@ python-dateutil = ">=2.6.0" name = "sympy" version = "1.12" description = "Computer algebra system (CAS) in Python" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -3834,6 +3978,7 @@ mpmath = ">=0.19" name = "thrift" version = "0.16.0" description = "Python bindings for the Apache Thrift RPC system" +category = "main" optional = true python-versions = "*" files = [ @@ -3852,6 +3997,7 @@ twisted = ["twisted"] name = "tomli" version = "2.0.1" description = "A lil' TOML parser" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3863,6 +4009,7 @@ files = [ name = "types-pyyaml" version = "6.0.12.11" description = "Typing stubs for PyYAML" +category = "dev" optional = false python-versions = "*" files = [ @@ -3874,6 +4021,7 @@ files = [ name = "typing-extensions" version = "4.9.0" description = "Backported and Experimental Type Hints for Python 3.8+" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3885,6 +4033,7 @@ files = [ name = "tzdata" version = "2023.3" description = "Provider of IANA time zone data" +category = "main" optional = true python-versions = ">=2" files = [ @@ -3896,6 +4045,7 @@ files = [ name = "urllib3" version = "1.26.18" description = "HTTP library with thread-safe connection pooling, file post, and more." +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ @@ -3912,6 +4062,7 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] name = "virtualenv" version = "20.24.5" description = "Virtual Python Environment builder" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3932,6 +4083,7 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess name = "werkzeug" version = "3.0.1" description = "The comprehensive WSGI web application library." +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -3949,6 +4101,7 @@ watchdog = ["watchdog (>=2.3)"] name = "wrapt" version = "1.15.0" description = "Module for decorators, wrappers and monkey patching." 
+category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" files = [ @@ -4033,6 +4186,7 @@ files = [ name = "xmltodict" version = "0.13.0" description = "Makes working with XML feel like you are working with JSON" +category = "dev" optional = false python-versions = ">=3.4" files = [ @@ -4044,6 +4198,7 @@ files = [ name = "yarl" version = "1.9.2" description = "Yet another URL library" +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -4131,6 +4286,7 @@ multidict = ">=4.0" name = "zipp" version = "3.17.0" description = "Backport of pathlib-compatible object wrapper for zip files" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -4146,6 +4302,7 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p name = "zstandard" version = "0.22.0" description = "Zstandard bindings for Python" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -4216,9 +4373,10 @@ ray = ["pandas", "pyarrow", "ray"] s3fs = ["s3fs"] snappy = ["python-snappy"] sql-postgres = ["psycopg2-binary", "sqlalchemy"] +sql-sqlite = ["sqlalchemy"] zstandard = ["zstandard"] [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "bdd73f282497b994c64112a042d7818d6ce1728f69e407970d98750e2cc243b7" +content-hash = "aca2a5ecf024067cf7ab8123908b7dedf1853f8a3b078429591ddb04ec558f9c" diff --git a/pyiceberg/catalog/__init__.py b/pyiceberg/catalog/__init__.py index 4cb6c2ff4a..a39d0e915c 100644 --- a/pyiceberg/catalog/__init__.py +++ b/pyiceberg/catalog/__init__.py @@ -134,7 +134,9 @@ def load_sql(name: str, conf: Properties) -> Catalog: return SqlCatalog(name, **conf) except ImportError as exc: - raise NotInstalledError("SQLAlchemy support not installed: pip install 'pyiceberg[sql-postgres]'") from exc + raise NotInstalledError( + "SQLAlchemy support not installed: pip install 'pyiceberg[sql-postgres]' or pip install 'pyiceberg[sql-sqlite]'" + ) from exc AVAILABLE_CATALOGS: dict[CatalogType, Callable[[str, Properties], Catalog]] = { diff --git a/pyproject.toml b/pyproject.toml index c652298926..059fd8c0bb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -113,6 +113,7 @@ adlfs = ["adlfs"] dynamodb = ["boto3"] zstandard = ["zstandard"] sql-postgres = ["sqlalchemy", "psycopg2-binary"] +sql-sqlite = ["sqlalchemy"] gcsfs = ["gcsfs"] [tool.pytest.ini_options] diff --git a/tests/catalog/test_sql.py b/tests/catalog/test_sql.py index cb39dfadd0..217ea8f535 100644 --- a/tests/catalog/test_sql.py +++ b/tests/catalog/test_sql.py @@ -68,7 +68,7 @@ def fixture_another_random_identifier(warehouse: Path, database_name: str, table @pytest.fixture(scope="module") def catalog_memory(warehouse: Path) -> Generator[SqlCatalog, None, None]: props = { - "uri": "sqlite+pysqlite:///:memory:", + "uri": "sqlite:///:memory:", "warehouse": f"file://{warehouse}", } catalog = SqlCatalog("test_sql_catalog", **props) From 4cf1f35dfd3e7cfb2996887e861d740239746306 Mon Sep 17 00:00:00 2001 From: Fokko Driesprong Date: Wed, 24 Jan 2024 09:30:28 +0100 Subject: [PATCH 038/169] Lock Ray on `<2.8.0` (#298) * Lock Ray on <2.8.0 There seem to be some issues with the later version: https://github.com/apache/iceberg-python/pull/287 * Fix conflicts --- poetry.lock | 2466 +++++++++++++++++++++++------------------------- pyproject.toml | 2 +- 2 files changed, 1202 insertions(+), 1266 deletions(-) diff --git a/poetry.lock b/poetry.lock index 7d166db4d6..88ac9115d2 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,15 +1,14 @@ -# 
This file is automatically @generated by Poetry 1.4.2 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. [[package]] name = "adlfs" -version = "2023.10.0" +version = "2023.12.0" description = "Access Azure Datalake Gen1 with fsspec and dask" -category = "main" optional = true python-versions = ">=3.8" files = [ - {file = "adlfs-2023.10.0-py3-none-any.whl", hash = "sha256:dfdc8cc782bd78262435fb1bc2a8cfdbdd80342bb1b1ae9dfff968de912b0b09"}, - {file = "adlfs-2023.10.0.tar.gz", hash = "sha256:f5cf06c5b0074d17d43838d4c434791a98420d9e768b36a1a02c7b3930686543"}, + {file = "adlfs-2023.12.0-py3-none-any.whl", hash = "sha256:da6e391afd002c7bb1b75dcc286b78fdf5dbf29aca0f984462033df6311b4e7e"}, + {file = "adlfs-2023.12.0.tar.gz", hash = "sha256:a590694ed9f5a45741e82bff8bcf88c30a790da949310817330b5e7992b8a9e9"}, ] [package.dependencies] @@ -18,117 +17,116 @@ azure-core = ">=1.23.1,<2.0.0" azure-datalake-store = ">=0.0.46,<0.1" azure-identity = "*" azure-storage-blob = ">=12.12.0" -fsspec = ">=2023.9.0" +fsspec = ">=2023.12.0" [package.extras] docs = ["furo", "myst-parser", "numpydoc", "sphinx"] +tests = ["arrow", "dask[dataframe]", "docker", "pytest", "pytest-mock"] [[package]] name = "aiobotocore" -version = "2.5.4" +version = "2.11.0" description = "Async client for aws services using botocore and aiohttp" -category = "main" optional = true -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "aiobotocore-2.5.4-py3-none-any.whl", hash = "sha256:4b32218728ca3d0be83835b604603a0cd6c329066e884bb78149334267f92440"}, - {file = "aiobotocore-2.5.4.tar.gz", hash = "sha256:60341f19eda77e41e1ab11eef171b5a98b5dbdb90804f5334b6f90e560e31fae"}, + {file = "aiobotocore-2.11.0-py3-none-any.whl", hash = "sha256:6eaf48a6ccd3943ce7d26f75dc8fa0292b7a1a069bd5a9557d37fae5adf14d2d"}, + {file = "aiobotocore-2.11.0.tar.gz", hash = "sha256:4c5b1bf01e7aab74a29bd783159517ecf6d45b5a1647e53302e9554c064e420a"}, ] [package.dependencies] -aiohttp = ">=3.3.1,<4.0.0" +aiohttp = ">=3.7.4.post0,<4.0.0" aioitertools = ">=0.5.1,<1.0.0" -botocore = ">=1.31.17,<1.31.18" +botocore = ">=1.33.2,<1.34.23" wrapt = ">=1.10.10,<2.0.0" [package.extras] -awscli = ["awscli (>=1.29.17,<1.29.18)"] -boto3 = ["boto3 (>=1.28.17,<1.28.18)"] +awscli = ["awscli (>=1.31.2,<1.32.23)"] +boto3 = ["boto3 (>=1.33.2,<1.34.23)"] [[package]] name = "aiohttp" -version = "3.9.0" +version = "3.9.1" description = "Async http client/server framework (asyncio)" -category = "main" optional = true python-versions = ">=3.8" files = [ - {file = "aiohttp-3.9.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6896b8416be9ada4d22cd359d7cb98955576ce863eadad5596b7cdfbf3e17c6c"}, - {file = "aiohttp-3.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1736d87dad8ef46a8ec9cddd349fa9f7bd3a064c47dd6469c0d6763d3d49a4fc"}, - {file = "aiohttp-3.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8c9e5f4d7208cda1a2bb600e29069eecf857e6980d0ccc922ccf9d1372c16f4b"}, - {file = "aiohttp-3.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8488519aa05e636c5997719fe543c8daf19f538f4fa044f3ce94bee608817cff"}, - {file = "aiohttp-3.9.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ab16c254e2312efeb799bc3c06897f65a133b38b69682bf75d1f1ee1a9c43a9"}, - {file = "aiohttp-3.9.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7a94bde005a8f926d0fa38b88092a03dea4b4875a61fbcd9ac6f4351df1b57cd"}, - {file = 
"aiohttp-3.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b777c9286b6c6a94f50ddb3a6e730deec327e9e2256cb08b5530db0f7d40fd8"}, - {file = "aiohttp-3.9.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:571760ad7736b34d05597a1fd38cbc7d47f7b65deb722cb8e86fd827404d1f6b"}, - {file = "aiohttp-3.9.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:deac0a32aec29608eb25d730f4bc5a261a65b6c48ded1ed861d2a1852577c932"}, - {file = "aiohttp-3.9.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:4ee1b4152bc3190cc40ddd6a14715e3004944263ea208229ab4c297712aa3075"}, - {file = "aiohttp-3.9.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:3607375053df58ed6f23903aa10cf3112b1240e8c799d243bbad0f7be0666986"}, - {file = "aiohttp-3.9.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:65b0a70a25456d329a5e1426702dde67be0fb7a4ead718005ba2ca582d023a94"}, - {file = "aiohttp-3.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5a2eb5311a37fe105aa35f62f75a078537e1a9e4e1d78c86ec9893a3c97d7a30"}, - {file = "aiohttp-3.9.0-cp310-cp310-win32.whl", hash = "sha256:2cbc14a13fb6b42d344e4f27746a4b03a2cb0c1c3c5b932b0d6ad8881aa390e3"}, - {file = "aiohttp-3.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:ac9669990e2016d644ba8ae4758688534aabde8dbbc81f9af129c3f5f01ca9cd"}, - {file = "aiohttp-3.9.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f8e05f5163528962ce1d1806fce763ab893b1c5b7ace0a3538cd81a90622f844"}, - {file = "aiohttp-3.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4afa8f71dba3a5a2e1e1282a51cba7341ae76585345c43d8f0e624882b622218"}, - {file = "aiohttp-3.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f929f4c9b9a00f3e6cc0587abb95ab9c05681f8b14e0fe1daecfa83ea90f8318"}, - {file = "aiohttp-3.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28185e36a78d247c55e9fbea2332d16aefa14c5276a582ce7a896231c6b1c208"}, - {file = "aiohttp-3.9.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a486ddf57ab98b6d19ad36458b9f09e6022de0381674fe00228ca7b741aacb2f"}, - {file = "aiohttp-3.9.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:70e851f596c00f40a2f00a46126c95c2e04e146015af05a9da3e4867cfc55911"}, - {file = "aiohttp-3.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c5b7bf8fe4d39886adc34311a233a2e01bc10eb4e842220235ed1de57541a896"}, - {file = "aiohttp-3.9.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c67a51ea415192c2e53e4e048c78bab82d21955b4281d297f517707dc836bf3d"}, - {file = "aiohttp-3.9.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:694df243f394629bcae2d8ed94c589a181e8ba8604159e6e45e7b22e58291113"}, - {file = "aiohttp-3.9.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3dd8119752dd30dd7bca7d4bc2a92a59be6a003e4e5c2cf7e248b89751b8f4b7"}, - {file = "aiohttp-3.9.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:eb6dfd52063186ac97b4caa25764cdbcdb4b10d97f5c5f66b0fa95052e744eb7"}, - {file = "aiohttp-3.9.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:d97c3e286d0ac9af6223bc132dc4bad6540b37c8d6c0a15fe1e70fb34f9ec411"}, - {file = "aiohttp-3.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:816f4db40555026e4cdda604a1088577c1fb957d02f3f1292e0221353403f192"}, - {file = "aiohttp-3.9.0-cp311-cp311-win32.whl", hash = "sha256:3abf0551874fecf95f93b58f25ef4fc9a250669a2257753f38f8f592db85ddea"}, - {file = 
"aiohttp-3.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:e18d92c3e9e22553a73e33784fcb0ed484c9874e9a3e96c16a8d6a1e74a0217b"}, - {file = "aiohttp-3.9.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:99ae01fb13a618b9942376df77a1f50c20a281390dad3c56a6ec2942e266220d"}, - {file = "aiohttp-3.9.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:05857848da443c8c12110d99285d499b4e84d59918a21132e45c3f0804876994"}, - {file = "aiohttp-3.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:317719d7f824eba55857fe0729363af58e27c066c731bc62cd97bc9c3d9c7ea4"}, - {file = "aiohttp-3.9.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1e3b3c107ccb0e537f309f719994a55621acd2c8fdf6d5ce5152aed788fb940"}, - {file = "aiohttp-3.9.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:45820ddbb276113ead8d4907a7802adb77548087ff5465d5c554f9aa3928ae7d"}, - {file = "aiohttp-3.9.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:05a183f1978802588711aed0dea31e697d760ce9055292db9dc1604daa9a8ded"}, - {file = "aiohttp-3.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51a4cd44788ea0b5e6bb8fa704597af3a30be75503a7ed1098bc5b8ffdf6c982"}, - {file = "aiohttp-3.9.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:673343fbc0c1ac44d0d2640addc56e97a052504beacd7ade0dc5e76d3a4c16e8"}, - {file = "aiohttp-3.9.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7e8a3b79b6d186a9c99761fd4a5e8dd575a48d96021f220ac5b5fa856e5dd029"}, - {file = "aiohttp-3.9.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6777a390e41e78e7c45dab43a4a0196c55c3b8c30eebe017b152939372a83253"}, - {file = "aiohttp-3.9.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7ae5f99a32c53731c93ac3075abd3e1e5cfbe72fc3eaac4c27c9dd64ba3b19fe"}, - {file = "aiohttp-3.9.0-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:f1e4f254e9c35d8965d377e065c4a8a55d396fe87c8e7e8429bcfdeeb229bfb3"}, - {file = "aiohttp-3.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:11ca808f9a6b63485059f5f6e164ef7ec826483c1212a44f268b3653c91237d8"}, - {file = "aiohttp-3.9.0-cp312-cp312-win32.whl", hash = "sha256:de3cc86f4ea8b4c34a6e43a7306c40c1275e52bfa9748d869c6b7d54aa6dad80"}, - {file = "aiohttp-3.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:ca4fddf84ac7d8a7d0866664936f93318ff01ee33e32381a115b19fb5a4d1202"}, - {file = "aiohttp-3.9.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:f09960b5bb1017d16c0f9e9f7fc42160a5a49fa1e87a175fd4a2b1a1833ea0af"}, - {file = "aiohttp-3.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8303531e2c17b1a494ffaeba48f2da655fe932c4e9a2626c8718403c83e5dd2b"}, - {file = "aiohttp-3.9.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4790e44f46a4aa07b64504089def5744d3b6780468c4ec3a1a36eb7f2cae9814"}, - {file = "aiohttp-3.9.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1d7edf74a36de0e5ca50787e83a77cf352f5504eb0ffa3f07000a911ba353fb"}, - {file = "aiohttp-3.9.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:94697c7293199c2a2551e3e3e18438b4cba293e79c6bc2319f5fd652fccb7456"}, - {file = "aiohttp-3.9.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a1b66dbb8a7d5f50e9e2ea3804b01e766308331d0cac76eb30c563ac89c95985"}, - {file = "aiohttp-3.9.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9623cfd9e85b76b83ef88519d98326d4731f8d71869867e47a0b979ffec61c73"}, - {file 
= "aiohttp-3.9.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f32c86dc967ab8c719fd229ce71917caad13cc1e8356ee997bf02c5b368799bf"}, - {file = "aiohttp-3.9.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f50b4663c3e0262c3a361faf440761fbef60ccdde5fe8545689a4b3a3c149fb4"}, - {file = "aiohttp-3.9.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:dcf71c55ec853826cd70eadb2b6ac62ec577416442ca1e0a97ad875a1b3a0305"}, - {file = "aiohttp-3.9.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:42fe4fd9f0dfcc7be4248c162d8056f1d51a04c60e53366b0098d1267c4c9da8"}, - {file = "aiohttp-3.9.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:76a86a9989ebf82ee61e06e2bab408aec4ea367dc6da35145c3352b60a112d11"}, - {file = "aiohttp-3.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f9e09a1c83521d770d170b3801eea19b89f41ccaa61d53026ed111cb6f088887"}, - {file = "aiohttp-3.9.0-cp38-cp38-win32.whl", hash = "sha256:a00ce44c21612d185c5275c5cba4bab8d7c1590f248638b667ed8a782fa8cd6f"}, - {file = "aiohttp-3.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:d5b9345ab92ebe6003ae11d8092ce822a0242146e6fa270889b9ba965457ca40"}, - {file = "aiohttp-3.9.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:98d21092bf2637c5fa724a428a69e8f5955f2182bff61f8036827cf6ce1157bf"}, - {file = "aiohttp-3.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:35a68cd63ca6aaef5707888f17a70c36efe62b099a4e853d33dc2e9872125be8"}, - {file = "aiohttp-3.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3d7f6235c7475658acfc1769d968e07ab585c79f6ca438ddfecaa9a08006aee2"}, - {file = "aiohttp-3.9.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db04d1de548f7a62d1dd7e7cdf7c22893ee168e22701895067a28a8ed51b3735"}, - {file = "aiohttp-3.9.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:536b01513d67d10baf6f71c72decdf492fb7433c5f2f133e9a9087379d4b6f31"}, - {file = "aiohttp-3.9.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c8b0a6487e8109427ccf638580865b54e2e3db4a6e0e11c02639231b41fc0f"}, - {file = "aiohttp-3.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7276fe0017664414fdc3618fca411630405f1aaf0cc3be69def650eb50441787"}, - {file = "aiohttp-3.9.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:23170247ef89ffa842a02bbfdc425028574d9e010611659abeb24d890bc53bb8"}, - {file = "aiohttp-3.9.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b1a2ea8252cacc7fd51df5a56d7a2bb1986ed39be9397b51a08015727dfb69bd"}, - {file = "aiohttp-3.9.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2d71abc15ff7047412ef26bf812dfc8d0d1020d664617f4913df2df469f26b76"}, - {file = "aiohttp-3.9.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:2d820162c8c2bdbe97d328cd4f417c955ca370027dce593345e437b2e9ffdc4d"}, - {file = "aiohttp-3.9.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:2779f5e7c70f7b421915fd47db332c81de365678180a9f3ab404088f87ba5ff9"}, - {file = "aiohttp-3.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:366bc870d7ac61726f32a489fbe3d1d8876e87506870be66b01aeb84389e967e"}, - {file = "aiohttp-3.9.0-cp39-cp39-win32.whl", hash = "sha256:1df43596b826022b14998f0460926ce261544fedefe0d2f653e1b20f49e96454"}, - {file = "aiohttp-3.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:9c196b30f1b1aa3363a69dd69079ae9bec96c2965c4707eaa6914ba099fb7d4f"}, - {file = "aiohttp-3.9.0.tar.gz", hash = 
"sha256:09f23292d29135025e19e8ff4f0a68df078fe4ee013bca0105b2e803989de92d"}, + {file = "aiohttp-3.9.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e1f80197f8b0b846a8d5cf7b7ec6084493950d0882cc5537fb7b96a69e3c8590"}, + {file = "aiohttp-3.9.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c72444d17777865734aa1a4d167794c34b63e5883abb90356a0364a28904e6c0"}, + {file = "aiohttp-3.9.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9b05d5cbe9dafcdc733262c3a99ccf63d2f7ce02543620d2bd8db4d4f7a22f83"}, + {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c4fa235d534b3547184831c624c0b7c1e262cd1de847d95085ec94c16fddcd5"}, + {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:289ba9ae8e88d0ba16062ecf02dd730b34186ea3b1e7489046fc338bdc3361c4"}, + {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bff7e2811814fa2271be95ab6e84c9436d027a0e59665de60edf44e529a42c1f"}, + {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81b77f868814346662c96ab36b875d7814ebf82340d3284a31681085c051320f"}, + {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b9c7426923bb7bd66d409da46c41e3fb40f5caf679da624439b9eba92043fa6"}, + {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8d44e7bf06b0c0a70a20f9100af9fcfd7f6d9d3913e37754c12d424179b4e48f"}, + {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:22698f01ff5653fe66d16ffb7658f582a0ac084d7da1323e39fd9eab326a1f26"}, + {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ca7ca5abfbfe8d39e653870fbe8d7710be7a857f8a8386fc9de1aae2e02ce7e4"}, + {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:8d7f98fde213f74561be1d6d3fa353656197f75d4edfbb3d94c9eb9b0fc47f5d"}, + {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5216b6082c624b55cfe79af5d538e499cd5f5b976820eac31951fb4325974501"}, + {file = "aiohttp-3.9.1-cp310-cp310-win32.whl", hash = "sha256:0e7ba7ff228c0d9a2cd66194e90f2bca6e0abca810b786901a569c0de082f489"}, + {file = "aiohttp-3.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:c7e939f1ae428a86e4abbb9a7c4732bf4706048818dfd979e5e2839ce0159f23"}, + {file = "aiohttp-3.9.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:df9cf74b9bc03d586fc53ba470828d7b77ce51b0582d1d0b5b2fb673c0baa32d"}, + {file = "aiohttp-3.9.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ecca113f19d5e74048c001934045a2b9368d77b0b17691d905af18bd1c21275e"}, + {file = "aiohttp-3.9.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8cef8710fb849d97c533f259103f09bac167a008d7131d7b2b0e3a33269185c0"}, + {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bea94403a21eb94c93386d559bce297381609153e418a3ffc7d6bf772f59cc35"}, + {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91c742ca59045dce7ba76cab6e223e41d2c70d79e82c284a96411f8645e2afff"}, + {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6c93b7c2e52061f0925c3382d5cb8980e40f91c989563d3d32ca280069fd6a87"}, + {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee2527134f95e106cc1653e9ac78846f3a2ec1004cf20ef4e02038035a74544d"}, + {file = 
"aiohttp-3.9.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11ff168d752cb41e8492817e10fb4f85828f6a0142b9726a30c27c35a1835f01"}, + {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b8c3a67eb87394386847d188996920f33b01b32155f0a94f36ca0e0c635bf3e3"}, + {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c7b5d5d64e2a14e35a9240b33b89389e0035e6de8dbb7ffa50d10d8b65c57449"}, + {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:69985d50a2b6f709412d944ffb2e97d0be154ea90600b7a921f95a87d6f108a2"}, + {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:c9110c06eaaac7e1f5562caf481f18ccf8f6fdf4c3323feab28a93d34cc646bd"}, + {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d737e69d193dac7296365a6dcb73bbbf53bb760ab25a3727716bbd42022e8d7a"}, + {file = "aiohttp-3.9.1-cp311-cp311-win32.whl", hash = "sha256:4ee8caa925aebc1e64e98432d78ea8de67b2272252b0a931d2ac3bd876ad5544"}, + {file = "aiohttp-3.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:a34086c5cc285be878622e0a6ab897a986a6e8bf5b67ecb377015f06ed316587"}, + {file = "aiohttp-3.9.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f800164276eec54e0af5c99feb9494c295118fc10a11b997bbb1348ba1a52065"}, + {file = "aiohttp-3.9.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:500f1c59906cd142d452074f3811614be04819a38ae2b3239a48b82649c08821"}, + {file = "aiohttp-3.9.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0b0a6a36ed7e164c6df1e18ee47afbd1990ce47cb428739d6c99aaabfaf1b3af"}, + {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69da0f3ed3496808e8cbc5123a866c41c12c15baaaead96d256477edf168eb57"}, + {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:176df045597e674fa950bf5ae536be85699e04cea68fa3a616cf75e413737eb5"}, + {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b796b44111f0cab6bbf66214186e44734b5baab949cb5fb56154142a92989aeb"}, + {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f27fdaadce22f2ef950fc10dcdf8048407c3b42b73779e48a4e76b3c35bca26c"}, + {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bcb6532b9814ea7c5a6a3299747c49de30e84472fa72821b07f5a9818bce0f66"}, + {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:54631fb69a6e44b2ba522f7c22a6fb2667a02fd97d636048478db2fd8c4e98fe"}, + {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:4b4c452d0190c5a820d3f5c0f3cd8a28ace48c54053e24da9d6041bf81113183"}, + {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:cae4c0c2ca800c793cae07ef3d40794625471040a87e1ba392039639ad61ab5b"}, + {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:565760d6812b8d78d416c3c7cfdf5362fbe0d0d25b82fed75d0d29e18d7fc30f"}, + {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:54311eb54f3a0c45efb9ed0d0a8f43d1bc6060d773f6973efd90037a51cd0a3f"}, + {file = "aiohttp-3.9.1-cp312-cp312-win32.whl", hash = "sha256:85c3e3c9cb1d480e0b9a64c658cd66b3cfb8e721636ab8b0e746e2d79a7a9eed"}, + {file = "aiohttp-3.9.1-cp312-cp312-win_amd64.whl", hash = "sha256:11cb254e397a82efb1805d12561e80124928e04e9c4483587ce7390b3866d213"}, + {file = 
"aiohttp-3.9.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8a22a34bc594d9d24621091d1b91511001a7eea91d6652ea495ce06e27381f70"}, + {file = "aiohttp-3.9.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:598db66eaf2e04aa0c8900a63b0101fdc5e6b8a7ddd805c56d86efb54eb66672"}, + {file = "aiohttp-3.9.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2c9376e2b09895c8ca8b95362283365eb5c03bdc8428ade80a864160605715f1"}, + {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41473de252e1797c2d2293804e389a6d6986ef37cbb4a25208de537ae32141dd"}, + {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9c5857612c9813796960c00767645cb5da815af16dafb32d70c72a8390bbf690"}, + {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ffcd828e37dc219a72c9012ec44ad2e7e3066bec6ff3aaa19e7d435dbf4032ca"}, + {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:219a16763dc0294842188ac8a12262b5671817042b35d45e44fd0a697d8c8361"}, + {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f694dc8a6a3112059258a725a4ebe9acac5fe62f11c77ac4dcf896edfa78ca28"}, + {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:bcc0ea8d5b74a41b621ad4a13d96c36079c81628ccc0b30cfb1603e3dfa3a014"}, + {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:90ec72d231169b4b8d6085be13023ece8fa9b1bb495e4398d847e25218e0f431"}, + {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:cf2a0ac0615842b849f40c4d7f304986a242f1e68286dbf3bd7a835e4f83acfd"}, + {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:0e49b08eafa4f5707ecfb321ab9592717a319e37938e301d462f79b4e860c32a"}, + {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2c59e0076ea31c08553e868cec02d22191c086f00b44610f8ab7363a11a5d9d8"}, + {file = "aiohttp-3.9.1-cp38-cp38-win32.whl", hash = "sha256:4831df72b053b1eed31eb00a2e1aff6896fb4485301d4ccb208cac264b648db4"}, + {file = "aiohttp-3.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:3135713c5562731ee18f58d3ad1bf41e1d8883eb68b363f2ffde5b2ea4b84cc7"}, + {file = "aiohttp-3.9.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:cfeadf42840c1e870dc2042a232a8748e75a36b52d78968cda6736de55582766"}, + {file = "aiohttp-3.9.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:70907533db712f7aa791effb38efa96f044ce3d4e850e2d7691abd759f4f0ae0"}, + {file = "aiohttp-3.9.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cdefe289681507187e375a5064c7599f52c40343a8701761c802c1853a504558"}, + {file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7481f581251bb5558ba9f635db70908819caa221fc79ee52a7f58392778c636"}, + {file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:49f0c1b3c2842556e5de35f122fc0f0b721334ceb6e78c3719693364d4af8499"}, + {file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0d406b01a9f5a7e232d1b0d161b40c05275ffbcbd772dc18c1d5a570961a1ca4"}, + {file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d8e4450e7fe24d86e86b23cc209e0023177b6d59502e33807b732d2deb6975f"}, + {file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:3c0266cd6f005e99f3f51e583012de2778e65af6b73860038b968a0a8888487a"}, + {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab221850108a4a063c5b8a70f00dd7a1975e5a1713f87f4ab26a46e5feac5a0e"}, + {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c88a15f272a0ad3d7773cf3a37cc7b7d077cbfc8e331675cf1346e849d97a4e5"}, + {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:237533179d9747080bcaad4d02083ce295c0d2eab3e9e8ce103411a4312991a0"}, + {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:02ab6006ec3c3463b528374c4cdce86434e7b89ad355e7bf29e2f16b46c7dd6f"}, + {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04fa38875e53eb7e354ece1607b1d2fdee2d175ea4e4d745f6ec9f751fe20c7c"}, + {file = "aiohttp-3.9.1-cp39-cp39-win32.whl", hash = "sha256:82eefaf1a996060602f3cc1112d93ba8b201dbf5d8fd9611227de2003dddb3b7"}, + {file = "aiohttp-3.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:9b05d33ff8e6b269e30a7957bd3244ffbce2a7a35a81b81c382629b80af1a8bf"}, + {file = "aiohttp-3.9.1.tar.gz", hash = "sha256:8fc49a87ac269d4529da45871e2ffb6874e87779c3d0e2ccd813c0899221239d"}, ] [package.dependencies] @@ -146,7 +144,6 @@ speedups = ["Brotli", "aiodns", "brotlicffi"] name = "aioitertools" version = "0.11.0" description = "itertools and builtins for AsyncIO and mixed iterables" -category = "main" optional = true python-versions = ">=3.6" files = [ @@ -161,7 +158,6 @@ typing_extensions = {version = ">=4.0", markers = "python_version < \"3.10\""} name = "aiosignal" version = "1.3.1" description = "aiosignal: a list of registered asynchronous callbacks" -category = "main" optional = true python-versions = ">=3.7" files = [ @@ -174,14 +170,13 @@ frozenlist = ">=1.1.0" [[package]] name = "annotated-types" -version = "0.5.0" +version = "0.6.0" description = "Reusable constraint types to use with typing.Annotated" -category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "annotated_types-0.5.0-py3-none-any.whl", hash = "sha256:58da39888f92c276ad970249761ebea80ba544b77acddaa1a4d6cf78287d45fd"}, - {file = "annotated_types-0.5.0.tar.gz", hash = "sha256:47cdc3490d9ac1506ce92c7aaa76c579dc3509ff11e098fc867e5130ab7be802"}, + {file = "annotated_types-0.6.0-py3-none-any.whl", hash = "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43"}, + {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"}, ] [package.dependencies] @@ -191,7 +186,6 @@ typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.9\""} name = "async-timeout" version = "4.0.3" description = "Timeout context manager for asyncio programs" -category = "main" optional = true python-versions = ">=3.7" files = [ @@ -201,49 +195,47 @@ files = [ [[package]] name = "attrs" -version = "23.1.0" +version = "23.2.0" description = "Classes Without Boilerplate" -category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, - {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, ] 
[package.extras] cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] -dev = ["attrs[docs,tests]", "pre-commit"] +dev = ["attrs[tests]", "pre-commit"] docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] tests = ["attrs[tests-no-zope]", "zope-interface"] -tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] [[package]] name = "aws-sam-translator" -version = "1.82.0" +version = "1.83.0" description = "AWS SAM Translator is a library that transform SAM templates into AWS CloudFormation templates" -category = "dev" optional = false -python-versions = ">=3.7, <=4.0, !=4.0" +python-versions = ">=3.8, <=4.0, !=4.0" files = [ - {file = "aws-sam-translator-1.82.0.tar.gz", hash = "sha256:f78e58194461635aef6255d04e82a9b690e331ca9fd669d1401bf7f9a93ae49f"}, - {file = "aws_sam_translator-1.82.0-py3-none-any.whl", hash = "sha256:29ba61f2a70b2b1cf0c76b92b78a23c7cdd19ea1b0a5992753180b56d040d20b"}, + {file = "aws-sam-translator-1.83.0.tar.gz", hash = "sha256:46025ca8894a56eacd87eb0e4f9af5c01c567c9a734b97fbba353bffd56ba5dc"}, + {file = "aws_sam_translator-1.83.0-py3-none-any.whl", hash = "sha256:022246b4745cc9067f88ab2b051f49898606bcf0222e22b9da41063e5619c6f9"}, ] [package.dependencies] -boto3 = ">=1.19.5,<2.0.0" +boto3 = ">=1.19.5,<2.dev0" jsonschema = ">=3.2,<5" pydantic = ">=1.8,<3" -typing-extensions = ">=4.4,<5" +typing-extensions = ">=4.4" [package.extras] -dev = ["black (==23.3.0)", "boto3 (>=1.23,<2)", "boto3-stubs[appconfig,serverlessrepo] (>=1.19.5,<2.0.0)", "coverage (>=5.3,<8)", "dateparser (>=1.1,<2.0)", "importlib-metadata", "mypy (>=1.3.0,<1.4.0)", "parameterized (>=0.7,<1.0)", "pytest (>=6.2,<8)", "pytest-cov (>=2.10,<5)", "pytest-env (>=0.6,<1)", "pytest-rerunfailures (>=9.1,<12)", "pytest-xdist (>=2.5,<4)", "pyyaml (>=6.0,<7.0)", "requests (>=2.28,<3.0)", "ruamel.yaml (==0.17.21)", "ruff (==0.0.284)", "tenacity (>=8.0,<9.0)", "types-PyYAML (>=6.0,<7.0)", "types-jsonschema (>=3.2,<4.0)"] +dev = ["black (==23.10.1)", "boto3 (>=1.23,<2)", "boto3-stubs[appconfig,serverlessrepo] (>=1.19.5,<2.dev0)", "coverage (>=5.3,<8)", "dateparser (>=1.1,<2.0)", "mypy (>=1.3.0,<1.4.0)", "parameterized (>=0.7,<1.0)", "pytest (>=6.2,<8)", "pytest-cov (>=2.10,<5)", "pytest-env (>=0.6,<1)", "pytest-rerunfailures (>=9.1,<12)", "pytest-xdist (>=2.5,<4)", "pyyaml (>=6.0,<7.0)", "requests (>=2.28,<3.0)", "ruamel.yaml (==0.17.21)", "ruff (>=0.1.0,<0.2.0)", "tenacity (>=8.0,<9.0)", "types-PyYAML (>=6.0,<7.0)", "types-jsonschema (>=3.2,<4.0)"] [[package]] name = "aws-xray-sdk" version = "2.12.1" description = "The AWS X-Ray SDK for Python (the SDK) enables Python developers to record and emit information from within their applications to the AWS X-Ray service." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -257,18 +249,17 @@ wrapt = "*" [[package]] name = "azure-core" -version = "1.29.4" +version = "1.29.7" description = "Microsoft Azure Core Library for Python" -category = "main" optional = true python-versions = ">=3.7" files = [ - {file = "azure-core-1.29.4.tar.gz", hash = "sha256:500b3aa9bf2e90c5ccc88bb105d056114ca0ce7d0ce73afb8bc4d714b2fc7568"}, - {file = "azure_core-1.29.4-py3-none-any.whl", hash = "sha256:b03261bcba22c0b9290faf9999cedd23e849ed2577feee90515694cea6bc74bf"}, + {file = "azure-core-1.29.7.tar.gz", hash = "sha256:2944faf1a7ff1558b1f457cabf60f279869cabaeef86b353bed8eb032c7d8c5e"}, + {file = "azure_core-1.29.7-py3-none-any.whl", hash = "sha256:95a7b41b4af102e5fcdfac9500fcc82ff86e936c7145a099b7848b9ac0501250"}, ] [package.dependencies] -requests = ">=2.18.4" +requests = ">=2.21.0" six = ">=1.11.0" typing-extensions = ">=4.6.0" @@ -279,7 +270,6 @@ aio = ["aiohttp (>=3.0)"] name = "azure-datalake-store" version = "0.0.53" description = "Azure Data Lake Store Filesystem Client Library for Python" -category = "main" optional = true python-versions = "*" files = [ @@ -294,32 +284,30 @@ requests = ">=2.20.0" [[package]] name = "azure-identity" -version = "1.14.0" +version = "1.15.0" description = "Microsoft Azure Identity Library for Python" -category = "main" optional = true python-versions = ">=3.7" files = [ - {file = "azure-identity-1.14.0.zip", hash = "sha256:72441799f8c5c89bfe21026965e266672a7c5d050c2c65119ef899dd5362e2b1"}, - {file = "azure_identity-1.14.0-py3-none-any.whl", hash = "sha256:edabf0e010eb85760e1dd19424d5e8f97ba2c9caff73a16e7b30ccbdbcce369b"}, + {file = "azure-identity-1.15.0.tar.gz", hash = "sha256:4c28fc246b7f9265610eb5261d65931183d019a23d4b0e99357facb2e6c227c8"}, + {file = "azure_identity-1.15.0-py3-none-any.whl", hash = "sha256:a14b1f01c7036f11f148f22cd8c16e05035293d714458d6b44ddf534d93eb912"}, ] [package.dependencies] -azure-core = ">=1.11.0,<2.0.0" +azure-core = ">=1.23.0,<2.0.0" cryptography = ">=2.5" -msal = ">=1.20.0,<2.0.0" +msal = ">=1.24.0,<2.0.0" msal-extensions = ">=0.3.0,<2.0.0" [[package]] name = "azure-storage-blob" -version = "12.18.1" +version = "12.19.0" description = "Microsoft Azure Blob Storage Client Library for Python" -category = "main" optional = true python-versions = ">=3.7" files = [ - {file = "azure-storage-blob-12.18.1.tar.gz", hash = "sha256:d3265c2403c28d8881326c365e9cf7ed2ad55fdac98404eae753548702b31ba2"}, - {file = "azure_storage_blob-12.18.1-py3-none-any.whl", hash = "sha256:00b92568e91d608c04dfd4814c3b180818e690023493bb984c22dfc1a8a96e55"}, + {file = "azure-storage-blob-12.19.0.tar.gz", hash = "sha256:26c0a4320a34a3c2a1b74528ba6812ebcb632a04cd67b1c7377232c4b01a5897"}, + {file = "azure_storage_blob-12.19.0-py3-none-any.whl", hash = "sha256:7bbc2c9c16678f7a420367fef6b172ba8730a7e66df7f4d7a55d5b3c8216615b"}, ] [package.dependencies] @@ -335,7 +323,6 @@ aio = ["azure-core[aio] (>=1.28.0,<2.0.0)"] name = "blinker" version = "1.7.0" description = "Fast, simple object-to-object and broadcast signaling" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -345,49 +332,49 @@ files = [ [[package]] name = "boto3" -version = "1.28.17" +version = "1.34.22" description = "The AWS SDK for Python" -category = "main" optional = false -python-versions = ">= 3.7" +python-versions = ">= 3.8" files = [ - {file = "boto3-1.28.17-py3-none-any.whl", hash = "sha256:bca0526f819e0f19c0f1e6eba3e2d1d6b6a92a45129f98c0d716e5aab6d9444b"}, - {file = 
"boto3-1.28.17.tar.gz", hash = "sha256:90f7cfb5e1821af95b1fc084bc50e6c47fa3edc99f32de1a2591faa0c546bea7"}, + {file = "boto3-1.34.22-py3-none-any.whl", hash = "sha256:5909cd1393143576265c692e908a9ae495492c04a0ffd4bae8578adc2e44729e"}, + {file = "boto3-1.34.22.tar.gz", hash = "sha256:a98c0b86f6044ff8314cc2361e1ef574d674318313ab5606ccb4a6651c7a3f8c"}, ] [package.dependencies] -botocore = ">=1.31.17,<1.32.0" +botocore = ">=1.34.22,<1.35.0" jmespath = ">=0.7.1,<2.0.0" -s3transfer = ">=0.6.0,<0.7.0" +s3transfer = ">=0.10.0,<0.11.0" [package.extras] crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.31.17" +version = "1.34.22" description = "Low-level, data-driven core of boto 3." -category = "main" optional = false -python-versions = ">= 3.7" +python-versions = ">= 3.8" files = [ - {file = "botocore-1.31.17-py3-none-any.whl", hash = "sha256:6ac34a1d34aa3750e78b77b8596617e2bab938964694d651939dba2cbde2c12b"}, - {file = "botocore-1.31.17.tar.gz", hash = "sha256:396459065dba4339eb4da4ec8b4e6599728eb89b7caaceea199e26f7d824a41c"}, + {file = "botocore-1.34.22-py3-none-any.whl", hash = "sha256:e5f7775975b9213507fbcf846a96b7a2aec2a44fc12a44585197b014a4ab0889"}, + {file = "botocore-1.34.22.tar.gz", hash = "sha256:c47ba4286c576150d1b6ca6df69a87b5deff3d23bd84da8bcf8431ebac3c40ba"}, ] [package.dependencies] jmespath = ">=0.7.1,<2.0.0" python-dateutil = ">=2.1,<3.0.0" -urllib3 = ">=1.25.4,<1.27" +urllib3 = [ + {version = ">=1.25.4,<1.27", markers = "python_version < \"3.10\""}, + {version = ">=1.25.4,<2.1", markers = "python_version >= \"3.10\""}, +] [package.extras] -crt = ["awscrt (==0.16.26)"] +crt = ["awscrt (==0.19.19)"] [[package]] name = "build" version = "1.0.3" description = "A simple, correct Python build frontend" -category = "dev" optional = false python-versions = ">= 3.7" files = [ @@ -410,100 +397,85 @@ virtualenv = ["virtualenv (>=20.0.35)"] [[package]] name = "cachetools" -version = "5.3.1" +version = "5.3.2" description = "Extensible memoizing collections and decorators" -category = "main" optional = true python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.1-py3-none-any.whl", hash = "sha256:95ef631eeaea14ba2e36f06437f36463aac3a096799e876ee55e5cdccb102590"}, - {file = "cachetools-5.3.1.tar.gz", hash = "sha256:dce83f2d9b4e1f732a8cd44af8e8fab2dbe46201467fc98b3ef8f269092bf62b"}, + {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, + {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, ] [[package]] name = "certifi" -version = "2023.7.22" +version = "2023.11.17" description = "Python package for providing Mozilla's CA Bundle." -category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"}, - {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"}, + {file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"}, + {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"}, ] [[package]] name = "cffi" -version = "1.15.1" +version = "1.16.0" description = "Foreign Function Interface for Python calling C code." 
-category = "main" optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, - {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, - {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, - {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"}, - {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"}, - {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"}, - {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"}, - {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"}, - {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"}, - {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"}, - {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"}, - {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"}, - {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"}, - {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"}, - {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"}, - {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"}, - {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"}, - {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"}, - {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"}, - {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"}, - {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"}, - {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"}, - {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"}, - {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"}, - {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"}, - {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"}, - {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"}, - {file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"}, - {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"}, - {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"}, - {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"}, - {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"}, - {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"}, - {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, - {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, + {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, + {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, + {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, + {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, + {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, + {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, + {file = 
"cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, + {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, + {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, + {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, + {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, + {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, + {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, + {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, + 
{file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, + {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, ] [package.dependencies] @@ -513,7 +485,6 @@ pycparser = "*" name = "cfgv" version = "3.4.0" description = "Validate configuration and produce human readable error messages." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -523,18 +494,17 @@ files = [ [[package]] name = "cfn-lint" -version = "0.83.6" +version = "0.84.0" description = "Checks CloudFormation templates for practices and behaviour that could potentially be improved" -category = "dev" optional = false -python-versions = ">=3.7, <=4.0, !=4.0" +python-versions = ">=3.8, <=4.0, !=4.0" files = [ - {file = "cfn-lint-0.83.6.tar.gz", hash = "sha256:4666d3b7ceccab1e6b28cc75731a0a199185060f38a3f97f0207945ccf358d24"}, - {file = "cfn_lint-0.83.6-py3-none-any.whl", hash = "sha256:97f05a3649a9480f88737aefadc926742081692cd31c774be59cdd1f88440795"}, + {file = "cfn-lint-0.84.0.tar.gz", hash = "sha256:7a819ffa48ab23f775037ddb0d9330ba206d547439f69bc72f76f1183d8bc124"}, + {file = "cfn_lint-0.84.0-py3-none-any.whl", hash = "sha256:1bf18bca82c6666fd204a265a203bd307816e8d31fe476301b105c5370e306e5"}, ] [package.dependencies] -aws-sam-translator = ">=1.82.0" +aws-sam-translator = ">=1.83.0" jschema-to-python = ">=1.2.3,<1.3.0" jsonpatch = "*" jsonschema = ">=3.0,<5" @@ -547,94 +517,107 @@ sympy = ">=1.0.0" [[package]] name = "charset-normalizer" -version = "3.2.0" +version = "3.3.2" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -category = "main" optional = false python-versions = ">=3.7.0" files = [ - {file = "charset-normalizer-3.2.0.tar.gz", hash = "sha256:3bb3d25a8e6c0aedd251753a79ae98a093c7e7b471faa3aa9a93a81431987ace"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b87549028f680ca955556e3bd57013ab47474c3124dc069faa0b6545b6c9710"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7c70087bfee18a42b4040bb9ec1ca15a08242cf5867c58726530bdf3945672ed"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a103b3a7069b62f5d4890ae1b8f0597618f628b286b03d4bc9195230b154bfa9"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94aea8eff76ee6d1cdacb07dd2123a68283cb5569e0250feab1240058f53b623"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db901e2ac34c931d73054d9797383d0f8009991e723dab15109740a63e7f902a"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0dac0ff919ba34d4df1b6131f59ce95b08b9065233446be7e459f95554c0dc8"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:193cbc708ea3aca45e7221ae58f0fd63f933753a9bfb498a3b474878f12caaad"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09393e1b2a9461950b1c9a45d5fd251dc7c6f228acab64da1c9c0165d9c7765c"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:baacc6aee0b2ef6f3d308e197b5d7a81c0e70b06beae1f1fcacffdbd124fe0e3"}, - {file = 
"charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bf420121d4c8dce6b889f0e8e4ec0ca34b7f40186203f06a946fa0276ba54029"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c04a46716adde8d927adb9457bbe39cf473e1e2c2f5d0a16ceb837e5d841ad4f"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:aaf63899c94de41fe3cf934601b0f7ccb6b428c6e4eeb80da72c58eab077b19a"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62e51710986674142526ab9f78663ca2b0726066ae26b78b22e0f5e571238dd"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-win32.whl", hash = "sha256:04e57ab9fbf9607b77f7d057974694b4f6b142da9ed4a199859d9d4d5c63fe96"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:48021783bdf96e3d6de03a6e39a1171ed5bd7e8bb93fc84cc649d11490f87cea"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4957669ef390f0e6719db3613ab3a7631e68424604a7b448f079bee145da6e09"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:46fb8c61d794b78ec7134a715a3e564aafc8f6b5e338417cb19fe9f57a5a9bf2"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f779d3ad205f108d14e99bb3859aa7dd8e9c68874617c72354d7ecaec2a054ac"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f25c229a6ba38a35ae6e25ca1264621cc25d4d38dca2942a7fce0b67a4efe918"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2efb1bd13885392adfda4614c33d3b68dee4921fd0ac1d3988f8cbb7d589e72a"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f30b48dd7fa1474554b0b0f3fdfdd4c13b5c737a3c6284d3cdc424ec0ffff3a"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:246de67b99b6851627d945db38147d1b209a899311b1305dd84916f2b88526c6"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd9b3b31adcb054116447ea22caa61a285d92e94d710aa5ec97992ff5eb7cf3"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8c2f5e83493748286002f9369f3e6607c565a6a90425a3a1fef5ae32a36d749d"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3170c9399da12c9dc66366e9d14da8bf7147e1e9d9ea566067bbce7bb74bd9c2"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7a4826ad2bd6b07ca615c74ab91f32f6c96d08f6fcc3902ceeedaec8cdc3bcd6"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:3b1613dd5aee995ec6d4c69f00378bbd07614702a315a2cf6c1d21461fe17c23"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e608aafdb55eb9f255034709e20d5a83b6d60c054df0802fa9c9883d0a937aa"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-win32.whl", hash = "sha256:f2a1d0fd4242bd8643ce6f98927cf9c04540af6efa92323e9d3124f57727bfc1"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:681eb3d7e02e3c3655d1b16059fbfb605ac464c834a0c629048a30fad2b27489"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c57921cda3a80d0f2b8aec7e25c8aa14479ea92b5b51b6876d975d925a2ea346"}, - 
{file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41b25eaa7d15909cf3ac4c96088c1f266a9a93ec44f87f1d13d4a0e86c81b982"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f058f6963fd82eb143c692cecdc89e075fa0828db2e5b291070485390b2f1c9c"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7647ebdfb9682b7bb97e2a5e7cb6ae735b1c25008a70b906aecca294ee96cf4"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eef9df1eefada2c09a5e7a40991b9fc6ac6ef20b1372abd48d2794a316dc0449"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e03b8895a6990c9ab2cdcd0f2fe44088ca1c65ae592b8f795c3294af00a461c3"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ee4006268ed33370957f55bf2e6f4d263eaf4dc3cfc473d1d90baff6ed36ce4a"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c4983bf937209c57240cff65906b18bb35e64ae872da6a0db937d7b4af845dd7"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:3bb7fda7260735efe66d5107fb7e6af6a7c04c7fce9b2514e04b7a74b06bf5dd"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:72814c01533f51d68702802d74f77ea026b5ec52793c791e2da806a3844a46c3"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:70c610f6cbe4b9fce272c407dd9d07e33e6bf7b4aa1b7ffb6f6ded8e634e3592"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-win32.whl", hash = "sha256:a401b4598e5d3f4a9a811f3daf42ee2291790c7f9d74b18d75d6e21dda98a1a1"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:c0b21078a4b56965e2b12f247467b234734491897e99c1d51cee628da9786959"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:95eb302ff792e12aba9a8b8f8474ab229a83c103d74a750ec0bd1c1eea32e669"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1a100c6d595a7f316f1b6f01d20815d916e75ff98c27a01ae817439ea7726329"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6339d047dab2780cc6220f46306628e04d9750f02f983ddb37439ca47ced7149"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b749b9cc6ee664a3300bb3a273c1ca8068c46be705b6c31cf5d276f8628a94"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a38856a971c602f98472050165cea2cdc97709240373041b69030be15047691f"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f87f746ee241d30d6ed93969de31e5ffd09a2961a051e60ae6bddde9ec3583aa"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89f1b185a01fe560bc8ae5f619e924407efca2191b56ce749ec84982fc59a32a"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1c8a2f4c69e08e89632defbfabec2feb8a8d99edc9f89ce33c4b9e36ab63037"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2f4ac36d8e2b4cc1aa71df3dd84ff8efbe3bfb97ac41242fbcfc053c67434f46"}, - {file = 
"charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a386ebe437176aab38c041de1260cd3ea459c6ce5263594399880bbc398225b2"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ccd16eb18a849fd8dcb23e23380e2f0a354e8daa0c984b8a732d9cfaba3a776d"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e6a5bf2cba5ae1bb80b154ed68a3cfa2fa00fde979a7f50d6598d3e17d9ac20c"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45de3f87179c1823e6d9e32156fb14c1927fcc9aba21433f088fdfb555b77c10"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-win32.whl", hash = "sha256:1000fba1057b92a65daec275aec30586c3de2401ccdcd41f8a5c1e2c87078706"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b2c760cfc7042b27ebdb4a43a4453bd829a5742503599144d54a032c5dc7e9e"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:855eafa5d5a2034b4621c74925d89c5efef61418570e5ef9b37717d9c796419c"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:203f0c8871d5a7987be20c72442488a0b8cfd0f43b7973771640fc593f56321f"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e857a2232ba53ae940d3456f7533ce6ca98b81917d47adc3c7fd55dad8fab858"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e86d77b090dbddbe78867a0275cb4df08ea195e660f1f7f13435a4649e954e5"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fb39a81950ec280984b3a44f5bd12819953dc5fa3a7e6fa7a80db5ee853952"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dee8e57f052ef5353cf608e0b4c871aee320dd1b87d351c28764fc0ca55f9f4"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8700f06d0ce6f128de3ccdbc1acaea1ee264d2caa9ca05daaf492fde7c2a7200"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1920d4ff15ce893210c1f0c0e9d19bfbecb7983c76b33f046c13a8ffbd570252"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c1c76a1743432b4b60ab3358c937a3fe1341c828ae6194108a94c69028247f22"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f7560358a6811e52e9c4d142d497f1a6e10103d3a6881f18d04dbce3729c0e2c"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c8063cf17b19661471ecbdb3df1c84f24ad2e389e326ccaf89e3fb2484d8dd7e"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:cd6dbe0238f7743d0efe563ab46294f54f9bc8f4b9bcf57c3c666cc5bc9d1299"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1249cbbf3d3b04902ff081ffbb33ce3377fa6e4c7356f759f3cd076cc138d020"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-win32.whl", hash = "sha256:6c409c0deba34f147f77efaa67b8e4bb83d2f11c8806405f76397ae5b8c0d1c9"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:7095f6fbfaa55defb6b733cfeb14efaae7a29f0b59d8cf213be4e7ca0b857b80"}, - {file = "charset_normalizer-3.2.0-py3-none-any.whl", hash = "sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6"}, + {file = "charset-normalizer-3.3.2.tar.gz", hash = 
"sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = 
"sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = 
"charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = 
"sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, ] [[package]] name = "click" version = "8.1.7" description = "Composable command line interface toolkit" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -649,7 +632,6 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -661,7 +643,6 @@ files = [ name = "coverage" version = "7.4.0" description = "Code coverage measurement for Python" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -727,55 +708,62 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "41.0.6" +version = "42.0.0" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." -category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-41.0.6-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:0f27acb55a4e77b9be8d550d762b0513ef3fc658cd3eb15110ebbcbd626db12c"}, - {file = "cryptography-41.0.6-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:ae236bb8760c1e55b7a39b6d4d32d2279bc6c7c8500b7d5a13b6fb9fc97be35b"}, - {file = "cryptography-41.0.6-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afda76d84b053923c27ede5edc1ed7d53e3c9f475ebaf63c68e69f1403c405a8"}, - {file = "cryptography-41.0.6-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da46e2b5df770070412c46f87bac0849b8d685c5f2679771de277a422c7d0b86"}, - {file = "cryptography-41.0.6-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ff369dd19e8fe0528b02e8df9f2aeb2479f89b1270d90f96a63500afe9af5cae"}, - {file = "cryptography-41.0.6-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:b648fe2a45e426aaee684ddca2632f62ec4613ef362f4d681a9a6283d10e079d"}, - {file = "cryptography-41.0.6-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:5daeb18e7886a358064a68dbcaf441c036cbdb7da52ae744e7b9207b04d3908c"}, - {file = "cryptography-41.0.6-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:068bc551698c234742c40049e46840843f3d98ad7ce265fd2bd4ec0d11306596"}, - {file = "cryptography-41.0.6-cp37-abi3-win32.whl", hash = "sha256:2132d5865eea673fe6712c2ed5fb4fa49dba10768bb4cc798345748380ee3660"}, - {file = "cryptography-41.0.6-cp37-abi3-win_amd64.whl", hash = "sha256:48783b7e2bef51224020efb61b42704207dde583d7e371ef8fc2a5fb6c0aabc7"}, - {file = "cryptography-41.0.6-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:8efb2af8d4ba9dbc9c9dd8f04d19a7abb5b49eab1f3694e7b5a16a5fc2856f5c"}, - {file = "cryptography-41.0.6-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c5a550dc7a3b50b116323e3d376241829fd326ac47bc195e04eb33a8170902a9"}, - {file = 
"cryptography-41.0.6-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:85abd057699b98fce40b41737afb234fef05c67e116f6f3650782c10862c43da"}, - {file = "cryptography-41.0.6-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f39812f70fc5c71a15aa3c97b2bbe213c3f2a460b79bd21c40d033bb34a9bf36"}, - {file = "cryptography-41.0.6-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:742ae5e9a2310e9dade7932f9576606836ed174da3c7d26bc3d3ab4bd49b9f65"}, - {file = "cryptography-41.0.6-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:35f3f288e83c3f6f10752467c48919a7a94b7d88cc00b0668372a0d2ad4f8ead"}, - {file = "cryptography-41.0.6-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4d03186af98b1c01a4eda396b137f29e4e3fb0173e30f885e27acec8823c1b09"}, - {file = "cryptography-41.0.6-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:b27a7fd4229abef715e064269d98a7e2909ebf92eb6912a9603c7e14c181928c"}, - {file = "cryptography-41.0.6-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:398ae1fc711b5eb78e977daa3cbf47cec20f2c08c5da129b7a296055fbb22aed"}, - {file = "cryptography-41.0.6-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7e00fb556bda398b99b0da289ce7053639d33b572847181d6483ad89835115f6"}, - {file = "cryptography-41.0.6-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:60e746b11b937911dc70d164060d28d273e31853bb359e2b2033c9e93e6f3c43"}, - {file = "cryptography-41.0.6-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3288acccef021e3c3c10d58933f44e8602cf04dba96d9796d70d537bb2f4bbc4"}, - {file = "cryptography-41.0.6.tar.gz", hash = "sha256:422e3e31d63743855e43e5a6fcc8b4acab860f560f9321b0ee6269cc7ed70cc3"}, + {file = "cryptography-42.0.0-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:c640b0ef54138fde761ec99a6c7dc4ce05e80420262c20fa239e694ca371d434"}, + {file = "cryptography-42.0.0-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:678cfa0d1e72ef41d48993a7be75a76b0725d29b820ff3cfd606a5b2b33fda01"}, + {file = "cryptography-42.0.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:146e971e92a6dd042214b537a726c9750496128453146ab0ee8971a0299dc9bd"}, + {file = "cryptography-42.0.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87086eae86a700307b544625e3ba11cc600c3c0ef8ab97b0fda0705d6db3d4e3"}, + {file = "cryptography-42.0.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:0a68bfcf57a6887818307600c3c0ebc3f62fbb6ccad2240aa21887cda1f8df1b"}, + {file = "cryptography-42.0.0-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:5a217bca51f3b91971400890905a9323ad805838ca3fa1e202a01844f485ee87"}, + {file = "cryptography-42.0.0-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:ca20550bb590db16223eb9ccc5852335b48b8f597e2f6f0878bbfd9e7314eb17"}, + {file = "cryptography-42.0.0-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:33588310b5c886dfb87dba5f013b8d27df7ffd31dc753775342a1e5ab139e59d"}, + {file = "cryptography-42.0.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9515ea7f596c8092fdc9902627e51b23a75daa2c7815ed5aa8cf4f07469212ec"}, + {file = "cryptography-42.0.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:35cf6ed4c38f054478a9df14f03c1169bb14bd98f0b1705751079b25e1cb58bc"}, + {file = "cryptography-42.0.0-cp37-abi3-win32.whl", hash = "sha256:8814722cffcfd1fbd91edd9f3451b88a8f26a5fd41b28c1c9193949d1c689dc4"}, + {file = "cryptography-42.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:a2a8d873667e4fd2f34aedab02ba500b824692c6542e017075a2efc38f60a4c0"}, + {file = 
"cryptography-42.0.0-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:8fedec73d590fd30c4e3f0d0f4bc961aeca8390c72f3eaa1a0874d180e868ddf"}, + {file = "cryptography-42.0.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be41b0c7366e5549265adf2145135dca107718fa44b6e418dc7499cfff6b4689"}, + {file = "cryptography-42.0.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ca482ea80626048975360c8e62be3ceb0f11803180b73163acd24bf014133a0"}, + {file = "cryptography-42.0.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:c58115384bdcfe9c7f644c72f10f6f42bed7cf59f7b52fe1bf7ae0a622b3a139"}, + {file = "cryptography-42.0.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:56ce0c106d5c3fec1038c3cca3d55ac320a5be1b44bf15116732d0bc716979a2"}, + {file = "cryptography-42.0.0-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:324721d93b998cb7367f1e6897370644751e5580ff9b370c0a50dc60a2003513"}, + {file = "cryptography-42.0.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:d97aae66b7de41cdf5b12087b5509e4e9805ed6f562406dfcf60e8481a9a28f8"}, + {file = "cryptography-42.0.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:85f759ed59ffd1d0baad296e72780aa62ff8a71f94dc1ab340386a1207d0ea81"}, + {file = "cryptography-42.0.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:206aaf42e031b93f86ad60f9f5d9da1b09164f25488238ac1dc488334eb5e221"}, + {file = "cryptography-42.0.0-cp39-abi3-win32.whl", hash = "sha256:74f18a4c8ca04134d2052a140322002fef535c99cdbc2a6afc18a8024d5c9d5b"}, + {file = "cryptography-42.0.0-cp39-abi3-win_amd64.whl", hash = "sha256:14e4b909373bc5bf1095311fa0f7fcabf2d1a160ca13f1e9e467be1ac4cbdf94"}, + {file = "cryptography-42.0.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3005166a39b70c8b94455fdbe78d87a444da31ff70de3331cdec2c568cf25b7e"}, + {file = "cryptography-42.0.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:be14b31eb3a293fc6e6aa2807c8a3224c71426f7c4e3639ccf1a2f3ffd6df8c3"}, + {file = "cryptography-42.0.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:bd7cf7a8d9f34cc67220f1195884151426ce616fdc8285df9054bfa10135925f"}, + {file = "cryptography-42.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c310767268d88803b653fffe6d6f2f17bb9d49ffceb8d70aed50ad45ea49ab08"}, + {file = "cryptography-42.0.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:bdce70e562c69bb089523e75ef1d9625b7417c6297a76ac27b1b8b1eb51b7d0f"}, + {file = "cryptography-42.0.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e9326ca78111e4c645f7e49cbce4ed2f3f85e17b61a563328c85a5208cf34440"}, + {file = "cryptography-42.0.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:69fd009a325cad6fbfd5b04c711a4da563c6c4854fc4c9544bff3088387c77c0"}, + {file = "cryptography-42.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:988b738f56c665366b1e4bfd9045c3efae89ee366ca3839cd5af53eaa1401bce"}, + {file = "cryptography-42.0.0.tar.gz", hash = "sha256:6cf9b76d6e93c62114bd19485e5cb003115c134cf9ce91f8ac924c44f8c8c3f4"}, ] [package.dependencies] -cffi = ">=1.12" +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} [package.extras] docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] -docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] +docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] nox = ["nox"] -pep8test = ["black", "check-sdist", "mypy", "ruff"] +pep8test = ["check-sdist", "click", "mypy", 
"ruff"] sdist = ["build"] ssh = ["bcrypt (>=3.1.5)"] -test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] [[package]] name = "cython" version = "3.0.8" description = "The Cython compiler for writing C extensions in the Python language." -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -843,7 +831,6 @@ files = [ name = "decorator" version = "5.1.1" description = "Decorators for Humans" -category = "main" optional = true python-versions = ">=3.5" files = [ @@ -853,21 +840,19 @@ files = [ [[package]] name = "distlib" -version = "0.3.7" +version = "0.3.8" description = "Distribution utilities" -category = "dev" optional = false python-versions = "*" files = [ - {file = "distlib-0.3.7-py2.py3-none-any.whl", hash = "sha256:2e24928bc811348f0feb63014e97aaae3037f2cf48712d51ae61df7fd6075057"}, - {file = "distlib-0.3.7.tar.gz", hash = "sha256:9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8"}, + {file = "distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"}, + {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"}, ] [[package]] name = "docker" version = "7.0.0" description = "A Python library for the Docker Engine API." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -889,7 +874,6 @@ websockets = ["websocket-client (>=1.3.0)"] name = "docutils" version = "0.20.1" description = "Docutils -- Python Documentation Utilities" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -901,7 +885,6 @@ files = [ name = "duckdb" version = "0.9.2" description = "DuckDB embedded database" -category = "main" optional = true python-versions = ">=3.7.0" files = [ @@ -950,7 +933,6 @@ files = [ name = "ecdsa" version = "0.18.0" description = "ECDSA cryptographic signature library (pure python)" -category = "dev" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -967,14 +949,13 @@ gmpy2 = ["gmpy2"] [[package]] name = "exceptiongroup" -version = "1.1.3" +version = "1.2.0" description = "Backport of PEP 654 (exception groups)" -category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.1.3-py3-none-any.whl", hash = "sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3"}, - {file = "exceptiongroup-1.1.3.tar.gz", hash = "sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9"}, + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, ] [package.extras] @@ -984,7 +965,6 @@ test = ["pytest (>=6)"] name = "fastavro" version = "1.9.3" description = "Fast read/write of AVRO files" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1029,31 +1009,29 @@ zstandard = ["zstandard"] [[package]] name = "filelock" -version = "3.12.4" +version = "3.13.1" description = "A platform independent file lock." 
-category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "filelock-3.12.4-py3-none-any.whl", hash = "sha256:08c21d87ded6e2b9da6728c3dff51baf1dcecf973b768ef35bcbc3447edb9ad4"}, - {file = "filelock-3.12.4.tar.gz", hash = "sha256:2e6f249f1f3654291606e046b09f1fd5eac39b360664c27f5aad072012f8bcbd"}, + {file = "filelock-3.13.1-py3-none-any.whl", hash = "sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c"}, + {file = "filelock-3.13.1.tar.gz", hash = "sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e"}, ] [package.extras] -docs = ["furo (>=2023.7.26)", "sphinx (>=7.1.2)", "sphinx-autodoc-typehints (>=1.24)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.3)", "diff-cover (>=7.7)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)", "pytest-timeout (>=2.1)"] -typing = ["typing-extensions (>=4.7.1)"] +docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.24)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] +typing = ["typing-extensions (>=4.8)"] [[package]] name = "flask" -version = "3.0.0" +version = "3.0.1" description = "A simple framework for building complex web applications." -category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "flask-3.0.0-py3-none-any.whl", hash = "sha256:21128f47e4e3b9d597a3e8521a329bf56909b690fcc3fa3e477725aa81367638"}, - {file = "flask-3.0.0.tar.gz", hash = "sha256:cfadcdb638b609361d29ec22360d6070a77d7463dcb3ab08d2c2f2f168845f58"}, + {file = "flask-3.0.1-py3-none-any.whl", hash = "sha256:ca631a507f6dfe6c278ae20112cea3ff54ff2216390bf8880f6b035a5354af13"}, + {file = "flask-3.0.1.tar.gz", hash = "sha256:6489f51bb3666def6f314e15f19d50a1869a19ae0e8c9a3641ffe66c77d42403"}, ] [package.dependencies] @@ -1072,7 +1050,6 @@ dotenv = ["python-dotenv"] name = "flask-cors" version = "4.0.0" description = "A Flask extension adding a decorator for CORS support" -category = "dev" optional = false python-versions = "*" files = [ @@ -1085,85 +1062,99 @@ Flask = ">=0.9" [[package]] name = "frozenlist" -version = "1.4.0" +version = "1.4.1" description = "A list-like structure which implements collections.abc.MutableSequence" -category = "main" optional = true python-versions = ">=3.8" files = [ - {file = "frozenlist-1.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:764226ceef3125e53ea2cb275000e309c0aa5464d43bd72abd661e27fffc26ab"}, - {file = "frozenlist-1.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d6484756b12f40003c6128bfcc3fa9f0d49a687e171186c2d85ec82e3758c559"}, - {file = "frozenlist-1.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9ac08e601308e41eb533f232dbf6b7e4cea762f9f84f6357136eed926c15d12c"}, - {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d081f13b095d74b67d550de04df1c756831f3b83dc9881c38985834387487f1b"}, - {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:71932b597f9895f011f47f17d6428252fc728ba2ae6024e13c3398a087c2cdea"}, - {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:981b9ab5a0a3178ff413bca62526bb784249421c24ad7381e39d67981be2c326"}, - {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:e41f3de4df3e80de75845d3e743b3f1c4c8613c3997a912dbf0229fc61a8b963"}, - {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6918d49b1f90821e93069682c06ffde41829c346c66b721e65a5c62b4bab0300"}, - {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0e5c8764c7829343d919cc2dfc587a8db01c4f70a4ebbc49abde5d4b158b007b"}, - {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8d0edd6b1c7fb94922bf569c9b092ee187a83f03fb1a63076e7774b60f9481a8"}, - {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e29cda763f752553fa14c68fb2195150bfab22b352572cb36c43c47bedba70eb"}, - {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:0c7c1b47859ee2cac3846fde1c1dc0f15da6cec5a0e5c72d101e0f83dcb67ff9"}, - {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:901289d524fdd571be1c7be054f48b1f88ce8dddcbdf1ec698b27d4b8b9e5d62"}, - {file = "frozenlist-1.4.0-cp310-cp310-win32.whl", hash = "sha256:1a0848b52815006ea6596c395f87449f693dc419061cc21e970f139d466dc0a0"}, - {file = "frozenlist-1.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:b206646d176a007466358aa21d85cd8600a415c67c9bd15403336c331a10d956"}, - {file = "frozenlist-1.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:de343e75f40e972bae1ef6090267f8260c1446a1695e77096db6cfa25e759a95"}, - {file = "frozenlist-1.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ad2a9eb6d9839ae241701d0918f54c51365a51407fd80f6b8289e2dfca977cc3"}, - {file = "frozenlist-1.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bd7bd3b3830247580de99c99ea2a01416dfc3c34471ca1298bccabf86d0ff4dc"}, - {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bdf1847068c362f16b353163391210269e4f0569a3c166bc6a9f74ccbfc7e839"}, - {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38461d02d66de17455072c9ba981d35f1d2a73024bee7790ac2f9e361ef1cd0c"}, - {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5a32087d720c608f42caed0ef36d2b3ea61a9d09ee59a5142d6070da9041b8f"}, - {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dd65632acaf0d47608190a71bfe46b209719bf2beb59507db08ccdbe712f969b"}, - {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:261b9f5d17cac914531331ff1b1d452125bf5daa05faf73b71d935485b0c510b"}, - {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b89ac9768b82205936771f8d2eb3ce88503b1556324c9f903e7156669f521472"}, - {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:008eb8b31b3ea6896da16c38c1b136cb9fec9e249e77f6211d479db79a4eaf01"}, - {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e74b0506fa5aa5598ac6a975a12aa8928cbb58e1f5ac8360792ef15de1aa848f"}, - {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:490132667476f6781b4c9458298b0c1cddf237488abd228b0b3650e5ecba7467"}, - {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:76d4711f6f6d08551a7e9ef28c722f4a50dd0fc204c56b4bcd95c6cc05ce6fbb"}, - {file = "frozenlist-1.4.0-cp311-cp311-win32.whl", hash = 
"sha256:a02eb8ab2b8f200179b5f62b59757685ae9987996ae549ccf30f983f40602431"}, - {file = "frozenlist-1.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:515e1abc578dd3b275d6a5114030b1330ba044ffba03f94091842852f806f1c1"}, - {file = "frozenlist-1.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:f0ed05f5079c708fe74bf9027e95125334b6978bf07fd5ab923e9e55e5fbb9d3"}, - {file = "frozenlist-1.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ca265542ca427bf97aed183c1676e2a9c66942e822b14dc6e5f42e038f92a503"}, - {file = "frozenlist-1.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:491e014f5c43656da08958808588cc6c016847b4360e327a62cb308c791bd2d9"}, - {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17ae5cd0f333f94f2e03aaf140bb762c64783935cc764ff9c82dff626089bebf"}, - {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e78fb68cf9c1a6aa4a9a12e960a5c9dfbdb89b3695197aa7064705662515de2"}, - {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5655a942f5f5d2c9ed93d72148226d75369b4f6952680211972a33e59b1dfdc"}, - {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c11b0746f5d946fecf750428a95f3e9ebe792c1ee3b1e96eeba145dc631a9672"}, - {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e66d2a64d44d50d2543405fb183a21f76b3b5fd16f130f5c99187c3fb4e64919"}, - {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:88f7bc0fcca81f985f78dd0fa68d2c75abf8272b1f5c323ea4a01a4d7a614efc"}, - {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5833593c25ac59ede40ed4de6d67eb42928cca97f26feea219f21d0ed0959b79"}, - {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:fec520865f42e5c7f050c2a79038897b1c7d1595e907a9e08e3353293ffc948e"}, - {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:b826d97e4276750beca7c8f0f1a4938892697a6bcd8ec8217b3312dad6982781"}, - {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ceb6ec0a10c65540421e20ebd29083c50e6d1143278746a4ef6bcf6153171eb8"}, - {file = "frozenlist-1.4.0-cp38-cp38-win32.whl", hash = "sha256:2b8bcf994563466db019fab287ff390fffbfdb4f905fc77bc1c1d604b1c689cc"}, - {file = "frozenlist-1.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:a6c8097e01886188e5be3e6b14e94ab365f384736aa1fca6a0b9e35bd4a30bc7"}, - {file = "frozenlist-1.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:6c38721585f285203e4b4132a352eb3daa19121a035f3182e08e437cface44bf"}, - {file = "frozenlist-1.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a0c6da9aee33ff0b1a451e867da0c1f47408112b3391dd43133838339e410963"}, - {file = "frozenlist-1.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:93ea75c050c5bb3d98016b4ba2497851eadf0ac154d88a67d7a6816206f6fa7f"}, - {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f61e2dc5ad442c52b4887f1fdc112f97caeff4d9e6ebe78879364ac59f1663e1"}, - {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa384489fefeb62321b238e64c07ef48398fe80f9e1e6afeff22e140e0850eef"}, - {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:10ff5faaa22786315ef57097a279b833ecab1a0bfb07d604c9cbb1c4cdc2ed87"}, - {file = 
"frozenlist-1.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:007df07a6e3eb3e33e9a1fe6a9db7af152bbd8a185f9aaa6ece10a3529e3e1c6"}, - {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f4f399d28478d1f604c2ff9119907af9726aed73680e5ed1ca634d377abb087"}, - {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c5374b80521d3d3f2ec5572e05adc94601985cc526fb276d0c8574a6d749f1b3"}, - {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ce31ae3e19f3c902de379cf1323d90c649425b86de7bbdf82871b8a2a0615f3d"}, - {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7211ef110a9194b6042449431e08c4d80c0481e5891e58d429df5899690511c2"}, - {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:556de4430ce324c836789fa4560ca62d1591d2538b8ceb0b4f68fb7b2384a27a"}, - {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7645a8e814a3ee34a89c4a372011dcd817964ce8cb273c8ed6119d706e9613e3"}, - {file = "frozenlist-1.4.0-cp39-cp39-win32.whl", hash = "sha256:19488c57c12d4e8095a922f328df3f179c820c212940a498623ed39160bc3c2f"}, - {file = "frozenlist-1.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:6221d84d463fb110bdd7619b69cb43878a11d51cbb9394ae3105d082d5199167"}, - {file = "frozenlist-1.4.0.tar.gz", hash = "sha256:09163bdf0b2907454042edb19f887c6d33806adc71fbd54afc14908bfdc22251"}, + {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac"}, + {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868"}, + {file = "frozenlist-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74fb4bee6880b529a0c6560885fce4dc95936920f9f20f53d99a213f7bf66776"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:590344787a90ae57d62511dd7c736ed56b428f04cd8c161fcc5e7232c130c69a"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:068b63f23b17df8569b7fdca5517edef76171cf3897eb68beb01341131fbd2ad"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c849d495bf5154cd8da18a9eb15db127d4dba2968d88831aff6f0331ea9bd4c"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9750cc7fe1ae3b1611bb8cfc3f9ec11d532244235d75901fb6b8e42ce9229dfe"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9b2de4cf0cdd5bd2dee4c4f63a653c61d2408055ab77b151c1957f221cabf2a"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0633c8d5337cb5c77acbccc6357ac49a1770b8c487e5b3505c57b949b4b82e98"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:27657df69e8801be6c3638054e202a135c7f299267f1a55ed3a598934f6c0d75"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:f9a3ea26252bd92f570600098783d1371354d89d5f6b7dfd87359d669f2109b5"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4f57dab5fe3407b6c0c1cc907ac98e8a189f9e418f3b6e54d65a718aaafe3950"}, + {file = 
"frozenlist-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e02a0e11cf6597299b9f3bbd3f93d79217cb90cfd1411aec33848b13f5c656cc"}, + {file = "frozenlist-1.4.1-cp310-cp310-win32.whl", hash = "sha256:a828c57f00f729620a442881cc60e57cfcec6842ba38e1b19fd3e47ac0ff8dc1"}, + {file = "frozenlist-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:f56e2333dda1fe0f909e7cc59f021eba0d2307bc6f012a1ccf2beca6ba362439"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a0cb6f11204443f27a1628b0e460f37fb30f624be6051d490fa7d7e26d4af3d0"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b46c8ae3a8f1f41a0d2ef350c0b6e65822d80772fe46b653ab6b6274f61d4a49"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fde5bd59ab5357e3853313127f4d3565fc7dad314a74d7b5d43c22c6a5ed2ced"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:722e1124aec435320ae01ee3ac7bec11a5d47f25d0ed6328f2273d287bc3abb0"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2471c201b70d58a0f0c1f91261542a03d9a5e088ed3dc6c160d614c01649c106"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c757a9dd70d72b076d6f68efdbb9bc943665ae954dad2801b874c8c69e185068"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f146e0911cb2f1da549fc58fc7bcd2b836a44b79ef871980d605ec392ff6b0d2"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9c515e7914626b2a2e1e311794b4c35720a0be87af52b79ff8e1429fc25f19"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c302220494f5c1ebeb0912ea782bcd5e2f8308037b3c7553fad0e48ebad6ad82"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:442acde1e068288a4ba7acfe05f5f343e19fac87bfc96d89eb886b0363e977ec"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:1b280e6507ea8a4fa0c0a7150b4e526a8d113989e28eaaef946cc77ffd7efc0a"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:fe1a06da377e3a1062ae5fe0926e12b84eceb8a50b350ddca72dc85015873f74"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db9e724bebd621d9beca794f2a4ff1d26eed5965b004a97f1f1685a173b869c2"}, + {file = "frozenlist-1.4.1-cp311-cp311-win32.whl", hash = "sha256:e774d53b1a477a67838a904131c4b0eef6b3d8a651f8b138b04f748fccfefe17"}, + {file = "frozenlist-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb3c2db03683b5767dedb5769b8a40ebb47d6f7f45b1b3e3b4b51ec8ad9d9825"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1979bc0aeb89b33b588c51c54ab0161791149f2461ea7c7c946d95d5f93b56ae"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cc7b01b3754ea68a62bd77ce6020afaffb44a590c2289089289363472d13aedb"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9c92be9fd329ac801cc420e08452b70e7aeab94ea4233a4804f0915c14eba9b"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3894db91f5a489fc8fa6a9991820f368f0b3cbdb9cd8849547ccfab3392d86"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:ba60bb19387e13597fb059f32cd4d59445d7b18b69a745b8f8e5db0346f33480"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aefbba5f69d42246543407ed2461db31006b0f76c4e32dfd6f42215a2c41d09"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780d3a35680ced9ce682fbcf4cb9c2bad3136eeff760ab33707b71db84664e3a"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9acbb16f06fe7f52f441bb6f413ebae6c37baa6ef9edd49cdd567216da8600cd"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:23b701e65c7b36e4bf15546a89279bd4d8675faabc287d06bbcfac7d3c33e1e6"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3e0153a805a98f5ada7e09826255ba99fb4f7524bb81bf6b47fb702666484ae1"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:dd9b1baec094d91bf36ec729445f7769d0d0cf6b64d04d86e45baf89e2b9059b"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:1a4471094e146b6790f61b98616ab8e44f72661879cc63fa1049d13ef711e71e"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5667ed53d68d91920defdf4035d1cdaa3c3121dc0b113255124bcfada1cfa1b8"}, + {file = "frozenlist-1.4.1-cp312-cp312-win32.whl", hash = "sha256:beee944ae828747fd7cb216a70f120767fc9f4f00bacae8543c14a6831673f89"}, + {file = "frozenlist-1.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:64536573d0a2cb6e625cf309984e2d873979709f2cf22839bf2d61790b448ad5"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:20b51fa3f588ff2fe658663db52a41a4f7aa6c04f6201449c6c7c476bd255c0d"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:410478a0c562d1a5bcc2f7ea448359fcb050ed48b3c6f6f4f18c313a9bdb1826"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6321c9efe29975232da3bd0af0ad216800a47e93d763ce64f291917a381b8eb"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48f6a4533887e189dae092f1cf981f2e3885175f7a0f33c91fb5b7b682b6bab6"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6eb73fa5426ea69ee0e012fb59cdc76a15b1283d6e32e4f8dc4482ec67d1194d"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbeb989b5cc29e8daf7f976b421c220f1b8c731cbf22b9130d8815418ea45887"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32453c1de775c889eb4e22f1197fe3bdfe457d16476ea407472b9442e6295f7a"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693945278a31f2086d9bf3df0fe8254bbeaef1fe71e1351c3bd730aa7d31c41b"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1d0ce09d36d53bbbe566fe296965b23b961764c0bcf3ce2fa45f463745c04701"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3a670dc61eb0d0eb7080890c13de3066790f9049b47b0de04007090807c776b0"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:dca69045298ce5c11fd539682cff879cc1e664c245d1c64da929813e54241d11"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_s390x.whl", 
hash = "sha256:a06339f38e9ed3a64e4c4e43aec7f59084033647f908e4259d279a52d3757d09"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b7f2f9f912dca3934c1baec2e4585a674ef16fe00218d833856408c48d5beee7"}, + {file = "frozenlist-1.4.1-cp38-cp38-win32.whl", hash = "sha256:e7004be74cbb7d9f34553a5ce5fb08be14fb33bc86f332fb71cbe5216362a497"}, + {file = "frozenlist-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:5a7d70357e7cee13f470c7883a063aae5fe209a493c57d86eb7f5a6f910fae09"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bfa4a17e17ce9abf47a74ae02f32d014c5e9404b6d9ac7f729e01562bbee601e"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b7e3ed87d4138356775346e6845cccbe66cd9e207f3cd11d2f0b9fd13681359d"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c99169d4ff810155ca50b4da3b075cbde79752443117d89429595c2e8e37fed8"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edb678da49d9f72c9f6c609fbe41a5dfb9a9282f9e6a2253d5a91e0fc382d7c0"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6db4667b187a6742b33afbbaf05a7bc551ffcf1ced0000a571aedbb4aa42fc7b"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55fdc093b5a3cb41d420884cdaf37a1e74c3c37a31f46e66286d9145d2063bd0"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82e8211d69a4f4bc360ea22cd6555f8e61a1bd211d1d5d39d3d228b48c83a897"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89aa2c2eeb20957be2d950b85974b30a01a762f3308cd02bb15e1ad632e22dc7"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d3e0c25a2350080e9319724dede4f31f43a6c9779be48021a7f4ebde8b2d742"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7268252af60904bf52c26173cbadc3a071cece75f873705419c8681f24d3edea"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:0c250a29735d4f15321007fb02865f0e6b6a41a6b88f1f523ca1596ab5f50bd5"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:96ec70beabbd3b10e8bfe52616a13561e58fe84c0101dd031dc78f250d5128b9"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:23b2d7679b73fe0e5a4560b672a39f98dfc6f60df63823b0a9970525325b95f6"}, + {file = "frozenlist-1.4.1-cp39-cp39-win32.whl", hash = "sha256:a7496bfe1da7fb1a4e1cc23bb67c58fab69311cc7d32b5a99c2007b4b2a0e932"}, + {file = "frozenlist-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e6a20a581f9ce92d389a8c7d7c3dd47c81fd5d6e655c8dddf341e14aa48659d0"}, + {file = "frozenlist-1.4.1-py3-none-any.whl", hash = "sha256:04ced3e6a46b4cfffe20f9ae482818e34eba9b5fb0ce4056e4cc9b6e212d09b7"}, + {file = "frozenlist-1.4.1.tar.gz", hash = "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b"}, ] [[package]] name = "fsspec" -version = "2023.9.1" +version = "2023.12.2" description = "File-system specification" -category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "fsspec-2023.9.1-py3-none-any.whl", hash = "sha256:99a974063b6cced36cfaa61aa8efb05439c6fea2dafe65930e7ab46f9d2f8930"}, - {file = "fsspec-2023.9.1.tar.gz", hash = 
"sha256:da8cfe39eeb65aaa69074d5e0e4bbc9b7ef72d69c0587a31cab981eefdb3da13"}, + {file = "fsspec-2023.12.2-py3-none-any.whl", hash = "sha256:d800d87f72189a745fa3d6b033b9dc4a34ad069f60ca60b943a63599f5501960"}, + {file = "fsspec-2023.12.2.tar.gz", hash = "sha256:8548d39e8810b59c38014934f6b31e57f40c1b20f911f4cc2b85389c7e9bf0cb"}, ] [package.extras] @@ -1192,20 +1183,19 @@ tqdm = ["tqdm"] [[package]] name = "gcsfs" -version = "2023.9.1" +version = "2023.12.2.post1" description = "Convenient Filesystem interface over GCS" -category = "main" optional = true python-versions = ">=3.8" files = [ - {file = "gcsfs-2023.9.1-py2.py3-none-any.whl", hash = "sha256:c673caf901fc923d121399782394aba9c99dac4332a15ba21bd320d0b7f46521"}, - {file = "gcsfs-2023.9.1.tar.gz", hash = "sha256:47698bba0468896bfb33749552fe459fa745069119d7c2605a53bf4f3a1f09ac"}, + {file = "gcsfs-2023.12.2.post1-py2.py3-none-any.whl", hash = "sha256:4123cee2c44118d4c0c0f7405abe7610dd2d87087857520c6a7769765ec51d43"}, + {file = "gcsfs-2023.12.2.post1.tar.gz", hash = "sha256:e38b7e59580a1e490d62d55a47cba33b49a941b01917c3d6f6cfd2563371ab7b"}, ] [package.dependencies] aiohttp = "<4.0.0a0 || >4.0.0a0,<4.0.0a1 || >4.0.0a1" decorator = ">4.1.2" -fsspec = "2023.9.1" +fsspec = "2023.12.2" google-auth = ">=1.2" google-auth-oauthlib = "*" google-cloud-storage = "*" @@ -1217,14 +1207,13 @@ gcsfuse = ["fusepy"] [[package]] name = "google-api-core" -version = "2.11.1" +version = "2.15.0" description = "Google API client core library" -category = "main" optional = true python-versions = ">=3.7" files = [ - {file = "google-api-core-2.11.1.tar.gz", hash = "sha256:25d29e05a0058ed5f19c61c0a78b1b53adea4d9364b464d014fbda941f6d1c9a"}, - {file = "google_api_core-2.11.1-py3-none-any.whl", hash = "sha256:d92a5a92dc36dd4f4b9ee4e55528a90e432b059f93aee6ad857f9de8cc7ae94a"}, + {file = "google-api-core-2.15.0.tar.gz", hash = "sha256:abc978a72658f14a2df1e5e12532effe40f94f868f6e23d95133bd6abcca35ca"}, + {file = "google_api_core-2.15.0-py3-none-any.whl", hash = "sha256:2aa56d2be495551e66bbff7f729b790546f87d5c90e74781aa77233bcb395a8a"}, ] [package.dependencies] @@ -1240,21 +1229,19 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] [[package]] name = "google-auth" -version = "2.23.0" +version = "2.26.2" description = "Google Authentication Library" -category = "main" optional = true python-versions = ">=3.7" files = [ - {file = "google-auth-2.23.0.tar.gz", hash = "sha256:753a26312e6f1eaeec20bc6f2644a10926697da93446e1f8e24d6d32d45a922a"}, - {file = "google_auth-2.23.0-py2.py3-none-any.whl", hash = "sha256:2cec41407bd1e207f5b802638e32bb837df968bb5c05f413d0fa526fac4cf7a7"}, + {file = "google-auth-2.26.2.tar.gz", hash = "sha256:97327dbbf58cccb58fc5a1712bba403ae76668e64814eb30f7316f7e27126b81"}, + {file = "google_auth-2.26.2-py2.py3-none-any.whl", hash = "sha256:3f445c8ce9b61ed6459aad86d8ccdba4a9afed841b2d1451a11ef4db08957424"}, ] [package.dependencies] cachetools = ">=2.0.0,<6.0" pyasn1-modules = ">=0.2.1" rsa = ">=3.1.4,<5" -urllib3 = "<2.0" [package.extras] aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"] @@ -1265,14 +1252,13 @@ requests = ["requests (>=2.20.0,<3.0.0.dev0)"] [[package]] name = "google-auth-oauthlib" -version = "1.1.0" +version = "1.2.0" description = "Google Authentication Library" -category = "main" optional = true python-versions = ">=3.6" files = [ - {file = "google-auth-oauthlib-1.1.0.tar.gz", hash = "sha256:83ea8c3b0881e453790baff4448e8a6112ac8778d1de9da0b68010b843937afb"}, - {file = 
"google_auth_oauthlib-1.1.0-py2.py3-none-any.whl", hash = "sha256:089c6e587d36f4803ac7e0720c045c6a8b1fd1790088b8424975b90d0ee61c12"}, + {file = "google-auth-oauthlib-1.2.0.tar.gz", hash = "sha256:292d2d3783349f2b0734a0a0207b1e1e322ac193c2c09d8f7c613fb7cc501ea8"}, + {file = "google_auth_oauthlib-1.2.0-py2.py3-none-any.whl", hash = "sha256:297c1ce4cb13a99b5834c74a1fe03252e1e499716718b190f56bcb9c4abc4faf"}, ] [package.dependencies] @@ -1284,39 +1270,38 @@ tool = ["click (>=6.0.0)"] [[package]] name = "google-cloud-core" -version = "2.3.3" +version = "2.4.1" description = "Google Cloud API client core library" -category = "main" optional = true python-versions = ">=3.7" files = [ - {file = "google-cloud-core-2.3.3.tar.gz", hash = "sha256:37b80273c8d7eee1ae816b3a20ae43585ea50506cb0e60f3cf5be5f87f1373cb"}, - {file = "google_cloud_core-2.3.3-py2.py3-none-any.whl", hash = "sha256:fbd11cad3e98a7e5b0343dc07cb1039a5ffd7a5bb96e1f1e27cee4bda4a90863"}, + {file = "google-cloud-core-2.4.1.tar.gz", hash = "sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073"}, + {file = "google_cloud_core-2.4.1-py2.py3-none-any.whl", hash = "sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61"}, ] [package.dependencies] -google-api-core = ">=1.31.6,<2.0.0 || >2.3.0,<3.0.0dev" +google-api-core = ">=1.31.6,<2.0.dev0 || >2.3.0,<3.0.0dev" google-auth = ">=1.25.0,<3.0dev" [package.extras] -grpc = ["grpcio (>=1.38.0,<2.0dev)"] +grpc = ["grpcio (>=1.38.0,<2.0dev)", "grpcio-status (>=1.38.0,<2.0.dev0)"] [[package]] name = "google-cloud-storage" -version = "2.11.0" +version = "2.14.0" description = "Google Cloud Storage API client library" -category = "main" optional = true python-versions = ">=3.7" files = [ - {file = "google-cloud-storage-2.11.0.tar.gz", hash = "sha256:6fbf62659b83c8f3a0a743af0d661d2046c97c3a5bfb587c4662c4bc68de3e31"}, - {file = "google_cloud_storage-2.11.0-py2.py3-none-any.whl", hash = "sha256:88cbd7fb3d701c780c4272bc26952db99f25eb283fb4c2208423249f00b5fe53"}, + {file = "google-cloud-storage-2.14.0.tar.gz", hash = "sha256:2d23fcf59b55e7b45336729c148bb1c464468c69d5efbaee30f7201dd90eb97e"}, + {file = "google_cloud_storage-2.14.0-py2.py3-none-any.whl", hash = "sha256:8641243bbf2a2042c16a6399551fbb13f062cbc9a2de38d6c0bb5426962e9dbd"}, ] [package.dependencies] -google-api-core = ">=1.31.5,<2.0.0 || >2.3.0,<3.0.0dev" -google-auth = ">=1.25.0,<3.0dev" +google-api-core = ">=1.31.5,<2.0.dev0 || >2.3.0,<3.0.0dev" +google-auth = ">=2.23.3,<3.0dev" google-cloud-core = ">=2.3.0,<3.0dev" +google-crc32c = ">=1.0,<2.0dev" google-resumable-media = ">=2.6.0" requests = ">=2.18.0,<3.0.0dev" @@ -1327,7 +1312,6 @@ protobuf = ["protobuf (<5.0.0dev)"] name = "google-crc32c" version = "1.5.0" description = "A python wrapper of the C library 'Google CRC32C'" -category = "main" optional = true python-versions = ">=3.7" files = [ @@ -1406,14 +1390,13 @@ testing = ["pytest"] [[package]] name = "google-resumable-media" -version = "2.6.0" +version = "2.7.0" description = "Utilities for Google Media Downloads and Resumable Uploads" -category = "main" optional = true python-versions = ">= 3.7" files = [ - {file = "google-resumable-media-2.6.0.tar.gz", hash = "sha256:972852f6c65f933e15a4a210c2b96930763b47197cdf4aa5f5bea435efb626e7"}, - {file = "google_resumable_media-2.6.0-py2.py3-none-any.whl", hash = "sha256:fc03d344381970f79eebb632a3c18bb1828593a2dc5572b5f90115ef7d11e81b"}, + {file = "google-resumable-media-2.7.0.tar.gz", hash = 
"sha256:5f18f5fa9836f4b083162064a1c2c98c17239bfda9ca50ad970ccf905f3e625b"}, + {file = "google_resumable_media-2.7.0-py2.py3-none-any.whl", hash = "sha256:79543cfe433b63fd81c0844b7803aba1bb8950b47bedf7d980c38fa123937e08"}, ] [package.dependencies] @@ -1425,14 +1408,13 @@ requests = ["requests (>=2.18.0,<3.0.0dev)"] [[package]] name = "googleapis-common-protos" -version = "1.60.0" +version = "1.62.0" description = "Common protobufs used in Google APIs" -category = "main" optional = true python-versions = ">=3.7" files = [ - {file = "googleapis-common-protos-1.60.0.tar.gz", hash = "sha256:e73ebb404098db405ba95d1e1ae0aa91c3e15a71da031a2eeb6b2e23e7bc3708"}, - {file = "googleapis_common_protos-1.60.0-py2.py3-none-any.whl", hash = "sha256:69f9bbcc6acde92cab2db95ce30a70bd2b81d20b12eff3f1aabaffcbe8a93918"}, + {file = "googleapis-common-protos-1.62.0.tar.gz", hash = "sha256:83f0ece9f94e5672cced82f592d2a5edf527a96ed1794f0bab36d5735c996277"}, + {file = "googleapis_common_protos-1.62.0-py2.py3-none-any.whl", hash = "sha256:4750113612205514f9f6aa4cb00d523a94f3e8c06c5ad2fee466387dc4875f07"}, ] [package.dependencies] @@ -1445,7 +1427,6 @@ grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] name = "graphql-core" version = "3.2.3" description = "GraphQL implementation for Python, a port of GraphQL.js, the JavaScript reference implementation for GraphQL." -category = "dev" optional = false python-versions = ">=3.6,<4" files = [ @@ -1455,92 +1436,84 @@ files = [ [[package]] name = "greenlet" -version = "2.0.2" +version = "3.0.3" description = "Lightweight in-process concurrent programming" -category = "main" optional = true -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" -files = [ - {file = "greenlet-2.0.2-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:bdfea8c661e80d3c1c99ad7c3ff74e6e87184895bbaca6ee8cc61209f8b9b85d"}, - {file = "greenlet-2.0.2-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:9d14b83fab60d5e8abe587d51c75b252bcc21683f24699ada8fb275d7712f5a9"}, - {file = "greenlet-2.0.2-cp27-cp27m-win32.whl", hash = "sha256:6c3acb79b0bfd4fe733dff8bc62695283b57949ebcca05ae5c129eb606ff2d74"}, - {file = "greenlet-2.0.2-cp27-cp27m-win_amd64.whl", hash = "sha256:283737e0da3f08bd637b5ad058507e578dd462db259f7f6e4c5c365ba4ee9343"}, - {file = "greenlet-2.0.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d27ec7509b9c18b6d73f2f5ede2622441de812e7b1a80bbd446cb0633bd3d5ae"}, - {file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d967650d3f56af314b72df7089d96cda1083a7fc2da05b375d2bc48c82ab3f3c"}, - {file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:30bcf80dda7f15ac77ba5af2b961bdd9dbc77fd4ac6105cee85b0d0a5fcf74df"}, - {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26fbfce90728d82bc9e6c38ea4d038cba20b7faf8a0ca53a9c07b67318d46088"}, - {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9190f09060ea4debddd24665d6804b995a9c122ef5917ab26e1566dcc712ceeb"}, - {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d75209eed723105f9596807495d58d10b3470fa6732dd6756595e89925ce2470"}, - {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3a51c9751078733d88e013587b108f1b7a1fb106d402fb390740f002b6f6551a"}, - {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:76ae285c8104046b3a7f06b42f29c7b73f77683df18c49ab5af7983994c2dd91"}, - {file = 
"greenlet-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:2d4686f195e32d36b4d7cf2d166857dbd0ee9f3d20ae349b6bf8afc8485b3645"}, - {file = "greenlet-2.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c4302695ad8027363e96311df24ee28978162cdcdd2006476c43970b384a244c"}, - {file = "greenlet-2.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d4606a527e30548153be1a9f155f4e283d109ffba663a15856089fb55f933e47"}, - {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c48f54ef8e05f04d6eff74b8233f6063cb1ed960243eacc474ee73a2ea8573ca"}, - {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1846f1b999e78e13837c93c778dcfc3365902cfb8d1bdb7dd73ead37059f0d0"}, - {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a06ad5312349fec0ab944664b01d26f8d1f05009566339ac6f63f56589bc1a2"}, - {file = "greenlet-2.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:eff4eb9b7eb3e4d0cae3d28c283dc16d9bed6b193c2e1ace3ed86ce48ea8df19"}, - {file = "greenlet-2.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5454276c07d27a740c5892f4907c86327b632127dd9abec42ee62e12427ff7e3"}, - {file = "greenlet-2.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:7cafd1208fdbe93b67c7086876f061f660cfddc44f404279c1585bbf3cdc64c5"}, - {file = "greenlet-2.0.2-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:910841381caba4f744a44bf81bfd573c94e10b3045ee00de0cbf436fe50673a6"}, - {file = "greenlet-2.0.2-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:18a7f18b82b52ee85322d7a7874e676f34ab319b9f8cce5de06067384aa8ff43"}, - {file = "greenlet-2.0.2-cp35-cp35m-win32.whl", hash = "sha256:03a8f4f3430c3b3ff8d10a2a86028c660355ab637cee9333d63d66b56f09d52a"}, - {file = "greenlet-2.0.2-cp35-cp35m-win_amd64.whl", hash = "sha256:4b58adb399c4d61d912c4c331984d60eb66565175cdf4a34792cd9600f21b394"}, - {file = "greenlet-2.0.2-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:703f18f3fda276b9a916f0934d2fb6d989bf0b4fb5a64825260eb9bfd52d78f0"}, - {file = "greenlet-2.0.2-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:32e5b64b148966d9cccc2c8d35a671409e45f195864560829f395a54226408d3"}, - {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2dd11f291565a81d71dab10b7033395b7a3a5456e637cf997a6f33ebdf06f8db"}, - {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e0f72c9ddb8cd28532185f54cc1453f2c16fb417a08b53a855c4e6a418edd099"}, - {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd021c754b162c0fb55ad5d6b9d960db667faad0fa2ff25bb6e1301b0b6e6a75"}, - {file = "greenlet-2.0.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:3c9b12575734155d0c09d6c3e10dbd81665d5c18e1a7c6597df72fd05990c8cf"}, - {file = "greenlet-2.0.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b9ec052b06a0524f0e35bd8790686a1da006bd911dd1ef7d50b77bfbad74e292"}, - {file = "greenlet-2.0.2-cp36-cp36m-win32.whl", hash = "sha256:dbfcfc0218093a19c252ca8eb9aee3d29cfdcb586df21049b9d777fd32c14fd9"}, - {file = "greenlet-2.0.2-cp36-cp36m-win_amd64.whl", hash = "sha256:9f35ec95538f50292f6d8f2c9c9f8a3c6540bbfec21c9e5b4b751e0a7c20864f"}, - {file = "greenlet-2.0.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:d5508f0b173e6aa47273bdc0a0b5ba055b59662ba7c7ee5119528f466585526b"}, - {file = "greenlet-2.0.2-cp37-cp37m-manylinux2010_x86_64.whl", hash = 
"sha256:f82d4d717d8ef19188687aa32b8363e96062911e63ba22a0cff7802a8e58e5f1"}, - {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9c59a2120b55788e800d82dfa99b9e156ff8f2227f07c5e3012a45a399620b7"}, - {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2780572ec463d44c1d3ae850239508dbeb9fed38e294c68d19a24d925d9223ca"}, - {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:937e9020b514ceedb9c830c55d5c9872abc90f4b5862f89c0887033ae33c6f73"}, - {file = "greenlet-2.0.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:36abbf031e1c0f79dd5d596bfaf8e921c41df2bdf54ee1eed921ce1f52999a86"}, - {file = "greenlet-2.0.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:18e98fb3de7dba1c0a852731c3070cf022d14f0d68b4c87a19cc1016f3bb8b33"}, - {file = "greenlet-2.0.2-cp37-cp37m-win32.whl", hash = "sha256:3f6ea9bd35eb450837a3d80e77b517ea5bc56b4647f5502cd28de13675ee12f7"}, - {file = "greenlet-2.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:7492e2b7bd7c9b9916388d9df23fa49d9b88ac0640db0a5b4ecc2b653bf451e3"}, - {file = "greenlet-2.0.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:b864ba53912b6c3ab6bcb2beb19f19edd01a6bfcbdfe1f37ddd1778abfe75a30"}, - {file = "greenlet-2.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1087300cf9700bbf455b1b97e24db18f2f77b55302a68272c56209d5587c12d1"}, - {file = "greenlet-2.0.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:ba2956617f1c42598a308a84c6cf021a90ff3862eddafd20c3333d50f0edb45b"}, - {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3a569657468b6f3fb60587e48356fe512c1754ca05a564f11366ac9e306526"}, - {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8eab883b3b2a38cc1e050819ef06a7e6344d4a990d24d45bc6f2cf959045a45b"}, - {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acd2162a36d3de67ee896c43effcd5ee3de247eb00354db411feb025aa319857"}, - {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0bf60faf0bc2468089bdc5edd10555bab6e85152191df713e2ab1fcc86382b5a"}, - {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b0ef99cdbe2b682b9ccbb964743a6aca37905fda5e0452e5ee239b1654d37f2a"}, - {file = "greenlet-2.0.2-cp38-cp38-win32.whl", hash = "sha256:b80f600eddddce72320dbbc8e3784d16bd3fb7b517e82476d8da921f27d4b249"}, - {file = "greenlet-2.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:4d2e11331fc0c02b6e84b0d28ece3a36e0548ee1a1ce9ddde03752d9b79bba40"}, - {file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8512a0c38cfd4e66a858ddd1b17705587900dd760c6003998e9472b77b56d417"}, - {file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:88d9ab96491d38a5ab7c56dd7a3cc37d83336ecc564e4e8816dbed12e5aaefc8"}, - {file = "greenlet-2.0.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:561091a7be172ab497a3527602d467e2b3fbe75f9e783d8b8ce403fa414f71a6"}, - {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:971ce5e14dc5e73715755d0ca2975ac88cfdaefcaab078a284fea6cfabf866df"}, - {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be4ed120b52ae4d974aa40215fcdfde9194d63541c7ded40ee12eb4dda57b76b"}, - {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:94c817e84245513926588caf1152e3b559ff794d505555211ca041f032abbb6b"}, - {file = "greenlet-2.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1a819eef4b0e0b96bb0d98d797bef17dc1b4a10e8d7446be32d1da33e095dbb8"}, - {file = "greenlet-2.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7efde645ca1cc441d6dc4b48c0f7101e8d86b54c8530141b09fd31cef5149ec9"}, - {file = "greenlet-2.0.2-cp39-cp39-win32.whl", hash = "sha256:ea9872c80c132f4663822dd2a08d404073a5a9b5ba6155bea72fb2a79d1093b5"}, - {file = "greenlet-2.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:db1a39669102a1d8d12b57de2bb7e2ec9066a6f2b3da35ae511ff93b01b5d564"}, - {file = "greenlet-2.0.2.tar.gz", hash = "sha256:e7c8dc13af7db097bed64a051d2dd49e9f0af495c26995c00a9ee842690d34c0"}, +python-versions = ">=3.7" +files = [ + {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, + {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, + {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, + {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, + {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, + {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, + {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, + {file = 
"greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, + {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"}, + {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"}, + {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"}, + {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"}, + {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"}, + {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"}, + {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"}, + {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"}, + {file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"}, + {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"}, + {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"}, + {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"}, + {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"}, + {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"}, + {file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"}, + {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"}, + {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"}, + {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"}, + {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"}, + {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, ] [package.extras] -docs = ["Sphinx", "docutils (<0.18)"] +docs = ["Sphinx", "furo"] test = ["objgraph", "psutil"] [[package]] name = "identify" -version = "2.5.29" +version = "2.5.33" description = "File identification library for Python" -category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "identify-2.5.29-py2.py3-none-any.whl", hash = "sha256:24437fbf6f4d3fe6efd0eb9d67e24dd9106db99af5ceb27996a5f7895f24bf1b"}, - {file = "identify-2.5.29.tar.gz", hash = "sha256:d43d52b86b15918c137e3a74fff5224f60385cd0e9c38e99d07c257f02f151a5"}, + {file = "identify-2.5.33-py2.py3-none-any.whl", hash = "sha256:d40ce5fcd762817627670da8a7d8d8e65f24342d14539c59488dc603bf662e34"}, + {file = "identify-2.5.33.tar.gz", hash = 
"sha256:161558f9fe4559e1557e1bff323e8631f6a0e4837f7497767c1782832f16b62d"}, ] [package.extras] @@ -1548,60 +1521,56 @@ license = ["ukkonen"] [[package]] name = "idna" -version = "3.4" +version = "3.6" description = "Internationalized Domain Names in Applications (IDNA)" -category = "main" optional = false python-versions = ">=3.5" files = [ - {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, - {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, ] [[package]] name = "importlib-metadata" -version = "6.8.0" +version = "7.0.1" description = "Read metadata from Python packages" -category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_metadata-6.8.0-py3-none-any.whl", hash = "sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb"}, - {file = "importlib_metadata-6.8.0.tar.gz", hash = "sha256:dbace7892d8c0c4ac1ad096662232f831d4e64f4c4545bd53016a3e9d4654743"}, + {file = "importlib_metadata-7.0.1-py3-none-any.whl", hash = "sha256:4805911c3a4ec7c3966410053e9ec6a1fecd629117df5adee56dfc9432a1081e"}, + {file = "importlib_metadata-7.0.1.tar.gz", hash = "sha256:f238736bb06590ae52ac1fab06a3a9ef1d8dce2b7a35b5ab329371d6c8f5d2cc"}, ] [package.dependencies] zipp = ">=0.5" [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] [[package]] name = "importlib-resources" -version = "6.1.0" +version = "5.13.0" description = "Read resources from Python packages" -category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_resources-6.1.0-py3-none-any.whl", hash = "sha256:aa50258bbfa56d4e33fbd8aa3ef48ded10d1735f11532b8df95388cc6bdb7e83"}, - {file = "importlib_resources-6.1.0.tar.gz", hash = "sha256:9d48dcccc213325e810fd723e7fbb45ccb39f6cf5c31f00cf2b965f5f10f3cb9"}, + {file = "importlib_resources-5.13.0-py3-none-any.whl", hash = "sha256:9f7bd0c97b79972a6cce36a366356d16d5e13b09679c11a58f1014bfdf8e64b2"}, + {file = "importlib_resources-5.13.0.tar.gz", hash = "sha256:82d5c6cca930697dbbd86c93333bb2c2e72861d4789a11c2662b933e5ad2b528"}, ] [package.dependencies] zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff", "zipp (>=3.17)"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", 
"pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff"] [[package]] name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1613,7 +1582,6 @@ files = [ name = "isodate" version = "0.6.1" description = "An ISO 8601 date/time/duration parser and formatter" -category = "main" optional = true python-versions = "*" files = [ @@ -1628,7 +1596,6 @@ six = "*" name = "itsdangerous" version = "2.1.2" description = "Safely pass data to untrusted environments and back." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1638,14 +1605,13 @@ files = [ [[package]] name = "jinja2" -version = "3.1.2" +version = "3.1.3" description = "A very fast and expressive template engine." -category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, - {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, ] [package.dependencies] @@ -1658,7 +1624,6 @@ i18n = ["Babel (>=2.7)"] name = "jmespath" version = "1.0.1" description = "JSON Matching Expressions" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1670,7 +1635,6 @@ files = [ name = "jschema-to-python" version = "1.2.3" description = "Generate source code for Python classes from a JSON schema." -category = "dev" optional = false python-versions = ">= 2.7" files = [ @@ -1687,7 +1651,6 @@ pbr = "*" name = "jsondiff" version = "2.0.0" description = "Diff JSON and JSON-like structures in Python" -category = "dev" optional = false python-versions = "*" files = [ @@ -1699,7 +1662,6 @@ files = [ name = "jsonpatch" version = "1.33" description = "Apply JSON-Patches (RFC 6902)" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" files = [ @@ -1714,7 +1676,6 @@ jsonpointer = ">=1.9" name = "jsonpickle" version = "3.0.2" description = "Python library for serializing any arbitrary object graph into JSON" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1731,7 +1692,6 @@ testing-libs = ["simplejson", "ujson"] name = "jsonpointer" version = "2.4" description = "Identify specific nodes in a JSON document (RFC 6901)" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" files = [ @@ -1741,14 +1701,13 @@ files = [ [[package]] name = "jsonschema" -version = "4.19.1" +version = "4.21.1" description = "An implementation of JSON Schema validation for Python" -category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "jsonschema-4.19.1-py3-none-any.whl", hash = "sha256:cd5f1f9ed9444e554b38ba003af06c0a8c2868131e56bfbef0550fb450c0330e"}, - {file = "jsonschema-4.19.1.tar.gz", hash = "sha256:ec84cc37cfa703ef7cd4928db24f9cb31428a5d0fa77747b8b51a847458e0bbf"}, + {file = "jsonschema-4.21.1-py3-none-any.whl", hash = "sha256:7996507afae316306f9e2290407761157c6f78002dcf7419acb99822143d1c6f"}, + {file = "jsonschema-4.21.1.tar.gz", hash = "sha256:85727c00279f5fa6bedbe6238d2aa6403bedd8b4864ab11207d07df3cc1b2ee5"}, ] 

[package.dependencies]
@@ -1764,44 +1723,41 @@ format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validat
format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"]

[[package]]
-name = "jsonschema-path"
-version = "0.3.2"
+name = "jsonschema-spec"
+version = "0.1.3"
description = "JSONSchema Spec with object-oriented paths"
-category = "dev"
optional = false
-python-versions = ">=3.8.0,<4.0.0"
+python-versions = ">=3.7.0,<4.0.0"
files = [
- {file = "jsonschema_path-0.3.2-py3-none-any.whl", hash = "sha256:271aedfefcd161a0f467bdf23e1d9183691a61eaabf4b761046a914e369336c7"},
- {file = "jsonschema_path-0.3.2.tar.gz", hash = "sha256:4d0dababf341e36e9b91a5fb2a3e3fd300b0150e7fe88df4e55cc8253c5a3989"},
+ {file = "jsonschema_spec-0.1.3-py3-none-any.whl", hash = "sha256:b3cde007ad65c2e631e2f8653cf187124a2c714d02d9fafbab68ad64bf5745d6"},
+ {file = "jsonschema_spec-0.1.3.tar.gz", hash = "sha256:8d8db7c255e524fab1016a952a9143e5b6e3c074f4ed25d1878f8e97806caec0"},
]

[package.dependencies]
+jsonschema = ">=4.0.0,<5.0.0"
pathable = ">=0.4.1,<0.5.0"
PyYAML = ">=5.1"
-referencing = ">=0.28.0,<0.32.0"
-requests = ">=2.31.0,<3.0.0"
+typing-extensions = ">=4.3.0,<5.0.0"

[[package]]
name = "jsonschema-specifications"
-version = "2023.7.1"
+version = "2023.12.1"
description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry"
-category = "main"
optional = false
python-versions = ">=3.8"
files = [
- {file = "jsonschema_specifications-2023.7.1-py3-none-any.whl", hash = "sha256:05adf340b659828a004220a9613be00fa3f223f2b82002e273dee62fd50524b1"},
- {file = "jsonschema_specifications-2023.7.1.tar.gz", hash = "sha256:c91a50404e88a1f6ba40636778e2ee08f6e24c5613fe4c53ac24578a5a7f72bb"},
+ {file = "jsonschema_specifications-2023.12.1-py3-none-any.whl", hash = "sha256:87e4fdf3a94858b8a2ba2778d9ba57d8a9cafca7c7489c46ba0d30a8bc6a9c3c"},
+ {file = "jsonschema_specifications-2023.12.1.tar.gz", hash = "sha256:48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc"},
]

[package.dependencies]
importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""}
-referencing = ">=0.28.0"
+referencing = ">=0.31.0"

[[package]]
name = "junit-xml"
version = "1.9"
description = "Creates JUnit XML test result documents that can be read by tools such as Jenkins"
-category = "dev"
optional = false
python-versions = "*"
files = [
@@ -1816,7 +1772,6 @@ six = "*"
name = "lazy-object-proxy"
version = "1.10.0"
description = "A fast and thorough lazy object proxy."
-category = "dev"
optional = false
python-versions = ">=3.8"
files = [
@@ -1863,7 +1818,6 @@ files = [
name = "markdown-it-py"
version = "3.0.0"
description = "Python port of markdown-it. Markdown parsing, done right!"
-category = "main"
optional = false
python-versions = ">=3.8"
files = [
@@ -1886,79 +1840,77 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"]

[[package]]
name = "markupsafe"
-version = "2.1.3"
+version = "2.1.4"
description = "Safely add untrusted strings to HTML/XML markup."
-category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-win32.whl", hash = "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, - {file = 
"MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-win32.whl", hash = "sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-win_amd64.whl", hash = "sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e"}, - {file = 
"MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-win32.whl", hash = "sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-win32.whl", hash = "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba"}, - {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:de8153a7aae3835484ac168a9a9bdaa0c5eee4e0bc595503c95d53b942879c84"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e888ff76ceb39601c59e219f281466c6d7e66bd375b4ec1ce83bcdc68306796b"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0b838c37ba596fcbfca71651a104a611543077156cb0a26fe0c475e1f152ee8"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac1ebf6983148b45b5fa48593950f90ed6d1d26300604f321c74a9ca1609f8e"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbad3d346df8f9d72622ac71b69565e621ada2ce6572f37c2eae8dacd60385d"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d5291d98cd3ad9a562883468c690a2a238c4a6388ab3bd155b0c75dd55ece858"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a7cc49ef48a3c7a0005a949f3c04f8baa5409d3f663a1b36f0eba9bfe2a0396e"}, + {file = 
"MarkupSafe-2.1.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b83041cda633871572f0d3c41dddd5582ad7d22f65a72eacd8d3d6d00291df26"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-win32.whl", hash = "sha256:0c26f67b3fe27302d3a412b85ef696792c4a2386293c53ba683a89562f9399b0"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-win_amd64.whl", hash = "sha256:a76055d5cb1c23485d7ddae533229039b850db711c554a12ea64a0fd8a0129e2"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9e9e3c4020aa2dc62d5dd6743a69e399ce3de58320522948af6140ac959ab863"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0042d6a9880b38e1dd9ff83146cc3c9c18a059b9360ceae207805567aacccc69"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55d03fea4c4e9fd0ad75dc2e7e2b6757b80c152c032ea1d1de487461d8140efc"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ab3a886a237f6e9c9f4f7d272067e712cdb4efa774bef494dccad08f39d8ae6"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abf5ebbec056817057bfafc0445916bb688a255a5146f900445d081db08cbabb"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e1a0d1924a5013d4f294087e00024ad25668234569289650929ab871231668e7"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e7902211afd0af05fbadcc9a312e4cf10f27b779cf1323e78d52377ae4b72bea"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c669391319973e49a7c6230c218a1e3044710bc1ce4c8e6eb71f7e6d43a2c131"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-win32.whl", hash = "sha256:31f57d64c336b8ccb1966d156932f3daa4fee74176b0fdc48ef580be774aae74"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-win_amd64.whl", hash = "sha256:54a7e1380dfece8847c71bf7e33da5d084e9b889c75eca19100ef98027bd9f56"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:a76cd37d229fc385738bd1ce4cba2a121cf26b53864c1772694ad0ad348e509e"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:987d13fe1d23e12a66ca2073b8d2e2a75cec2ecb8eab43ff5624ba0ad42764bc"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5244324676254697fe5c181fc762284e2c5fceeb1c4e3e7f6aca2b6f107e60dc"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78bc995e004681246e85e28e068111a4c3f35f34e6c62da1471e844ee1446250"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a4d176cfdfde84f732c4a53109b293d05883e952bbba68b857ae446fa3119b4f"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:f9917691f410a2e0897d1ef99619fd3f7dd503647c8ff2475bf90c3cf222ad74"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:f06e5a9e99b7df44640767842f414ed5d7bedaaa78cd817ce04bbd6fd86e2dd6"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:396549cea79e8ca4ba65525470d534e8a41070e6b3500ce2414921099cb73e8d"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-win32.whl", hash = "sha256:f6be2d708a9d0e9b0054856f07ac7070fbe1754be40ca8525d5adccdbda8f475"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-win_amd64.whl", hash = 
"sha256:5045e892cfdaecc5b4c01822f353cf2c8feb88a6ec1c0adef2a2e705eef0f656"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7a07f40ef8f0fbc5ef1000d0c78771f4d5ca03b4953fc162749772916b298fc4"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d18b66fe626ac412d96c2ab536306c736c66cf2a31c243a45025156cc190dc8a"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:698e84142f3f884114ea8cf83e7a67ca8f4ace8454e78fe960646c6c91c63bfa"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49a3b78a5af63ec10d8604180380c13dcd870aba7928c1fe04e881d5c792dc4e"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:15866d7f2dc60cfdde12ebb4e75e41be862348b4728300c36cdf405e258415ec"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:6aa5e2e7fc9bc042ae82d8b79d795b9a62bd8f15ba1e7594e3db243f158b5565"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:54635102ba3cf5da26eb6f96c4b8c53af8a9c0d97b64bdcb592596a6255d8518"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-win32.whl", hash = "sha256:3583a3a3ab7958e354dc1d25be74aee6228938312ee875a22330c4dc2e41beb0"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-win_amd64.whl", hash = "sha256:d6e427c7378c7f1b2bef6a344c925b8b63623d3321c09a237b7cc0e77dd98ceb"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:bf1196dcc239e608605b716e7b166eb5faf4bc192f8a44b81e85251e62584bd2"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4df98d4a9cd6a88d6a585852f56f2155c9cdb6aec78361a19f938810aa020954"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b835aba863195269ea358cecc21b400276747cc977492319fd7682b8cd2c253d"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23984d1bdae01bee794267424af55eef4dfc038dc5d1272860669b2aa025c9e3"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c98c33ffe20e9a489145d97070a435ea0679fddaabcafe19982fe9c971987d5"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9896fca4a8eb246defc8b2a7ac77ef7553b638e04fbf170bff78a40fa8a91474"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b0fe73bac2fed83839dbdbe6da84ae2a31c11cfc1c777a40dbd8ac8a6ed1560f"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c7556bafeaa0a50e2fe7dc86e0382dea349ebcad8f010d5a7dc6ba568eaaa789"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-win32.whl", hash = "sha256:fc1a75aa8f11b87910ffd98de62b29d6520b6d6e8a3de69a70ca34dea85d2a8a"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-win_amd64.whl", hash = "sha256:3a66c36a3864df95e4f62f9167c734b3b1192cb0851b43d7cc08040c074c6279"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:765f036a3d00395a326df2835d8f86b637dbaf9832f90f5d196c3b8a7a5080cb"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:21e7af8091007bf4bebf4521184f4880a6acab8df0df52ef9e513d8e5db23411"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5c31fe855c77cad679b302aabc42d724ed87c043b1432d457f4976add1c2c3e"}, + {file = 
"MarkupSafe-2.1.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7653fa39578957bc42e5ebc15cf4361d9e0ee4b702d7d5ec96cdac860953c5b4"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:47bb5f0142b8b64ed1399b6b60f700a580335c8e1c57f2f15587bd072012decc"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:fe8512ed897d5daf089e5bd010c3dc03bb1bdae00b35588c49b98268d4a01e00"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:36d7626a8cca4d34216875aee5a1d3d654bb3dac201c1c003d182283e3205949"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b6f14a9cd50c3cb100eb94b3273131c80d102e19bb20253ac7bd7336118a673a"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-win32.whl", hash = "sha256:c8f253a84dbd2c63c19590fa86a032ef3d8cc18923b8049d91bcdeeb2581fbf6"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-win_amd64.whl", hash = "sha256:8b570a1537367b52396e53325769608f2a687ec9a4363647af1cded8928af959"}, + {file = "MarkupSafe-2.1.4.tar.gz", hash = "sha256:3aae9af4cac263007fd6309c64c6ab4506dd2b79382d9d19a1994f9240b8db4f"}, ] [[package]] name = "mdurl" version = "0.1.2" description = "Markdown URL utilities" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1970,7 +1922,6 @@ files = [ name = "mmhash3" version = "3.0.1" description = "Python wrapper for MurmurHash (MurmurHash3), a set of fast and robust hash functions." -category = "main" optional = false python-versions = "*" files = [ @@ -2014,7 +1965,6 @@ files = [ name = "moto" version = "4.2.13" description = "" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2079,7 +2029,6 @@ xray = ["aws-xray-sdk (>=0.93,!=0.96)", "setuptools"] name = "mpmath" version = "1.3.0" description = "Python library for arbitrary-precision floating-point arithmetic" -category = "dev" optional = false python-versions = "*" files = [ @@ -2095,14 +2044,13 @@ tests = ["pytest (>=4.6)"] [[package]] name = "msal" -version = "1.24.0" -description = "The Microsoft Authentication Library (MSAL) for Python library" -category = "main" +version = "1.26.0" +description = "The Microsoft Authentication Library (MSAL) for Python library enables your app to access the Microsoft Cloud by supporting authentication of users with Microsoft Azure Active Directory accounts (AAD) and Microsoft Accounts (MSA) using industry standard OAuth2 and OpenID Connect." optional = true python-versions = ">=2.7" files = [ - {file = "msal-1.24.0-py2.py3-none-any.whl", hash = "sha256:a7f2f342b80ba3fe168218003b6798cc81b83c9745284bf63fb8d4ec8e2dbc50"}, - {file = "msal-1.24.0.tar.gz", hash = "sha256:7d2ecdad41a5f73bb2b813f3061a4cf47c924621105a8ed137586fcb9d8f827e"}, + {file = "msal-1.26.0-py2.py3-none-any.whl", hash = "sha256:be77ba6a8f49c9ff598bbcdc5dfcf1c9842f3044300109af738e8c3e371065b5"}, + {file = "msal-1.26.0.tar.gz", hash = "sha256:224756079fe338be838737682b49f8ebc20a87c1c5eeaf590daae4532b83de15"}, ] [package.dependencies] @@ -2115,94 +2063,92 @@ broker = ["pymsalruntime (>=0.13.2,<0.14)"] [[package]] name = "msal-extensions" -version = "1.0.0" +version = "1.1.0" description = "Microsoft Authentication Library extensions (MSAL EX) provides a persistence API that can save your data on disk, encrypted on Windows, macOS and Linux. Concurrent data access will be coordinated by a file lock mechanism." 
-category = "main" optional = true -python-versions = "*" +python-versions = ">=3.7" files = [ - {file = "msal-extensions-1.0.0.tar.gz", hash = "sha256:c676aba56b0cce3783de1b5c5ecfe828db998167875126ca4b47dc6436451354"}, - {file = "msal_extensions-1.0.0-py2.py3-none-any.whl", hash = "sha256:91e3db9620b822d0ed2b4d1850056a0f133cba04455e62f11612e40f5502f2ee"}, + {file = "msal-extensions-1.1.0.tar.gz", hash = "sha256:6ab357867062db7b253d0bd2df6d411c7891a0ee7308d54d1e4317c1d1c54252"}, + {file = "msal_extensions-1.1.0-py3-none-any.whl", hash = "sha256:01be9711b4c0b1a151450068eeb2c4f0997df3bba085ac299de3a66f585e382f"}, ] [package.dependencies] msal = ">=0.4.1,<2.0.0" +packaging = "*" portalocker = [ - {version = ">=1.0,<3", markers = "python_version >= \"3.5\" and platform_system != \"Windows\""}, - {version = ">=1.6,<3", markers = "python_version >= \"3.5\" and platform_system == \"Windows\""}, + {version = ">=1.0,<3", markers = "platform_system != \"Windows\""}, + {version = ">=1.6,<3", markers = "platform_system == \"Windows\""}, ] [[package]] name = "msgpack" -version = "1.0.6" +version = "1.0.7" description = "MessagePack serializer" -category = "main" optional = true python-versions = ">=3.8" files = [ - {file = "msgpack-1.0.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f4321692e7f299277e55f322329b2c972d93bb612d85f3fda8741bec5c6285ce"}, - {file = "msgpack-1.0.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1f0e36a5fa7a182cde391a128a64f437657d2b9371dfa42eda3436245adccbf5"}, - {file = "msgpack-1.0.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b5c8dd9a386a66e50bd7fa22b7a49fb8ead2b3574d6bd69eb1caced6caea0803"}, - {file = "msgpack-1.0.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f85200ea102276afdd3749ca94747f057bbb868d1c52921ee2446730b508d0f"}, - {file = "msgpack-1.0.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a006c300e82402c0c8f1ded11352a3ba2a61b87e7abb3054c845af2ca8d553c"}, - {file = "msgpack-1.0.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33bbf47ea5a6ff20c23426106e81863cdbb5402de1825493026ce615039cc99d"}, - {file = "msgpack-1.0.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:04450e4b5e1e662e7c86b6aafb7c230af9334fd0becf5e6b80459a507884241c"}, - {file = "msgpack-1.0.6-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b06a5095a79384760625b5de3f83f40b3053a385fb893be8a106fbbd84c14980"}, - {file = "msgpack-1.0.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3910211b0ab20be3a38e0bb944ed45bd4265d8d9f11a3d1674b95b298e08dd5c"}, - {file = "msgpack-1.0.6-cp310-cp310-win32.whl", hash = "sha256:1dc67b40fe81217b308ab12651adba05e7300b3a2ccf84d6b35a878e308dd8d4"}, - {file = "msgpack-1.0.6-cp310-cp310-win_amd64.whl", hash = "sha256:885de1ed5ea01c1bfe0a34c901152a264c3c1f8f1d382042b92ea354bd14bb0e"}, - {file = "msgpack-1.0.6-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:099c3d8a027367e1a6fc55d15336f04ff65c60c4f737b5739f7db4525c65fe9e"}, - {file = "msgpack-1.0.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9b88dc97ba86c96b964c3745a445d9a65f76fe21955a953064fe04adb63e9367"}, - {file = "msgpack-1.0.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:00ce5f827d4f26fc094043e6f08b6069c1b148efa2631c47615ae14fb6cafc89"}, - {file = "msgpack-1.0.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd6af61388be65a8701f5787362cb54adae20007e0cc67ca9221a4b95115583b"}, - {file = 
"msgpack-1.0.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:652e4b7497825b0af6259e2c54700e6dc33d2fc4ed92b8839435090d4c9cc911"}, - {file = "msgpack-1.0.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b08676a17e3f791daad34d5fcb18479e9c85e7200d5a17cbe8de798643a7e37"}, - {file = "msgpack-1.0.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:229ccb6713c8b941eaa5cf13dc7478eba117f21513b5893c35e44483e2f0c9c8"}, - {file = "msgpack-1.0.6-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:95ade0bd4cf69e04e8b8f8ec2d197d9c9c4a9b6902e048dc7456bf6d82e12a80"}, - {file = "msgpack-1.0.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5b16344032a27b2ccfd341f89dadf3e4ef6407d91e4b93563c14644a8abb3ad7"}, - {file = "msgpack-1.0.6-cp311-cp311-win32.whl", hash = "sha256:55bb4a1bf94e39447bc08238a2fb8a767460388a8192f67c103442eb36920887"}, - {file = "msgpack-1.0.6-cp311-cp311-win_amd64.whl", hash = "sha256:ae97504958d0bc58c1152045c170815d5c4f8af906561ce044b6358b43d0c97e"}, - {file = "msgpack-1.0.6-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:7ecf431786019a7bfedc28281531d706627f603e3691d64eccdbce3ecd353823"}, - {file = "msgpack-1.0.6-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a635aecf1047255576dbb0927cbf9a7aa4a68e9d54110cc3c926652d18f144e0"}, - {file = "msgpack-1.0.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:102cfb54eaefa73e8ca1e784b9352c623524185c98e057e519545131a56fb0af"}, - {file = "msgpack-1.0.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c5e05e4f5756758c58a8088aa10dc70d851c89f842b611fdccfc0581c1846bc"}, - {file = "msgpack-1.0.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68569509dd015fcdd1e6b2b3ccc8c51fd27d9a97f461ccc909270e220ee09685"}, - {file = "msgpack-1.0.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bf652839d16de91fe1cfb253e0a88db9a548796939533894e07f45d4bdf90a5f"}, - {file = "msgpack-1.0.6-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14db7e1b7a7ed362b2f94897bf2486c899c8bb50f6e34b2db92fe534cdab306f"}, - {file = "msgpack-1.0.6-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:159cfec18a6e125dd4723e2b1de6f202b34b87c850fb9d509acfd054c01135e9"}, - {file = "msgpack-1.0.6-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:6a01a072b2219b65a6ff74df208f20b2cac9401c60adb676ee34e53b4c651077"}, - {file = "msgpack-1.0.6-cp312-cp312-win32.whl", hash = "sha256:e36560d001d4ba469d469b02037f2dd404421fd72277d9474efe9f03f83fced5"}, - {file = "msgpack-1.0.6-cp312-cp312-win_amd64.whl", hash = "sha256:5e7fae9ca93258a956551708cf60dc6c8145574e32ce8c8c4d894e63bcb04341"}, - {file = "msgpack-1.0.6-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:40b801b768f5a765e33c68f30665d3c6ee1c8623a2d2bb78e6e59f2db4e4ceb7"}, - {file = "msgpack-1.0.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:da057d3652e698b00746e47f06dbb513314f847421e857e32e1dc61c46f6c052"}, - {file = "msgpack-1.0.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f75114c05ec56566da6b55122791cf5bb53d5aada96a98c016d6231e03132f76"}, - {file = "msgpack-1.0.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:61213482b5a387ead9e250e9e3cb290292feca39dc83b41c3b1b7b8ffc8d8ecb"}, - {file = "msgpack-1.0.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bae6c561f11b444b258b1b4be2bdd1e1cf93cd1d80766b7e869a79db4543a8a8"}, - {file = 
"msgpack-1.0.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:619a63753ba9e792fe3c6c0fc2b9ee2cfbd92153dd91bee029a89a71eb2942cd"}, - {file = "msgpack-1.0.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:70843788c85ca385846a2d2f836efebe7bb2687ca0734648bf5c9dc6c55602d2"}, - {file = "msgpack-1.0.6-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:fb4571efe86545b772a4630fee578c213c91cbcfd20347806e47fd4e782a18fe"}, - {file = "msgpack-1.0.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bbb4448a05d261fae423d5c0b0974ad899f60825bc77eabad5a0c518e78448c2"}, - {file = "msgpack-1.0.6-cp38-cp38-win32.whl", hash = "sha256:5cd67674db3c73026e0a2c729b909780e88bd9cbc8184256f9567640a5d299a8"}, - {file = "msgpack-1.0.6-cp38-cp38-win_amd64.whl", hash = "sha256:a1cf98afa7ad5e7012454ca3fde254499a13f9d92fd50cb46118118a249a1355"}, - {file = "msgpack-1.0.6-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d6d25b8a5c70e2334ed61a8da4c11cd9b97c6fbd980c406033f06e4463fda006"}, - {file = "msgpack-1.0.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:88cdb1da7fdb121dbb3116910722f5acab4d6e8bfcacab8fafe27e2e7744dc6a"}, - {file = "msgpack-1.0.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3b5658b1f9e486a2eec4c0c688f213a90085b9cf2fec76ef08f98fdf6c62f4b9"}, - {file = "msgpack-1.0.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76820f2ece3b0a7c948bbb6a599020e29574626d23a649476def023cbb026787"}, - {file = "msgpack-1.0.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c780d992f5d734432726b92a0c87bf1857c3d85082a8dea29cbf56e44a132b3"}, - {file = "msgpack-1.0.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0ed35d6d6122d0baa9a1b59ebca4ee302139f4cfb57dab85e4c73ab793ae7ed"}, - {file = "msgpack-1.0.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:32c0aff31f33033f4961abc01f78497e5e07bac02a508632aef394b384d27428"}, - {file = "msgpack-1.0.6-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:35ad5aed9b52217d4cea739d0ea3a492a18dd86fecb4b132668a69f27fb0363b"}, - {file = "msgpack-1.0.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47275ff73005a3e5e146e50baa2378e1730cba6e292f0222bc496a8e4c4adfc8"}, - {file = "msgpack-1.0.6-cp39-cp39-win32.whl", hash = "sha256:7baf16fd8908a025c4a8d7b699103e72d41f967e2aee5a2065432bcdbd9fd06e"}, - {file = "msgpack-1.0.6-cp39-cp39-win_amd64.whl", hash = "sha256:fc97aa4b4fb928ff4d3b74da7c30b360d0cb3ede49a5a6e1fd9705f49aea1deb"}, - {file = "msgpack-1.0.6.tar.gz", hash = "sha256:25d3746da40f3c8c59c3b1d001e49fd2aa17904438f980d9a391370366df001e"}, + {file = "msgpack-1.0.7-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:04ad6069c86e531682f9e1e71b71c1c3937d6014a7c3e9edd2aa81ad58842862"}, + {file = "msgpack-1.0.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cca1b62fe70d761a282496b96a5e51c44c213e410a964bdffe0928e611368329"}, + {file = "msgpack-1.0.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e50ebce52f41370707f1e21a59514e3375e3edd6e1832f5e5235237db933c98b"}, + {file = "msgpack-1.0.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a7b4f35de6a304b5533c238bee86b670b75b03d31b7797929caa7a624b5dda6"}, + {file = "msgpack-1.0.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28efb066cde83c479dfe5a48141a53bc7e5f13f785b92ddde336c716663039ee"}, + {file = 
"msgpack-1.0.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4cb14ce54d9b857be9591ac364cb08dc2d6a5c4318c1182cb1d02274029d590d"}, + {file = "msgpack-1.0.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b573a43ef7c368ba4ea06050a957c2a7550f729c31f11dd616d2ac4aba99888d"}, + {file = "msgpack-1.0.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ccf9a39706b604d884d2cb1e27fe973bc55f2890c52f38df742bc1d79ab9f5e1"}, + {file = "msgpack-1.0.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cb70766519500281815dfd7a87d3a178acf7ce95390544b8c90587d76b227681"}, + {file = "msgpack-1.0.7-cp310-cp310-win32.whl", hash = "sha256:b610ff0f24e9f11c9ae653c67ff8cc03c075131401b3e5ef4b82570d1728f8a9"}, + {file = "msgpack-1.0.7-cp310-cp310-win_amd64.whl", hash = "sha256:a40821a89dc373d6427e2b44b572efc36a2778d3f543299e2f24eb1a5de65415"}, + {file = "msgpack-1.0.7-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:576eb384292b139821c41995523654ad82d1916da6a60cff129c715a6223ea84"}, + {file = "msgpack-1.0.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:730076207cb816138cf1af7f7237b208340a2c5e749707457d70705715c93b93"}, + {file = "msgpack-1.0.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:85765fdf4b27eb5086f05ac0491090fc76f4f2b28e09d9350c31aac25a5aaff8"}, + {file = "msgpack-1.0.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3476fae43db72bd11f29a5147ae2f3cb22e2f1a91d575ef130d2bf49afd21c46"}, + {file = "msgpack-1.0.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d4c80667de2e36970ebf74f42d1088cc9ee7ef5f4e8c35eee1b40eafd33ca5b"}, + {file = "msgpack-1.0.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b0bf0effb196ed76b7ad883848143427a73c355ae8e569fa538365064188b8e"}, + {file = "msgpack-1.0.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f9a7c509542db4eceed3dcf21ee5267ab565a83555c9b88a8109dcecc4709002"}, + {file = "msgpack-1.0.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:84b0daf226913133f899ea9b30618722d45feffa67e4fe867b0b5ae83a34060c"}, + {file = "msgpack-1.0.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ec79ff6159dffcc30853b2ad612ed572af86c92b5168aa3fc01a67b0fa40665e"}, + {file = "msgpack-1.0.7-cp311-cp311-win32.whl", hash = "sha256:3e7bf4442b310ff154b7bb9d81eb2c016b7d597e364f97d72b1acc3817a0fdc1"}, + {file = "msgpack-1.0.7-cp311-cp311-win_amd64.whl", hash = "sha256:3f0c8c6dfa6605ab8ff0611995ee30d4f9fcff89966cf562733b4008a3d60d82"}, + {file = "msgpack-1.0.7-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f0936e08e0003f66bfd97e74ee530427707297b0d0361247e9b4f59ab78ddc8b"}, + {file = "msgpack-1.0.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:98bbd754a422a0b123c66a4c341de0474cad4a5c10c164ceed6ea090f3563db4"}, + {file = "msgpack-1.0.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b291f0ee7961a597cbbcc77709374087fa2a9afe7bdb6a40dbbd9b127e79afee"}, + {file = "msgpack-1.0.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebbbba226f0a108a7366bf4b59bf0f30a12fd5e75100c630267d94d7f0ad20e5"}, + {file = "msgpack-1.0.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e2d69948e4132813b8d1131f29f9101bc2c915f26089a6d632001a5c1349672"}, + {file = "msgpack-1.0.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:bdf38ba2d393c7911ae989c3bbba510ebbcdf4ecbdbfec36272abe350c454075"}, + {file = "msgpack-1.0.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:993584fc821c58d5993521bfdcd31a4adf025c7d745bbd4d12ccfecf695af5ba"}, + {file = "msgpack-1.0.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:52700dc63a4676669b341ba33520f4d6e43d3ca58d422e22ba66d1736b0a6e4c"}, + {file = "msgpack-1.0.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e45ae4927759289c30ccba8d9fdce62bb414977ba158286b5ddaf8df2cddb5c5"}, + {file = "msgpack-1.0.7-cp312-cp312-win32.whl", hash = "sha256:27dcd6f46a21c18fa5e5deed92a43d4554e3df8d8ca5a47bf0615d6a5f39dbc9"}, + {file = "msgpack-1.0.7-cp312-cp312-win_amd64.whl", hash = "sha256:7687e22a31e976a0e7fc99c2f4d11ca45eff652a81eb8c8085e9609298916dcf"}, + {file = "msgpack-1.0.7-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5b6ccc0c85916998d788b295765ea0e9cb9aac7e4a8ed71d12e7d8ac31c23c95"}, + {file = "msgpack-1.0.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:235a31ec7db685f5c82233bddf9858748b89b8119bf4538d514536c485c15fe0"}, + {file = "msgpack-1.0.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cab3db8bab4b7e635c1c97270d7a4b2a90c070b33cbc00c99ef3f9be03d3e1f7"}, + {file = "msgpack-1.0.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bfdd914e55e0d2c9e1526de210f6fe8ffe9705f2b1dfcc4aecc92a4cb4b533d"}, + {file = "msgpack-1.0.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36e17c4592231a7dbd2ed09027823ab295d2791b3b1efb2aee874b10548b7524"}, + {file = "msgpack-1.0.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38949d30b11ae5f95c3c91917ee7a6b239f5ec276f271f28638dec9156f82cfc"}, + {file = "msgpack-1.0.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ff1d0899f104f3921d94579a5638847f783c9b04f2d5f229392ca77fba5b82fc"}, + {file = "msgpack-1.0.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:dc43f1ec66eb8440567186ae2f8c447d91e0372d793dfe8c222aec857b81a8cf"}, + {file = "msgpack-1.0.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:dd632777ff3beaaf629f1ab4396caf7ba0bdd075d948a69460d13d44357aca4c"}, + {file = "msgpack-1.0.7-cp38-cp38-win32.whl", hash = "sha256:4e71bc4416de195d6e9b4ee93ad3f2f6b2ce11d042b4d7a7ee00bbe0358bd0c2"}, + {file = "msgpack-1.0.7-cp38-cp38-win_amd64.whl", hash = "sha256:8f5b234f567cf76ee489502ceb7165c2a5cecec081db2b37e35332b537f8157c"}, + {file = "msgpack-1.0.7-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bfef2bb6ef068827bbd021017a107194956918ab43ce4d6dc945ffa13efbc25f"}, + {file = "msgpack-1.0.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:484ae3240666ad34cfa31eea7b8c6cd2f1fdaae21d73ce2974211df099a95d81"}, + {file = "msgpack-1.0.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3967e4ad1aa9da62fd53e346ed17d7b2e922cba5ab93bdd46febcac39be636fc"}, + {file = "msgpack-1.0.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8dd178c4c80706546702c59529ffc005681bd6dc2ea234c450661b205445a34d"}, + {file = "msgpack-1.0.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6ffbc252eb0d229aeb2f9ad051200668fc3a9aaa8994e49f0cb2ffe2b7867e7"}, + {file = "msgpack-1.0.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:822ea70dc4018c7e6223f13affd1c5c30c0f5c12ac1f96cd8e9949acddb48a61"}, + {file = "msgpack-1.0.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:384d779f0d6f1b110eae74cb0659d9aa6ff35aaf547b3955abf2ab4c901c4819"}, + {file = "msgpack-1.0.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f64e376cd20d3f030190e8c32e1c64582eba56ac6dc7d5b0b49a9d44021b52fd"}, + {file = "msgpack-1.0.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5ed82f5a7af3697b1c4786053736f24a0efd0a1b8a130d4c7bfee4b9ded0f08f"}, + {file = "msgpack-1.0.7-cp39-cp39-win32.whl", hash = "sha256:f26a07a6e877c76a88e3cecac8531908d980d3d5067ff69213653649ec0f60ad"}, + {file = "msgpack-1.0.7-cp39-cp39-win_amd64.whl", hash = "sha256:1dc93e8e4653bdb5910aed79f11e165c85732067614f180f70534f056da97db3"}, + {file = "msgpack-1.0.7.tar.gz", hash = "sha256:572efc93db7a4d27e404501975ca6d2d9775705c2d922390d878fcf768d92c87"}, ] [[package]] name = "multidict" version = "6.0.4" description = "multidict implementation" -category = "main" optional = true python-versions = ">=3.7" files = [ @@ -2286,7 +2232,6 @@ files = [ name = "mypy-boto3-glue" version = "1.34.7" description = "Type annotations for boto3.Glue 1.34.7 service generated with mypy-boto3-builder 7.23.0" -category = "main" optional = true python-versions = ">=3.8" files = [ @@ -2301,7 +2246,6 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} name = "networkx" version = "3.1" description = "Python package for creating and manipulating graphs and networks" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2320,7 +2264,6 @@ test = ["codecov (>=2.1)", "pytest (>=7.2)", "pytest-cov (>=4.0)"] name = "nodeenv" version = "1.8.0" description = "Node.js virtual environment builder" -category = "dev" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" files = [ @@ -2335,7 +2278,6 @@ setuptools = "*" name = "numpy" version = "1.24.4" description = "Fundamental package for array computing in Python" -category = "main" optional = true python-versions = ">=3.8" files = [ @@ -2369,11 +2311,55 @@ files = [ {file = "numpy-1.24.4.tar.gz", hash = "sha256:80f5e3a4e498641401868df4208b74581206afbee7cf7b8329daae82676d9463"}, ] +[[package]] +name = "numpy" +version = "1.26.3" +description = "Fundamental package for array computing in Python" +optional = true +python-versions = ">=3.9" +files = [ + {file = "numpy-1.26.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:806dd64230dbbfaca8a27faa64e2f414bf1c6622ab78cc4264f7f5f028fee3bf"}, + {file = "numpy-1.26.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02f98011ba4ab17f46f80f7f8f1c291ee7d855fcef0a5a98db80767a468c85cd"}, + {file = "numpy-1.26.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d45b3ec2faed4baca41c76617fcdcfa4f684ff7a151ce6fc78ad3b6e85af0a6"}, + {file = "numpy-1.26.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdd2b45bf079d9ad90377048e2747a0c82351989a2165821f0c96831b4a2a54b"}, + {file = "numpy-1.26.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:211ddd1e94817ed2d175b60b6374120244a4dd2287f4ece45d49228b4d529178"}, + {file = "numpy-1.26.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b1240f767f69d7c4c8a29adde2310b871153df9b26b5cb2b54a561ac85146485"}, + {file = "numpy-1.26.3-cp310-cp310-win32.whl", hash = "sha256:21a9484e75ad018974a2fdaa216524d64ed4212e418e0a551a2d83403b0531d3"}, + {file = "numpy-1.26.3-cp310-cp310-win_amd64.whl", hash = "sha256:9e1591f6ae98bcfac2a4bbf9221c0b92ab49762228f38287f6eeb5f3f55905ce"}, + {file = "numpy-1.26.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:b831295e5472954104ecb46cd98c08b98b49c69fdb7040483aff799a755a7374"}, + {file = "numpy-1.26.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9e87562b91f68dd8b1c39149d0323b42e0082db7ddb8e934ab4c292094d575d6"}, + {file = "numpy-1.26.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c66d6fec467e8c0f975818c1796d25c53521124b7cfb760114be0abad53a0a2"}, + {file = "numpy-1.26.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f25e2811a9c932e43943a2615e65fc487a0b6b49218899e62e426e7f0a57eeda"}, + {file = "numpy-1.26.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:af36e0aa45e25c9f57bf684b1175e59ea05d9a7d3e8e87b7ae1a1da246f2767e"}, + {file = "numpy-1.26.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:51c7f1b344f302067b02e0f5b5d2daa9ed4a721cf49f070280ac202738ea7f00"}, + {file = "numpy-1.26.3-cp311-cp311-win32.whl", hash = "sha256:7ca4f24341df071877849eb2034948459ce3a07915c2734f1abb4018d9c49d7b"}, + {file = "numpy-1.26.3-cp311-cp311-win_amd64.whl", hash = "sha256:39763aee6dfdd4878032361b30b2b12593fb445ddb66bbac802e2113eb8a6ac4"}, + {file = "numpy-1.26.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a7081fd19a6d573e1a05e600c82a1c421011db7935ed0d5c483e9dd96b99cf13"}, + {file = "numpy-1.26.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12c70ac274b32bc00c7f61b515126c9205323703abb99cd41836e8125ea0043e"}, + {file = "numpy-1.26.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f784e13e598e9594750b2ef6729bcd5a47f6cfe4a12cca13def35e06d8163e3"}, + {file = "numpy-1.26.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f24750ef94d56ce6e33e4019a8a4d68cfdb1ef661a52cdaee628a56d2437419"}, + {file = "numpy-1.26.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:77810ef29e0fb1d289d225cabb9ee6cf4d11978a00bb99f7f8ec2132a84e0166"}, + {file = "numpy-1.26.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8ed07a90f5450d99dad60d3799f9c03c6566709bd53b497eb9ccad9a55867f36"}, + {file = "numpy-1.26.3-cp312-cp312-win32.whl", hash = "sha256:f73497e8c38295aaa4741bdfa4fda1a5aedda5473074369eca10626835445511"}, + {file = "numpy-1.26.3-cp312-cp312-win_amd64.whl", hash = "sha256:da4b0c6c699a0ad73c810736303f7fbae483bcb012e38d7eb06a5e3b432c981b"}, + {file = "numpy-1.26.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1666f634cb3c80ccbd77ec97bc17337718f56d6658acf5d3b906ca03e90ce87f"}, + {file = "numpy-1.26.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:18c3319a7d39b2c6a9e3bb75aab2304ab79a811ac0168a671a62e6346c29b03f"}, + {file = "numpy-1.26.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b7e807d6888da0db6e7e75838444d62495e2b588b99e90dd80c3459594e857b"}, + {file = "numpy-1.26.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4d362e17bcb0011738c2d83e0a65ea8ce627057b2fdda37678f4374a382a137"}, + {file = "numpy-1.26.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b8c275f0ae90069496068c714387b4a0eba5d531aace269559ff2b43655edd58"}, + {file = "numpy-1.26.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:cc0743f0302b94f397a4a65a660d4cd24267439eb16493fb3caad2e4389bccbb"}, + {file = "numpy-1.26.3-cp39-cp39-win32.whl", hash = "sha256:9bc6d1a7f8cedd519c4b7b1156d98e051b726bf160715b769106661d567b3f03"}, + {file = "numpy-1.26.3-cp39-cp39-win_amd64.whl", hash = "sha256:867e3644e208c8922a3be26fc6bbf112a035f50f0a86497f98f228c50c607bb2"}, + {file = "numpy-1.26.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:3c67423b3703f8fbd90f5adaa37f85b5794d3366948efe9a5190a5f3a83fc34e"}, + {file = "numpy-1.26.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46f47ee566d98849323f01b349d58f2557f02167ee301e5e28809a8c0e27a2d0"}, + {file = "numpy-1.26.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a8474703bffc65ca15853d5fd4d06b18138ae90c17c8d12169968e998e448bb5"}, + {file = "numpy-1.26.3.tar.gz", hash = "sha256:697df43e2b6310ecc9d95f05d5ef20eacc09c7c4ecc9da3f235d39e71b7da1e4"}, +] + [[package]] name = "oauthlib" version = "3.2.2" description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" -category = "main" optional = true python-versions = ">=3.6" files = [ @@ -2388,57 +2374,55 @@ signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] [[package]] name = "openapi-schema-validator" -version = "0.6.2" +version = "0.4.3" description = "OpenAPI schema validation for Python" -category = "dev" optional = false -python-versions = ">=3.8.0,<4.0.0" +python-versions = ">=3.7.0,<4.0.0" files = [ - {file = "openapi_schema_validator-0.6.2-py3-none-any.whl", hash = "sha256:c4887c1347c669eb7cded9090f4438b710845cd0f90d1fb9e1b3303fb37339f8"}, - {file = "openapi_schema_validator-0.6.2.tar.gz", hash = "sha256:11a95c9c9017912964e3e5f2545a5b11c3814880681fcacfb73b1759bb4f2804"}, + {file = "openapi_schema_validator-0.4.3-py3-none-any.whl", hash = "sha256:f1eff2a7936546a3ce62b88a17d09de93c9bd229cbc43cb696c988a61a382548"}, + {file = "openapi_schema_validator-0.4.3.tar.gz", hash = "sha256:6940dba9f4906c97078fea6fd9d5a3a3384207db368c4e32f6af6abd7c5c560b"}, ] [package.dependencies] -jsonschema = ">=4.19.1,<5.0.0" -jsonschema-specifications = ">=2023.5.2,<2024.0.0" +jsonschema = ">=4.0.0,<5.0.0" rfc3339-validator = "*" [[package]] name = "openapi-spec-validator" -version = "0.7.1" +version = "0.5.5" description = "OpenAPI 2.0 (aka Swagger) and OpenAPI 3 spec validator" -category = "dev" optional = false -python-versions = ">=3.8.0,<4.0.0" +python-versions = ">=3.7.0,<4.0.0" files = [ - {file = "openapi_spec_validator-0.7.1-py3-none-any.whl", hash = "sha256:3c81825043f24ccbcd2f4b149b11e8231abce5ba84f37065e14ec947d8f4e959"}, - {file = "openapi_spec_validator-0.7.1.tar.gz", hash = "sha256:8577b85a8268685da6f8aa30990b83b7960d4d1117e901d451b5d572605e5ec7"}, + {file = "openapi_spec_validator-0.5.5-py3-none-any.whl", hash = "sha256:93ba247f585e1447214b4207728a7cce3726d148238217be69e6b8725c118fbe"}, + {file = "openapi_spec_validator-0.5.5.tar.gz", hash = "sha256:3010df5237748e25d7fac2b2aaf13457c1afd02735b2bd6f008a10079c8f443a"}, ] [package.dependencies] -importlib-resources = {version = ">=5.8,<7.0", markers = "python_version < \"3.9\""} -jsonschema = ">=4.18.0,<5.0.0" -jsonschema-path = ">=0.3.1,<0.4.0" +importlib-resources = {version = ">=5.8.0,<6.0.0", markers = "python_version < \"3.9\""} +jsonschema = ">=4.0.0,<5.0.0" +jsonschema-spec = ">=0.1.1,<0.2.0" lazy-object-proxy = ">=1.7.1,<2.0.0" -openapi-schema-validator = ">=0.6.0,<0.7.0" +openapi-schema-validator = ">=0.4.2,<0.5.0" + +[package.extras] +requests = ["requests"] [[package]] name = "packaging" -version = "23.1" +version = "23.2" description = "Core utilities for Python packages" -category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"}, - {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, 
+ {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, ] [[package]] name = "pandas" version = "2.0.3" description = "Powerful data structures for data analysis, time series, and statistics" -category = "main" optional = true python-versions = ">=3.8" files = [ @@ -2472,7 +2456,7 @@ files = [ [package.dependencies] numpy = [ {version = ">=1.20.3", markers = "python_version < \"3.10\""}, - {version = ">=1.21.0", markers = "python_version >= \"3.10\""}, + {version = ">=1.21.0", markers = "python_version >= \"3.10\" and python_version < \"3.11\""}, {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, ] python-dateutil = ">=2.8.2" @@ -2506,7 +2490,6 @@ xml = ["lxml (>=4.6.3)"] name = "pathable" version = "0.4.3" description = "Object-oriented paths" -category = "dev" optional = false python-versions = ">=3.7.0,<4.0.0" files = [ @@ -2518,7 +2501,6 @@ files = [ name = "pbr" version = "6.0.0" description = "Python Build Reasonableness" -category = "dev" optional = false python-versions = ">=2.6" files = [ @@ -2530,7 +2512,6 @@ files = [ name = "pkgutil-resolve-name" version = "1.3.10" description = "Resolve a name to an object." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2540,14 +2521,13 @@ files = [ [[package]] name = "platformdirs" -version = "3.10.0" +version = "4.1.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." -category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "platformdirs-3.10.0-py3-none-any.whl", hash = "sha256:d7c24979f292f916dc9cbf8648319032f551ea8c49a4c9bf2fb556a02070ec1d"}, - {file = "platformdirs-3.10.0.tar.gz", hash = "sha256:b45696dab2d7cc691a3226759c0d3b00c47c8b6e293d96f6436f733303f77f6d"}, + {file = "platformdirs-4.1.0-py3-none-any.whl", hash = "sha256:11c8f37bcca40db96d8144522d925583bdb7a31f7b0e37e3ed4318400a8e2380"}, + {file = "platformdirs-4.1.0.tar.gz", hash = "sha256:906d548203468492d432bcb294d4bc2fff751bf84971fbb2c10918cc206ee420"}, ] [package.extras] @@ -2558,7 +2538,6 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-co name = "pluggy" version = "1.3.0" description = "plugin and hook calling mechanisms for python" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2574,7 +2553,6 @@ testing = ["pytest", "pytest-benchmark"] name = "portalocker" version = "2.8.2" description = "Wraps the portalocker recipe for easy usage" -category = "main" optional = true python-versions = ">=3.8" files = [ @@ -2594,7 +2572,6 @@ tests = ["pytest (>=5.4.1)", "pytest-cov (>=2.8.1)", "pytest-mypy (>=0.8.0)", "p name = "pre-commit" version = "3.5.0" description = "A framework for managing and maintaining multi-language pre-commit hooks." 
-category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2611,32 +2588,28 @@ virtualenv = ">=20.10.0" [[package]] name = "protobuf" -version = "4.24.3" +version = "4.25.2" description = "" -category = "main" optional = true -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "protobuf-4.24.3-cp310-abi3-win32.whl", hash = "sha256:20651f11b6adc70c0f29efbe8f4a94a74caf61b6200472a9aea6e19898f9fcf4"}, - {file = "protobuf-4.24.3-cp310-abi3-win_amd64.whl", hash = "sha256:3d42e9e4796a811478c783ef63dc85b5a104b44aaaca85d4864d5b886e4b05e3"}, - {file = "protobuf-4.24.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:6e514e8af0045be2b56e56ae1bb14f43ce7ffa0f68b1c793670ccbe2c4fc7d2b"}, - {file = "protobuf-4.24.3-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:ba53c2f04798a326774f0e53b9c759eaef4f6a568ea7072ec6629851c8435959"}, - {file = "protobuf-4.24.3-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:f6ccbcf027761a2978c1406070c3788f6de4a4b2cc20800cc03d52df716ad675"}, - {file = "protobuf-4.24.3-cp37-cp37m-win32.whl", hash = "sha256:1b182c7181a2891e8f7f3a1b5242e4ec54d1f42582485a896e4de81aa17540c2"}, - {file = "protobuf-4.24.3-cp37-cp37m-win_amd64.whl", hash = "sha256:b0271a701e6782880d65a308ba42bc43874dabd1a0a0f41f72d2dac3b57f8e76"}, - {file = "protobuf-4.24.3-cp38-cp38-win32.whl", hash = "sha256:e29d79c913f17a60cf17c626f1041e5288e9885c8579832580209de8b75f2a52"}, - {file = "protobuf-4.24.3-cp38-cp38-win_amd64.whl", hash = "sha256:067f750169bc644da2e1ef18c785e85071b7c296f14ac53e0900e605da588719"}, - {file = "protobuf-4.24.3-cp39-cp39-win32.whl", hash = "sha256:2da777d34b4f4f7613cdf85c70eb9a90b1fbef9d36ae4a0ccfe014b0b07906f1"}, - {file = "protobuf-4.24.3-cp39-cp39-win_amd64.whl", hash = "sha256:f631bb982c5478e0c1c70eab383af74a84be66945ebf5dd6b06fc90079668d0b"}, - {file = "protobuf-4.24.3-py3-none-any.whl", hash = "sha256:f6f8dc65625dadaad0c8545319c2e2f0424fede988368893ca3844261342c11a"}, - {file = "protobuf-4.24.3.tar.gz", hash = "sha256:12e9ad2ec079b833176d2921be2cb24281fa591f0b119b208b788adc48c2561d"}, + {file = "protobuf-4.25.2-cp310-abi3-win32.whl", hash = "sha256:b50c949608682b12efb0b2717f53256f03636af5f60ac0c1d900df6213910fd6"}, + {file = "protobuf-4.25.2-cp310-abi3-win_amd64.whl", hash = "sha256:8f62574857ee1de9f770baf04dde4165e30b15ad97ba03ceac65f760ff018ac9"}, + {file = "protobuf-4.25.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:2db9f8fa64fbdcdc93767d3cf81e0f2aef176284071507e3ede160811502fd3d"}, + {file = "protobuf-4.25.2-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:10894a2885b7175d3984f2be8d9850712c57d5e7587a2410720af8be56cdaf62"}, + {file = "protobuf-4.25.2-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:fc381d1dd0516343f1440019cedf08a7405f791cd49eef4ae1ea06520bc1c020"}, + {file = "protobuf-4.25.2-cp38-cp38-win32.whl", hash = "sha256:33a1aeef4b1927431d1be780e87b641e322b88d654203a9e9d93f218ee359e61"}, + {file = "protobuf-4.25.2-cp38-cp38-win_amd64.whl", hash = "sha256:47f3de503fe7c1245f6f03bea7e8d3ec11c6c4a2ea9ef910e3221c8a15516d62"}, + {file = "protobuf-4.25.2-cp39-cp39-win32.whl", hash = "sha256:5e5c933b4c30a988b52e0b7c02641760a5ba046edc5e43d3b94a74c9fc57c1b3"}, + {file = "protobuf-4.25.2-cp39-cp39-win_amd64.whl", hash = "sha256:d66a769b8d687df9024f2985d5137a337f957a0916cf5464d1513eee96a63ff0"}, + {file = "protobuf-4.25.2-py3-none-any.whl", hash = "sha256:a8b7a98d4ce823303145bf3c1a8bdb0f2f4642a414b196f04ad9853ed0c8f830"}, + {file = "protobuf-4.25.2.tar.gz", hash = 
"sha256:fe599e175cb347efc8ee524bcd4b902d11f7262c0e569ececcb89995c15f0a5e"}, ] [[package]] name = "psycopg2-binary" version = "2.9.9" description = "psycopg2 - Python-PostgreSQL Database Adapter" -category = "main" optional = true python-versions = ">=3.7" files = [ @@ -2666,7 +2639,6 @@ files = [ {file = "psycopg2_binary-2.9.9-cp311-cp311-win32.whl", hash = "sha256:dc4926288b2a3e9fd7b50dc6a1909a13bbdadfc67d93f3374d984e56f885579d"}, {file = "psycopg2_binary-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:b76bedd166805480ab069612119ea636f5ab8f8771e640ae103e05a4aae3e417"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:8532fd6e6e2dc57bcb3bc90b079c60de896d2128c5d9d6f24a63875a95a088cf"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b0605eaed3eb239e87df0d5e3c6489daae3f7388d455d0c0b4df899519c6a38d"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f8544b092a29a6ddd72f3556a9fcf249ec412e10ad28be6a0c0d948924f2212"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2d423c8d8a3c82d08fe8af900ad5b613ce3632a1249fd6a223941d0735fce493"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e5afae772c00980525f6d6ecf7cbca55676296b580c0e6abb407f15f3706996"}, @@ -2675,8 +2647,6 @@ files = [ {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:cb16c65dcb648d0a43a2521f2f0a2300f40639f6f8c1ecbc662141e4e3e1ee07"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:911dda9c487075abd54e644ccdf5e5c16773470a6a5d3826fda76699410066fb"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:57fede879f08d23c85140a360c6a77709113efd1c993923c59fde17aa27599fe"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-win32.whl", hash = "sha256:64cf30263844fa208851ebb13b0732ce674d8ec6a0c86a4e160495d299ba3c93"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-win_amd64.whl", hash = "sha256:81ff62668af011f9a48787564ab7eded4e9fb17a4a6a74af5ffa6a457400d2ab"}, {file = "psycopg2_binary-2.9.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2293b001e319ab0d869d660a704942c9e2cce19745262a8aba2115ef41a0a42a"}, {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03ef7df18daf2c4c07e2695e8cfd5ee7f748a1d54d802330985a78d2a5a6dca9"}, {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a602ea5aff39bb9fac6308e9c9d82b9a35c2bf288e184a816002c9fae930b77"}, @@ -2718,7 +2688,6 @@ files = [ name = "py-partiql-parser" version = "0.5.0" description = "Pure Python PartiQL Parser" -category = "dev" optional = false python-versions = "*" files = [ @@ -2733,7 +2702,6 @@ dev = ["black (==22.6.0)", "flake8", "mypy", "pytest"] name = "py4j" version = "0.10.9.7" description = "Enables Python programs to dynamically access arbitrary Java objects" -category = "dev" optional = false python-versions = "*" files = [ @@ -2745,7 +2713,6 @@ files = [ name = "pyarrow" version = "15.0.0" description = "Python library for Apache Arrow" -category = "main" optional = true python-versions = ">=3.8" files = [ @@ -2792,21 +2759,19 @@ numpy = ">=1.16.6,<2" [[package]] name = "pyasn1" -version = "0.5.0" +version = "0.5.1" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" -category = "main" optional = false 
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ - {file = "pyasn1-0.5.0-py2.py3-none-any.whl", hash = "sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57"}, - {file = "pyasn1-0.5.0.tar.gz", hash = "sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde"}, + {file = "pyasn1-0.5.1-py2.py3-none-any.whl", hash = "sha256:4439847c58d40b1d0a573d07e3856e95333f1976294494c325775aeca506eb58"}, + {file = "pyasn1-0.5.1.tar.gz", hash = "sha256:6d391a96e59b23130a5cfa74d6fd7f388dbbe26cc8f1edf39fdddf08d9d6676c"}, ] [[package]] name = "pyasn1-modules" version = "0.3.0" description = "A collection of ASN.1-based protocols modules" -category = "main" optional = true python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ @@ -2821,7 +2786,6 @@ pyasn1 = ">=0.4.6,<0.6.0" name = "pycparser" version = "2.21" description = "C parser in Python" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -2833,7 +2797,6 @@ files = [ name = "pydantic" version = "2.5.3" description = "Data validation using Python type hints" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2853,7 +2816,6 @@ email = ["email-validator (>=2.0.0)"] name = "pydantic-core" version = "2.14.6" description = "" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2969,24 +2931,23 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" [[package]] name = "pygments" -version = "2.16.1" +version = "2.17.2" description = "Pygments is a syntax highlighting package written in Python." -category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "Pygments-2.16.1-py3-none-any.whl", hash = "sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692"}, - {file = "Pygments-2.16.1.tar.gz", hash = "sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29"}, + {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, + {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, ] [package.extras] plugins = ["importlib-metadata"] +windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pyjwt" version = "2.8.0" description = "JSON Web Token implementation in Python" -category = "main" optional = true python-versions = ">=3.7" files = [ @@ -3007,7 +2968,6 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] name = "pyparsing" version = "3.1.1" description = "pyparsing module - Classes and methods to define and execute parsing grammars" -category = "main" optional = false python-versions = ">=3.6.8" files = [ @@ -3022,7 +2982,6 @@ diagrams = ["jinja2", "railroad-diagrams"] name = "pyproject-hooks" version = "1.0.0" description = "Wrappers to call pyproject.toml-based build backend hooks." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3037,7 +2996,6 @@ tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} name = "pyspark" version = "3.4.2" description = "Apache Spark Python API" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3058,7 +3016,6 @@ sql = ["numpy (>=1.15)", "pandas (>=1.0.5)", "pyarrow (>=1.0.0)"] name = "pytest" version = "7.4.4" description = "pytest: simple powerful testing with Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3081,7 +3038,6 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no name = "pytest-checkdocs" version = "2.10.1" description = "check the README when running tests" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -3102,7 +3058,6 @@ testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", name = "pytest-lazy-fixture" version = "0.6.3" description = "It helps to use fixtures in pytest.mark.parametrize" -category = "dev" optional = false python-versions = "*" files = [ @@ -3117,7 +3072,6 @@ pytest = ">=3.2.5" name = "pytest-mock" version = "3.12.0" description = "Thin-wrapper around the mock package for easier use with pytest" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -3135,7 +3089,6 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] name = "python-dateutil" version = "2.8.2" description = "Extensions to the standard Python datetime module" -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ @@ -3150,7 +3103,6 @@ six = ">=1.5" name = "python-jose" version = "3.3.0" description = "JOSE implementation in Python" -category = "dev" optional = false python-versions = "*" files = [ @@ -3173,7 +3125,6 @@ pycryptodome = ["pyasn1", "pycryptodome (>=3.3.1,<4.0.0)"] name = "python-snappy" version = "0.6.1" description = "Python library for the snappy compression library from Google" -category = "main" optional = true python-versions = "*" files = [ @@ -3231,7 +3182,6 @@ files = [ name = "pytz" version = "2023.3.post1" description = "World timezone definitions, modern and historical" -category = "main" optional = true python-versions = "*" files = [ @@ -3243,7 +3193,6 @@ files = [ name = "pywin32" version = "306" description = "Python for Window Extensions" -category = "dev" optional = false python-versions = "*" files = [ @@ -3267,7 +3216,6 @@ files = [ name = "pyyaml" version = "6.0.1" description = "YAML parser and emitter for Python" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3289,7 +3237,6 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, @@ -3326,36 +3273,35 @@ files = [ [[package]] name = "ray" -version = "2.7.1" +version = "2.7.2" description = "Ray provides a simple, universal API for building distributed applications." -category = "main" optional = true python-versions = "*" files = [ - {file = "ray-2.7.1-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:4a2c98ab42881836894f20408ce40c0fd7fe5da7f0bc69cf22c951ccceda55ed"}, - {file = "ray-2.7.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:53800aadfc07152bc8672d5fa91bb4dc17d96b572a9bd436dd00fd2e0d07ef6a"}, - {file = "ray-2.7.1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:17a425b4a2c2098f78fd0ab3831a35a53608d36466453e90c30a6495e9dce354"}, - {file = "ray-2.7.1-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:9681a8a7bf081e2244360206f3cd80d1a6adb4dc6330a507fd8c78ebe6e57365"}, - {file = "ray-2.7.1-cp310-cp310-win_amd64.whl", hash = "sha256:148c77050ceab3c90739147bb86ac535e9590046cc36364ae9eb15469ea16fbc"}, - {file = "ray-2.7.1-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:0b0e80e26d6899820c12301626a74a209ab29373f46caf5b48c3ae3f99ec1bc7"}, - {file = "ray-2.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b5d13e910bb3449ef7b25084dcc4f0b9a763d3aa7b2fdd39e3b4d93d8c266951"}, - {file = "ray-2.7.1-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:0a6e8a736fe5294a0b0064679e59e393c66942db81fdf95804bdc1495d1f1651"}, - {file = "ray-2.7.1-cp311-cp311-manylinux2014_x86_64.whl", hash = "sha256:f4c9f8a813444bd5346756db1a6d6e09a805b28b5fb6831e91b8d1324c12a888"}, - {file = "ray-2.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:85a8b0f122e4c14d2ee354fce9651834f7ffc9b60ebdce023a5ba8ca5841a6ee"}, - {file = "ray-2.7.1-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:bfa924bbc4042e83a0f31f058f08818418307252fceeee27c4c02bc0d3c02f3f"}, - {file = "ray-2.7.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:0f5657abb376eddf6b56489082d2f94ab36597a2f25da2849e2f66476b90dcc0"}, - {file = "ray-2.7.1-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:d548e1c67a512975c4241be64a8df2153ae6c29ee2f5b08834fadcad7dfc94a4"}, - {file = "ray-2.7.1-cp37-cp37m-win_amd64.whl", hash = "sha256:1f4c09a81971cc54d95be55b9b413fd12121a37528b402d1861a8fa0b4e85509"}, - {file = "ray-2.7.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:1f6d2508d117aac0b880d26a4db65a9f90def2d688709b62e0d039879c3afc7a"}, - {file = "ray-2.7.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:32a6c0866d559d4e6c623ff220cd0790d2da1f3785073a5d0444b8f0486ff541"}, - {file = "ray-2.7.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:d035642e6033f43551a0c17e2363a392739f01df6b4072c5ed71cf3096936d33"}, - {file = "ray-2.7.1-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:a366569d1bd220a92af0dbe092821a11d1ff8ad7b00ed4f74b8a5f380e34ccc7"}, - {file = "ray-2.7.1-cp38-cp38-win_amd64.whl", hash = "sha256:6fe65dc7f83f1c617af3068d84f8c67f3371b1a48776e44ab6af54998891364c"}, - {file = "ray-2.7.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:3c1501ca56da394e07213efd5be42c2cf0a2eae68d76949d26a3133154d6d9ff"}, - {file = "ray-2.7.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:57f7e05ad275317158c447680705e046410f68d2a5992e16d07bbc2cc79da2b3"}, - {file = "ray-2.7.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b5410ae53c765108c65821fc5e5968509579f98a64d275e103408e1b068e8ca8"}, - {file = 
"ray-2.7.1-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:1b096abab78b63db6c1a2633f242dd8b3c51e395b574215f3cb8e47f5d7364b9"}, - {file = "ray-2.7.1-cp39-cp39-win_amd64.whl", hash = "sha256:c03fe26443598bd7ad1c22de4585daec324bc03eabc04d3c2f805d9697a554d6"}, + {file = "ray-2.7.2-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:4c415fc90df8a29cf0d594774a456caecf05103d772d487abb6fdda3397ee68e"}, + {file = "ray-2.7.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0434fe4eb416e5e4f39dbafdd1ead3407f72589cae19d1739b90a35d951e17bc"}, + {file = "ray-2.7.2-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:b8966bc9e8d005600bdfdb1bfe19241ffbf76b3c111d3931cad2200bc2190d58"}, + {file = "ray-2.7.2-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:162f7e637067a6408f3041755b8df649624fb7200dc7566b142cc5877580b6e3"}, + {file = "ray-2.7.2-cp310-cp310-win_amd64.whl", hash = "sha256:9411fc45e94608c78b0bf77e3ed1dfddbafa0c16e12bcec81cbce26c7ab0791c"}, + {file = "ray-2.7.2-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:b0db75f9c9116c80d6846498b099f2738ad8b9081a94c625365c6dc95baf50bc"}, + {file = "ray-2.7.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:20c782af16c9ec5a6ef9c933cea0b2be1a81a90162a4f14415b8692e5a3e1ceb"}, + {file = "ray-2.7.2-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:9a94ee86699fcc42c50ff12b482ad817e71027ceae5c7c26af251931b32ee6d3"}, + {file = "ray-2.7.2-cp311-cp311-manylinux2014_x86_64.whl", hash = "sha256:75cfbf61383b5e6940cced2033c579e59458fd07d20ba0cf6bcf75f62fb101a2"}, + {file = "ray-2.7.2-cp311-cp311-win_amd64.whl", hash = "sha256:33f99896e8eb19bcd4b029d3b22d9c50e56acd9a3774ff2b074e8c6926124cff"}, + {file = "ray-2.7.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:b8fbf0be759f0a575ec1f504327bffb5d2340bca33fc37beafed91cb78bfe5f5"}, + {file = "ray-2.7.2-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:d5bd397b19b4faf8df82c46dd705b17cc976efefda48145b595c97dd117fa79f"}, + {file = "ray-2.7.2-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:ee681a5707d9e91850490f42ee007ed8c1c54ea38f5c831aada347f3cc1d9b03"}, + {file = "ray-2.7.2-cp37-cp37m-win_amd64.whl", hash = "sha256:155ec98c73c55086e855b2e1f7d6154301556c14bb85d69b17cab087bfcf5268"}, + {file = "ray-2.7.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:94817f7ae24b78213cbcafdeb44e44ae7132e617693725e2a1bbac0461a14382"}, + {file = "ray-2.7.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3217cbabb31bbfa87a594aaf27371e722a6d527621ef3b01b3f4603b2997cc0d"}, + {file = "ray-2.7.2-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:19bc3eaabc2635d3a53f43db8198d1a56263a0697401fb7716f90ff60415f2be"}, + {file = "ray-2.7.2-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:158c424cad4480e93a90d9460a8dd3390bafab220220bb8678f54bc1afecda43"}, + {file = "ray-2.7.2-cp38-cp38-win_amd64.whl", hash = "sha256:f1a41c3511ea90fef47b95721e6589427955503a3fcde1615c6371eeb2c515ab"}, + {file = "ray-2.7.2-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:6820a6369536e070a72d8bf439cc7085b17f0c5102fb51860b330bf231382e7e"}, + {file = "ray-2.7.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e226f8739a4726c445fc41dbb41f7440fa202ad97099ee9d35cc8bc91f1d2c5f"}, + {file = "ray-2.7.2-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:29930d38bd563d57e3457f85dddd0347c79018d12ef42db35abc648c127dce81"}, + {file = "ray-2.7.2-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:90eb9415f4abc39ce054f2148dcb97fbb9ae725dd150ea7b030ec9d7cf6859f2"}, + {file = "ray-2.7.2-cp39-cp39-win_amd64.whl", hash = 
"sha256:8010e9f0ac04d901a96b3876def3899867f705148a7d820c08c02c7322d08217"}, ] [package.dependencies] @@ -3376,9 +3322,9 @@ requests = "*" [package.extras] air = ["aiohttp (>=3.7)", "aiohttp-cors", "aiorwlock", "colorful", "fastapi", "fsspec", "gpustat (>=1.0.0)", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "numpy (>=1.20)", "opencensus", "pandas", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2)", "requests", "smart-open", "starlette", "tensorboardX (>=1.9)", "uvicorn", "virtualenv (>=20.0.24,<20.21.1)", "watchfiles"] -all = ["aiohttp (>=3.7)", "aiohttp-cors", "aiorwlock", "colorful", "dm-tree", "fastapi", "fsspec", "gpustat (>=1.0.0)", "grpcio (!=1.56.0)", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "gymnasium (==0.28.1)", "lz4", "numpy (>=1.20)", "opencensus", "opentelemetry-api", "opentelemetry-exporter-otlp", "opentelemetry-sdk", "pandas", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pyyaml", "ray-cpp (==2.7.1)", "requests", "rich", "scikit-image", "scipy", "smart-open", "starlette", "tensorboardX (>=1.9)", "typer", "uvicorn", "virtualenv (>=20.0.24,<20.21.1)", "watchfiles"] +all = ["aiohttp (>=3.7)", "aiohttp-cors", "aiorwlock", "colorful", "dm-tree", "fastapi", "fsspec", "gpustat (>=1.0.0)", "grpcio (!=1.56.0)", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "gymnasium (==0.28.1)", "lz4", "numpy (>=1.20)", "opencensus", "opentelemetry-api", "opentelemetry-exporter-otlp", "opentelemetry-sdk", "pandas", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pyyaml", "ray-cpp (==2.7.2)", "requests", "rich", "scikit-image", "scipy", "smart-open", "starlette", "tensorboardX (>=1.9)", "typer", "uvicorn", "virtualenv (>=20.0.24,<20.21.1)", "watchfiles"] client = ["grpcio (!=1.56.0)"] -cpp = ["ray-cpp (==2.7.1)"] +cpp = ["ray-cpp (==2.7.2)"] data = ["fsspec", "numpy (>=1.20)", "pandas (>=1.3)", "pyarrow (>=6.0.1)"] default = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "gpustat (>=1.0.0)", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "opencensus", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pydantic (<2)", "requests", "smart-open", "virtualenv (>=20.0.24,<20.21.1)"] observability = ["opentelemetry-api", "opentelemetry-exporter-otlp", "opentelemetry-sdk"] @@ -3390,14 +3336,13 @@ tune = ["fsspec", "pandas", "pyarrow (>=6.0.1)", "requests", "tensorboardX (>=1. [[package]] name = "referencing" -version = "0.30.2" +version = "0.32.1" description = "JSON Referencing + Python" -category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "referencing-0.30.2-py3-none-any.whl", hash = "sha256:449b6669b6121a9e96a7f9e410b245d471e8d48964c67113ce9afe50c8dd7bdf"}, - {file = "referencing-0.30.2.tar.gz", hash = "sha256:794ad8003c65938edcdbc027f1933215e0d0ccc0291e3ce20a4d87432b59efc0"}, + {file = "referencing-0.32.1-py3-none-any.whl", hash = "sha256:7e4dc12271d8e15612bfe35792f5ea1c40970dadf8624602e33db2758f7ee554"}, + {file = "referencing-0.32.1.tar.gz", hash = "sha256:3c57da0513e9563eb7e203ebe9bb3a1b509b042016433bd1e45a2853466c3dd3"}, ] [package.dependencies] @@ -3406,107 +3351,110 @@ rpds-py = ">=0.7.0" [[package]] name = "regex" -version = "2023.10.3" +version = "2023.12.25" description = "Alternative regular expression module, to replace re." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "regex-2023.10.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4c34d4f73ea738223a094d8e0ffd6d2c1a1b4c175da34d6b0de3d8d69bee6bcc"}, - {file = "regex-2023.10.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a8f4e49fc3ce020f65411432183e6775f24e02dff617281094ba6ab079ef0915"}, - {file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4cd1bccf99d3ef1ab6ba835308ad85be040e6a11b0977ef7ea8c8005f01a3c29"}, - {file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:81dce2ddc9f6e8f543d94b05d56e70d03a0774d32f6cca53e978dc01e4fc75b8"}, - {file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c6b4d23c04831e3ab61717a707a5d763b300213db49ca680edf8bf13ab5d91b"}, - {file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c15ad0aee158a15e17e0495e1e18741573d04eb6da06d8b84af726cfc1ed02ee"}, - {file = "regex-2023.10.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6239d4e2e0b52c8bd38c51b760cd870069f0bdf99700a62cd509d7a031749a55"}, - {file = "regex-2023.10.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4a8bf76e3182797c6b1afa5b822d1d5802ff30284abe4599e1247be4fd6b03be"}, - {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d9c727bbcf0065cbb20f39d2b4f932f8fa1631c3e01fcedc979bd4f51fe051c5"}, - {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:3ccf2716add72f80714b9a63899b67fa711b654be3fcdd34fa391d2d274ce767"}, - {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:107ac60d1bfdc3edb53be75e2a52aff7481b92817cfdddd9b4519ccf0e54a6ff"}, - {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:00ba3c9818e33f1fa974693fb55d24cdc8ebafcb2e4207680669d8f8d7cca79a"}, - {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f0a47efb1dbef13af9c9a54a94a0b814902e547b7f21acb29434504d18f36e3a"}, - {file = "regex-2023.10.3-cp310-cp310-win32.whl", hash = "sha256:36362386b813fa6c9146da6149a001b7bd063dabc4d49522a1f7aa65b725c7ec"}, - {file = "regex-2023.10.3-cp310-cp310-win_amd64.whl", hash = "sha256:c65a3b5330b54103e7d21cac3f6bf3900d46f6d50138d73343d9e5b2900b2353"}, - {file = "regex-2023.10.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:90a79bce019c442604662d17bf69df99090e24cdc6ad95b18b6725c2988a490e"}, - {file = "regex-2023.10.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c7964c2183c3e6cce3f497e3a9f49d182e969f2dc3aeeadfa18945ff7bdd7051"}, - {file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ef80829117a8061f974b2fda8ec799717242353bff55f8a29411794d635d964"}, - {file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5addc9d0209a9afca5fc070f93b726bf7003bd63a427f65ef797a931782e7edc"}, - {file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c148bec483cc4b421562b4bcedb8e28a3b84fcc8f0aa4418e10898f3c2c0eb9b"}, - {file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d1f21af4c1539051049796a0f50aa342f9a27cde57318f2fc41ed50b0dbc4ac"}, - {file = 
"regex-2023.10.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0b9ac09853b2a3e0d0082104036579809679e7715671cfbf89d83c1cb2a30f58"}, - {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ebedc192abbc7fd13c5ee800e83a6df252bec691eb2c4bedc9f8b2e2903f5e2a"}, - {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d8a993c0a0ffd5f2d3bda23d0cd75e7086736f8f8268de8a82fbc4bd0ac6791e"}, - {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:be6b7b8d42d3090b6c80793524fa66c57ad7ee3fe9722b258aec6d0672543fd0"}, - {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4023e2efc35a30e66e938de5aef42b520c20e7eda7bb5fb12c35e5d09a4c43f6"}, - {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0d47840dc05e0ba04fe2e26f15126de7c755496d5a8aae4a08bda4dd8d646c54"}, - {file = "regex-2023.10.3-cp311-cp311-win32.whl", hash = "sha256:9145f092b5d1977ec8c0ab46e7b3381b2fd069957b9862a43bd383e5c01d18c2"}, - {file = "regex-2023.10.3-cp311-cp311-win_amd64.whl", hash = "sha256:b6104f9a46bd8743e4f738afef69b153c4b8b592d35ae46db07fc28ae3d5fb7c"}, - {file = "regex-2023.10.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:bff507ae210371d4b1fe316d03433ac099f184d570a1a611e541923f78f05037"}, - {file = "regex-2023.10.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:be5e22bbb67924dea15039c3282fa4cc6cdfbe0cbbd1c0515f9223186fc2ec5f"}, - {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a992f702c9be9c72fa46f01ca6e18d131906a7180950958f766c2aa294d4b41"}, - {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7434a61b158be563c1362d9071358f8ab91b8d928728cd2882af060481244c9e"}, - {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c2169b2dcabf4e608416f7f9468737583ce5f0a6e8677c4efbf795ce81109d7c"}, - {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9e908ef5889cda4de038892b9accc36d33d72fb3e12c747e2799a0e806ec841"}, - {file = "regex-2023.10.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12bd4bc2c632742c7ce20db48e0d99afdc05e03f0b4c1af90542e05b809a03d9"}, - {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bc72c231f5449d86d6c7d9cc7cd819b6eb30134bb770b8cfdc0765e48ef9c420"}, - {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bce8814b076f0ce5766dc87d5a056b0e9437b8e0cd351b9a6c4e1134a7dfbda9"}, - {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:ba7cd6dc4d585ea544c1412019921570ebd8a597fabf475acc4528210d7c4a6f"}, - {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b0c7d2f698e83f15228ba41c135501cfe7d5740181d5903e250e47f617eb4292"}, - {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5a8f91c64f390ecee09ff793319f30a0f32492e99f5dc1c72bc361f23ccd0a9a"}, - {file = "regex-2023.10.3-cp312-cp312-win32.whl", hash = "sha256:ad08a69728ff3c79866d729b095872afe1e0557251da4abb2c5faff15a91d19a"}, - {file = "regex-2023.10.3-cp312-cp312-win_amd64.whl", hash = "sha256:39cdf8d141d6d44e8d5a12a8569d5a227f645c87df4f92179bd06e2e2705e76b"}, - {file = "regex-2023.10.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4a3ee019a9befe84fa3e917a2dd378807e423d013377a884c1970a3c2792d293"}, - 
{file = "regex-2023.10.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76066d7ff61ba6bf3cb5efe2428fc82aac91802844c022d849a1f0f53820502d"}, - {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfe50b61bab1b1ec260fa7cd91106fa9fece57e6beba05630afe27c71259c59b"}, - {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fd88f373cb71e6b59b7fa597e47e518282455c2734fd4306a05ca219a1991b0"}, - {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3ab05a182c7937fb374f7e946f04fb23a0c0699c0450e9fb02ef567412d2fa3"}, - {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dac37cf08fcf2094159922edc7a2784cfcc5c70f8354469f79ed085f0328ebdf"}, - {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e54ddd0bb8fb626aa1f9ba7b36629564544954fff9669b15da3610c22b9a0991"}, - {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3367007ad1951fde612bf65b0dffc8fd681a4ab98ac86957d16491400d661302"}, - {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:16f8740eb6dbacc7113e3097b0a36065a02e37b47c936b551805d40340fb9971"}, - {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:f4f2ca6df64cbdd27f27b34f35adb640b5d2d77264228554e68deda54456eb11"}, - {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:39807cbcbe406efca2a233884e169d056c35aa7e9f343d4e78665246a332f597"}, - {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:7eece6fbd3eae4a92d7c748ae825cbc1ee41a89bb1c3db05b5578ed3cfcfd7cb"}, - {file = "regex-2023.10.3-cp37-cp37m-win32.whl", hash = "sha256:ce615c92d90df8373d9e13acddd154152645c0dc060871abf6bd43809673d20a"}, - {file = "regex-2023.10.3-cp37-cp37m-win_amd64.whl", hash = "sha256:0f649fa32fe734c4abdfd4edbb8381c74abf5f34bc0b3271ce687b23729299ed"}, - {file = "regex-2023.10.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9b98b7681a9437262947f41c7fac567c7e1f6eddd94b0483596d320092004533"}, - {file = "regex-2023.10.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:91dc1d531f80c862441d7b66c4505cd6ea9d312f01fb2f4654f40c6fdf5cc37a"}, - {file = "regex-2023.10.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82fcc1f1cc3ff1ab8a57ba619b149b907072e750815c5ba63e7aa2e1163384a4"}, - {file = "regex-2023.10.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7979b834ec7a33aafae34a90aad9f914c41fd6eaa8474e66953f3f6f7cbd4368"}, - {file = "regex-2023.10.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ef71561f82a89af6cfcbee47f0fabfdb6e63788a9258e913955d89fdd96902ab"}, - {file = "regex-2023.10.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd829712de97753367153ed84f2de752b86cd1f7a88b55a3a775eb52eafe8a94"}, - {file = "regex-2023.10.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00e871d83a45eee2f8688d7e6849609c2ca2a04a6d48fba3dff4deef35d14f07"}, - {file = "regex-2023.10.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:706e7b739fdd17cb89e1fbf712d9dc21311fc2333f6d435eac2d4ee81985098c"}, - {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:cc3f1c053b73f20c7ad88b0d1d23be7e7b3901229ce89f5000a8399746a6e039"}, - {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6f85739e80d13644b981a88f529d79c5bdf646b460ba190bffcaf6d57b2a9863"}, - {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:741ba2f511cc9626b7561a440f87d658aabb3d6b744a86a3c025f866b4d19e7f"}, - {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e77c90ab5997e85901da85131fd36acd0ed2221368199b65f0d11bca44549711"}, - {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:979c24cbefaf2420c4e377ecd1f165ea08cc3d1fbb44bdc51bccbbf7c66a2cb4"}, - {file = "regex-2023.10.3-cp38-cp38-win32.whl", hash = "sha256:58837f9d221744d4c92d2cf7201c6acd19623b50c643b56992cbd2b745485d3d"}, - {file = "regex-2023.10.3-cp38-cp38-win_amd64.whl", hash = "sha256:c55853684fe08d4897c37dfc5faeff70607a5f1806c8be148f1695be4a63414b"}, - {file = "regex-2023.10.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2c54e23836650bdf2c18222c87f6f840d4943944146ca479858404fedeb9f9af"}, - {file = "regex-2023.10.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:69c0771ca5653c7d4b65203cbfc5e66db9375f1078689459fe196fe08b7b4930"}, - {file = "regex-2023.10.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ac965a998e1388e6ff2e9781f499ad1eaa41e962a40d11c7823c9952c77123e"}, - {file = "regex-2023.10.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1c0e8fae5b27caa34177bdfa5a960c46ff2f78ee2d45c6db15ae3f64ecadde14"}, - {file = "regex-2023.10.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6c56c3d47da04f921b73ff9415fbaa939f684d47293f071aa9cbb13c94afc17d"}, - {file = "regex-2023.10.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ef1e014eed78ab650bef9a6a9cbe50b052c0aebe553fb2881e0453717573f52"}, - {file = "regex-2023.10.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d29338556a59423d9ff7b6eb0cb89ead2b0875e08fe522f3e068b955c3e7b59b"}, - {file = "regex-2023.10.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9c6d0ced3c06d0f183b73d3c5920727268d2201aa0fe6d55c60d68c792ff3588"}, - {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:994645a46c6a740ee8ce8df7911d4aee458d9b1bc5639bc968226763d07f00fa"}, - {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:66e2fe786ef28da2b28e222c89502b2af984858091675044d93cb50e6f46d7af"}, - {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:11175910f62b2b8c055f2b089e0fedd694fe2be3941b3e2633653bc51064c528"}, - {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:06e9abc0e4c9ab4779c74ad99c3fc10d3967d03114449acc2c2762ad4472b8ca"}, - {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:fb02e4257376ae25c6dd95a5aec377f9b18c09be6ebdefa7ad209b9137b73d48"}, - {file = "regex-2023.10.3-cp39-cp39-win32.whl", hash = "sha256:3b2c3502603fab52d7619b882c25a6850b766ebd1b18de3df23b2f939360e1bd"}, - {file = "regex-2023.10.3-cp39-cp39-win_amd64.whl", hash = "sha256:adbccd17dcaff65704c856bd29951c58a1bd4b2b0f8ad6b826dbd543fe740988"}, - {file = "regex-2023.10.3.tar.gz", hash = "sha256:3fef4f844d2290ee0ba57addcec17eec9e3df73f10a2748485dfd6a3a188cc0f"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_universal2.whl", hash = 
"sha256:0694219a1d54336fd0445ea382d49d36882415c0134ee1e8332afd1529f0baa5"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b014333bd0217ad3d54c143de9d4b9a3ca1c5a29a6d0d554952ea071cff0f1f8"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d865984b3f71f6d0af64d0d88f5733521698f6c16f445bb09ce746c92c97c586"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e0eabac536b4cc7f57a5f3d095bfa557860ab912f25965e08fe1545e2ed8b4c"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c25a8ad70e716f96e13a637802813f65d8a6760ef48672aa3502f4c24ea8b400"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9b6d73353f777630626f403b0652055ebfe8ff142a44ec2cf18ae470395766e"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9cc99d6946d750eb75827cb53c4371b8b0fe89c733a94b1573c9dd16ea6c9e4"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88d1f7bef20c721359d8675f7d9f8e414ec5003d8f642fdfd8087777ff7f94b5"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cb3fe77aec8f1995611f966d0c656fdce398317f850d0e6e7aebdfe61f40e1cd"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7aa47c2e9ea33a4a2a05f40fcd3ea36d73853a2aae7b4feab6fc85f8bf2c9704"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:df26481f0c7a3f8739fecb3e81bc9da3fcfae34d6c094563b9d4670b047312e1"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c40281f7d70baf6e0db0c2f7472b31609f5bc2748fe7275ea65a0b4601d9b392"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:d94a1db462d5690ebf6ae86d11c5e420042b9898af5dcf278bd97d6bda065423"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ba1b30765a55acf15dce3f364e4928b80858fa8f979ad41f862358939bdd1f2f"}, + {file = "regex-2023.12.25-cp310-cp310-win32.whl", hash = "sha256:150c39f5b964e4d7dba46a7962a088fbc91f06e606f023ce57bb347a3b2d4630"}, + {file = "regex-2023.12.25-cp310-cp310-win_amd64.whl", hash = "sha256:09da66917262d9481c719599116c7dc0c321ffcec4b1f510c4f8a066f8768105"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1b9d811f72210fa9306aeb88385b8f8bcef0dfbf3873410413c00aa94c56c2b6"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d902a43085a308cef32c0d3aea962524b725403fd9373dea18110904003bac97"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d166eafc19f4718df38887b2bbe1467a4f74a9830e8605089ea7a30dd4da8887"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7ad32824b7f02bb3c9f80306d405a1d9b7bb89362d68b3c5a9be53836caebdb"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:636ba0a77de609d6510235b7f0e77ec494d2657108f777e8765efc060094c98c"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fda75704357805eb953a3ee15a2b240694a9a514548cd49b3c5124b4e2ad01b"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:f72cbae7f6b01591f90814250e636065850c5926751af02bb48da94dfced7baa"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db2a0b1857f18b11e3b0e54ddfefc96af46b0896fb678c85f63fb8c37518b3e7"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7502534e55c7c36c0978c91ba6f61703faf7ce733715ca48f499d3dbbd7657e0"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e8c7e08bb566de4faaf11984af13f6bcf6a08f327b13631d41d62592681d24fe"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:283fc8eed679758de38fe493b7d7d84a198b558942b03f017b1f94dda8efae80"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:f44dd4d68697559d007462b0a3a1d9acd61d97072b71f6d1968daef26bc744bd"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:67d3ccfc590e5e7197750fcb3a2915b416a53e2de847a728cfa60141054123d4"}, + {file = "regex-2023.12.25-cp311-cp311-win32.whl", hash = "sha256:68191f80a9bad283432385961d9efe09d783bcd36ed35a60fb1ff3f1ec2efe87"}, + {file = "regex-2023.12.25-cp311-cp311-win_amd64.whl", hash = "sha256:7d2af3f6b8419661a0c421584cfe8aaec1c0e435ce7e47ee2a97e344b98f794f"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8a0ccf52bb37d1a700375a6b395bff5dd15c50acb745f7db30415bae3c2b0715"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c3c4a78615b7762740531c27cf46e2f388d8d727d0c0c739e72048beb26c8a9d"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ad83e7545b4ab69216cef4cc47e344d19622e28aabec61574b20257c65466d6a"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7a635871143661feccce3979e1727c4e094f2bdfd3ec4b90dfd4f16f571a87a"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d498eea3f581fbe1b34b59c697512a8baef88212f92e4c7830fcc1499f5b45a5"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:43f7cd5754d02a56ae4ebb91b33461dc67be8e3e0153f593c509e21d219c5060"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51f4b32f793812714fd5307222a7f77e739b9bc566dc94a18126aba3b92b98a3"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba99d8077424501b9616b43a2d208095746fb1284fc5ba490139651f971d39d9"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4bfc2b16e3ba8850e0e262467275dd4d62f0d045e0e9eda2bc65078c0110a11f"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8c2c19dae8a3eb0ea45a8448356ed561be843b13cbc34b840922ddf565498c1c"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:60080bb3d8617d96f0fb7e19796384cc2467447ef1c491694850ebd3670bc457"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b77e27b79448e34c2c51c09836033056a0547aa360c45eeeb67803da7b0eedaf"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:518440c991f514331f4850a63560321f833979d145d7d81186dbe2f19e27ae3d"}, + {file = "regex-2023.12.25-cp312-cp312-win32.whl", hash = "sha256:e2610e9406d3b0073636a3a2e80db05a02f0c3169b5632022b4e81c0364bcda5"}, + {file = 
"regex-2023.12.25-cp312-cp312-win_amd64.whl", hash = "sha256:cc37b9aeebab425f11f27e5e9e6cf580be7206c6582a64467a14dda211abc232"}, + {file = "regex-2023.12.25-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:da695d75ac97cb1cd725adac136d25ca687da4536154cdc2815f576e4da11c69"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d126361607b33c4eb7b36debc173bf25d7805847346dd4d99b5499e1fef52bc7"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4719bb05094d7d8563a450cf8738d2e1061420f79cfcc1fa7f0a44744c4d8f73"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5dd58946bce44b53b06d94aa95560d0b243eb2fe64227cba50017a8d8b3cd3e2"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22a86d9fff2009302c440b9d799ef2fe322416d2d58fc124b926aa89365ec482"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2aae8101919e8aa05ecfe6322b278f41ce2994c4a430303c4cd163fef746e04f"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e692296c4cc2873967771345a876bcfc1c547e8dd695c6b89342488b0ea55cd8"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:263ef5cc10979837f243950637fffb06e8daed7f1ac1e39d5910fd29929e489a"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:d6f7e255e5fa94642a0724e35406e6cb7001c09d476ab5fce002f652b36d0c39"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:88ad44e220e22b63b0f8f81f007e8abbb92874d8ced66f32571ef8beb0643b2b"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:3a17d3ede18f9cedcbe23d2daa8a2cd6f59fe2bf082c567e43083bba3fb00347"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d15b274f9e15b1a0b7a45d2ac86d1f634d983ca40d6b886721626c47a400bf39"}, + {file = "regex-2023.12.25-cp37-cp37m-win32.whl", hash = "sha256:ed19b3a05ae0c97dd8f75a5d8f21f7723a8c33bbc555da6bbe1f96c470139d3c"}, + {file = "regex-2023.12.25-cp37-cp37m-win_amd64.whl", hash = "sha256:a6d1047952c0b8104a1d371f88f4ab62e6275567d4458c1e26e9627ad489b445"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b43523d7bc2abd757119dbfb38af91b5735eea45537ec6ec3a5ec3f9562a1c53"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:efb2d82f33b2212898f1659fb1c2e9ac30493ac41e4d53123da374c3b5541e64"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b7fca9205b59c1a3d5031f7e64ed627a1074730a51c2a80e97653e3e9fa0d415"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086dd15e9435b393ae06f96ab69ab2d333f5d65cbe65ca5a3ef0ec9564dfe770"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e81469f7d01efed9b53740aedd26085f20d49da65f9c1f41e822a33992cb1590"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:34e4af5b27232f68042aa40a91c3b9bb4da0eeb31b7632e0091afc4310afe6cb"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9852b76ab558e45b20bf1893b59af64a28bd3820b0c2efc80e0a70a4a3ea51c1"}, + {file = 
"regex-2023.12.25-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff100b203092af77d1a5a7abe085b3506b7eaaf9abf65b73b7d6905b6cb76988"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cc038b2d8b1470364b1888a98fd22d616fba2b6309c5b5f181ad4483e0017861"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:094ba386bb5c01e54e14434d4caabf6583334090865b23ef58e0424a6286d3dc"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5cd05d0f57846d8ba4b71d9c00f6f37d6b97d5e5ef8b3c3840426a475c8f70f4"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:9aa1a67bbf0f957bbe096375887b2505f5d8ae16bf04488e8b0f334c36e31360"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:98a2636994f943b871786c9e82bfe7883ecdaba2ef5df54e1450fa9869d1f756"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:37f8e93a81fc5e5bd8db7e10e62dc64261bcd88f8d7e6640aaebe9bc180d9ce2"}, + {file = "regex-2023.12.25-cp38-cp38-win32.whl", hash = "sha256:d78bd484930c1da2b9679290a41cdb25cc127d783768a0369d6b449e72f88beb"}, + {file = "regex-2023.12.25-cp38-cp38-win_amd64.whl", hash = "sha256:b521dcecebc5b978b447f0f69b5b7f3840eac454862270406a39837ffae4e697"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f7bc09bc9c29ebead055bcba136a67378f03d66bf359e87d0f7c759d6d4ffa31"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e14b73607d6231f3cc4622809c196b540a6a44e903bcfad940779c80dffa7be7"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9eda5f7a50141291beda3edd00abc2d4a5b16c29c92daf8d5bd76934150f3edc"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc6bb9aa69aacf0f6032c307da718f61a40cf970849e471254e0e91c56ffca95"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:298dc6354d414bc921581be85695d18912bea163a8b23cac9a2562bbcd5088b1"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f4e475a80ecbd15896a976aa0b386c5525d0ed34d5c600b6d3ebac0a67c7ddf"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:531ac6cf22b53e0696f8e1d56ce2396311254eb806111ddd3922c9d937151dae"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22f3470f7524b6da61e2020672df2f3063676aff444db1daa283c2ea4ed259d6"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:89723d2112697feaa320c9d351e5f5e7b841e83f8b143dba8e2d2b5f04e10923"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0ecf44ddf9171cd7566ef1768047f6e66975788258b1c6c6ca78098b95cf9a3d"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:905466ad1702ed4acfd67a902af50b8db1feeb9781436372261808df7a2a7bca"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:4558410b7a5607a645e9804a3e9dd509af12fb72b9825b13791a37cd417d73a5"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:7e316026cc1095f2a3e8cc012822c99f413b702eaa2ca5408a513609488cb62f"}, + {file = 
"regex-2023.12.25-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3b1de218d5375cd6ac4b5493e0b9f3df2be331e86520f23382f216c137913d20"}, + {file = "regex-2023.12.25-cp39-cp39-win32.whl", hash = "sha256:11a963f8e25ab5c61348d090bf1b07f1953929c13bd2309a0662e9ff680763c9"}, + {file = "regex-2023.12.25-cp39-cp39-win_amd64.whl", hash = "sha256:e693e233ac92ba83a87024e1d32b5f9ab15ca55ddd916d878146f4e3406b5c91"}, + {file = "regex-2023.12.25.tar.gz", hash = "sha256:29171aa128da69afdf4bde412d5bedc335f2ca8fcfe4489038577d05f16181e5"}, ] [[package]] name = "requests" version = "2.31.0" description = "Python HTTP for Humans." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3528,7 +3476,6 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "requests-mock" version = "1.11.0" description = "Mock out responses from the requests package" -category = "dev" optional = false python-versions = "*" files = [ @@ -3548,7 +3495,6 @@ test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "tes name = "requests-oauthlib" version = "1.3.1" description = "OAuthlib authentication support for Requests." -category = "main" optional = true python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -3565,30 +3511,27 @@ rsa = ["oauthlib[signedtoken] (>=3.0.0)"] [[package]] name = "responses" -version = "0.23.3" +version = "0.24.1" description = "A utility library for mocking out the `requests` Python library." -category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "responses-0.23.3-py3-none-any.whl", hash = "sha256:e6fbcf5d82172fecc0aa1860fd91e58cbfd96cee5e96da5b63fa6eb3caa10dd3"}, - {file = "responses-0.23.3.tar.gz", hash = "sha256:205029e1cb334c21cb4ec64fc7599be48b859a0fd381a42443cdd600bfe8b16a"}, + {file = "responses-0.24.1-py3-none-any.whl", hash = "sha256:a2b43f4c08bfb9c9bd242568328c65a34b318741d3fab884ac843c5ceeb543f9"}, + {file = "responses-0.24.1.tar.gz", hash = "sha256:b127c6ca3f8df0eb9cc82fd93109a3007a86acb24871834c47b77765152ecf8c"}, ] [package.dependencies] pyyaml = "*" requests = ">=2.30.0,<3.0" -types-PyYAML = "*" urllib3 = ">=1.25.10,<3.0" [package.extras] -tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "tomli", "tomli-w", "types-requests"] +tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "tomli", "tomli-w", "types-PyYAML", "types-requests"] [[package]] name = "rfc3339-validator" version = "0.1.4" description = "A pure python RFC3339 validator" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -3603,7 +3546,6 @@ six = "*" name = "rich" version = "13.7.0" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" -category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -3621,116 +3563,116 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "rpds-py" -version = "0.10.3" +version = "0.17.1" description = "Python bindings to Rust's persistent data structures (rpds)" -category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "rpds_py-0.10.3-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:485747ee62da83366a44fbba963c5fe017860ad408ccd6cd99aa66ea80d32b2e"}, - {file = "rpds_py-0.10.3-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:c55f9821f88e8bee4b7a72c82cfb5ecd22b6aad04033334f33c329b29bfa4da0"}, - {file = "rpds_py-0.10.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3b52a67ac66a3a64a7e710ba629f62d1e26ca0504c29ee8cbd99b97df7079a8"}, - {file = "rpds_py-0.10.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3aed39db2f0ace76faa94f465d4234aac72e2f32b009f15da6492a561b3bbebd"}, - {file = "rpds_py-0.10.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:271c360fdc464fe6a75f13ea0c08ddf71a321f4c55fc20a3fe62ea3ef09df7d9"}, - {file = "rpds_py-0.10.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ef5fddfb264e89c435be4adb3953cef5d2936fdeb4463b4161a6ba2f22e7b740"}, - {file = "rpds_py-0.10.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a771417c9c06c56c9d53d11a5b084d1de75de82978e23c544270ab25e7c066ff"}, - {file = "rpds_py-0.10.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:52b5cbc0469328e58180021138207e6ec91d7ca2e037d3549cc9e34e2187330a"}, - {file = "rpds_py-0.10.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:6ac3fefb0d168c7c6cab24fdfc80ec62cd2b4dfd9e65b84bdceb1cb01d385c33"}, - {file = "rpds_py-0.10.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:8d54bbdf5d56e2c8cf81a1857250f3ea132de77af543d0ba5dce667183b61fec"}, - {file = "rpds_py-0.10.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cd2163f42868865597d89399a01aa33b7594ce8e2c4a28503127c81a2f17784e"}, - {file = "rpds_py-0.10.3-cp310-none-win32.whl", hash = "sha256:ea93163472db26ac6043e8f7f93a05d9b59e0505c760da2a3cd22c7dd7111391"}, - {file = "rpds_py-0.10.3-cp310-none-win_amd64.whl", hash = "sha256:7cd020b1fb41e3ab7716d4d2c3972d4588fdfbab9bfbbb64acc7078eccef8860"}, - {file = "rpds_py-0.10.3-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:1d9b5ee46dcb498fa3e46d4dfabcb531e1f2e76b477e0d99ef114f17bbd38453"}, - {file = "rpds_py-0.10.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:563646d74a4b4456d0cf3b714ca522e725243c603e8254ad85c3b59b7c0c4bf0"}, - {file = "rpds_py-0.10.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e626b864725680cd3904414d72e7b0bd81c0e5b2b53a5b30b4273034253bb41f"}, - {file = "rpds_py-0.10.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:485301ee56ce87a51ccb182a4b180d852c5cb2b3cb3a82f7d4714b4141119d8c"}, - {file = "rpds_py-0.10.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:42f712b4668831c0cd85e0a5b5a308700fe068e37dcd24c0062904c4e372b093"}, - {file = "rpds_py-0.10.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6c9141af27a4e5819d74d67d227d5047a20fa3c7d4d9df43037a955b4c748ec5"}, - {file = "rpds_py-0.10.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef750a20de1b65657a1425f77c525b0183eac63fe7b8f5ac0dd16f3668d3e64f"}, - {file = "rpds_py-0.10.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e1a0ffc39f51aa5f5c22114a8f1906b3c17eba68c5babb86c5f77d8b1bba14d1"}, - {file = "rpds_py-0.10.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:f4c179a7aeae10ddf44c6bac87938134c1379c49c884529f090f9bf05566c836"}, - {file = "rpds_py-0.10.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:176287bb998fd1e9846a9b666e240e58f8d3373e3bf87e7642f15af5405187b8"}, - {file = "rpds_py-0.10.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6446002739ca29249f0beaaf067fcbc2b5aab4bc7ee8fb941bd194947ce19aff"}, - 
{file = "rpds_py-0.10.3-cp311-none-win32.whl", hash = "sha256:c7aed97f2e676561416c927b063802c8a6285e9b55e1b83213dfd99a8f4f9e48"}, - {file = "rpds_py-0.10.3-cp311-none-win_amd64.whl", hash = "sha256:8bd01ff4032abaed03f2db702fa9a61078bee37add0bd884a6190b05e63b028c"}, - {file = "rpds_py-0.10.3-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:4cf0855a842c5b5c391dd32ca273b09e86abf8367572073bd1edfc52bc44446b"}, - {file = "rpds_py-0.10.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:69b857a7d8bd4f5d6e0db4086da8c46309a26e8cefdfc778c0c5cc17d4b11e08"}, - {file = "rpds_py-0.10.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:975382d9aa90dc59253d6a83a5ca72e07f4ada3ae3d6c0575ced513db322b8ec"}, - {file = "rpds_py-0.10.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:35fbd23c1c8732cde7a94abe7fb071ec173c2f58c0bd0d7e5b669fdfc80a2c7b"}, - {file = "rpds_py-0.10.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:106af1653007cc569d5fbb5f08c6648a49fe4de74c2df814e234e282ebc06957"}, - {file = "rpds_py-0.10.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ce5e7504db95b76fc89055c7f41e367eaadef5b1d059e27e1d6eabf2b55ca314"}, - {file = "rpds_py-0.10.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aca759ada6b1967fcfd4336dcf460d02a8a23e6abe06e90ea7881e5c22c4de6"}, - {file = "rpds_py-0.10.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b5d4bdd697195f3876d134101c40c7d06d46c6ab25159ed5cbd44105c715278a"}, - {file = "rpds_py-0.10.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a657250807b6efd19b28f5922520ae002a54cb43c2401e6f3d0230c352564d25"}, - {file = "rpds_py-0.10.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:177c9dd834cdf4dc39c27436ade6fdf9fe81484758885f2d616d5d03c0a83bd2"}, - {file = "rpds_py-0.10.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e22491d25f97199fc3581ad8dd8ce198d8c8fdb8dae80dea3512e1ce6d5fa99f"}, - {file = "rpds_py-0.10.3-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:2f3e1867dd574014253b4b8f01ba443b9c914e61d45f3674e452a915d6e929a3"}, - {file = "rpds_py-0.10.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c22211c165166de6683de8136229721f3d5c8606cc2c3d1562da9a3a5058049c"}, - {file = "rpds_py-0.10.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40bc802a696887b14c002edd43c18082cb7b6f9ee8b838239b03b56574d97f71"}, - {file = "rpds_py-0.10.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e271dd97c7bb8eefda5cca38cd0b0373a1fea50f71e8071376b46968582af9b"}, - {file = "rpds_py-0.10.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:95cde244e7195b2c07ec9b73fa4c5026d4a27233451485caa1cd0c1b55f26dbd"}, - {file = "rpds_py-0.10.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08a80cf4884920863623a9ee9a285ee04cef57ebedc1cc87b3e3e0f24c8acfe5"}, - {file = "rpds_py-0.10.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:763ad59e105fca09705d9f9b29ecffb95ecdc3b0363be3bb56081b2c6de7977a"}, - {file = "rpds_py-0.10.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:187700668c018a7e76e89424b7c1042f317c8df9161f00c0c903c82b0a8cac5c"}, - {file = "rpds_py-0.10.3-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:5267cfda873ad62591b9332fd9472d2409f7cf02a34a9c9cb367e2c0255994bf"}, - {file = "rpds_py-0.10.3-cp38-cp38-musllinux_1_2_i686.whl", hash = 
"sha256:2ed83d53a8c5902ec48b90b2ac045e28e1698c0bea9441af9409fc844dc79496"}, - {file = "rpds_py-0.10.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:255f1a10ae39b52122cce26ce0781f7a616f502feecce9e616976f6a87992d6b"}, - {file = "rpds_py-0.10.3-cp38-none-win32.whl", hash = "sha256:a019a344312d0b1f429c00d49c3be62fa273d4a1094e1b224f403716b6d03be1"}, - {file = "rpds_py-0.10.3-cp38-none-win_amd64.whl", hash = "sha256:efb9ece97e696bb56e31166a9dd7919f8f0c6b31967b454718c6509f29ef6fee"}, - {file = "rpds_py-0.10.3-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:570cc326e78ff23dec7f41487aa9c3dffd02e5ee9ab43a8f6ccc3df8f9327623"}, - {file = "rpds_py-0.10.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cff7351c251c7546407827b6a37bcef6416304fc54d12d44dbfecbb717064717"}, - {file = "rpds_py-0.10.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:177914f81f66c86c012311f8c7f46887ec375cfcfd2a2f28233a3053ac93a569"}, - {file = "rpds_py-0.10.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:448a66b8266de0b581246ca7cd6a73b8d98d15100fb7165974535fa3b577340e"}, - {file = "rpds_py-0.10.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bbac1953c17252f9cc675bb19372444aadf0179b5df575ac4b56faaec9f6294"}, - {file = "rpds_py-0.10.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9dd9d9d9e898b9d30683bdd2b6c1849449158647d1049a125879cb397ee9cd12"}, - {file = "rpds_py-0.10.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8c71ea77536149e36c4c784f6d420ffd20bea041e3ba21ed021cb40ce58e2c9"}, - {file = "rpds_py-0.10.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:16a472300bc6c83fe4c2072cc22b3972f90d718d56f241adabc7ae509f53f154"}, - {file = "rpds_py-0.10.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b9255e7165083de7c1d605e818025e8860636348f34a79d84ec533546064f07e"}, - {file = "rpds_py-0.10.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:53d7a3cd46cdc1689296348cb05ffd4f4280035770aee0c8ead3bbd4d6529acc"}, - {file = "rpds_py-0.10.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:22da15b902f9f8e267020d1c8bcfc4831ca646fecb60254f7bc71763569f56b1"}, - {file = "rpds_py-0.10.3-cp39-none-win32.whl", hash = "sha256:850c272e0e0d1a5c5d73b1b7871b0a7c2446b304cec55ccdb3eaac0d792bb065"}, - {file = "rpds_py-0.10.3-cp39-none-win_amd64.whl", hash = "sha256:de61e424062173b4f70eec07e12469edde7e17fa180019a2a0d75c13a5c5dc57"}, - {file = "rpds_py-0.10.3-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:af247fd4f12cca4129c1b82090244ea5a9d5bb089e9a82feb5a2f7c6a9fe181d"}, - {file = "rpds_py-0.10.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:3ad59efe24a4d54c2742929001f2d02803aafc15d6d781c21379e3f7f66ec842"}, - {file = "rpds_py-0.10.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:642ed0a209ced4be3a46f8cb094f2d76f1f479e2a1ceca6de6346a096cd3409d"}, - {file = "rpds_py-0.10.3-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:37d0c59548ae56fae01c14998918d04ee0d5d3277363c10208eef8c4e2b68ed6"}, - {file = "rpds_py-0.10.3-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aad6ed9e70ddfb34d849b761fb243be58c735be6a9265b9060d6ddb77751e3e8"}, - {file = "rpds_py-0.10.3-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8f94fdd756ba1f79f988855d948ae0bad9ddf44df296770d9a58c774cfbcca72"}, - {file = 
"rpds_py-0.10.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77076bdc8776a2b029e1e6ffbe6d7056e35f56f5e80d9dc0bad26ad4a024a762"}, - {file = "rpds_py-0.10.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:87d9b206b1bd7a0523375dc2020a6ce88bca5330682ae2fe25e86fd5d45cea9c"}, - {file = "rpds_py-0.10.3-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:8efaeb08ede95066da3a3e3c420fcc0a21693fcd0c4396d0585b019613d28515"}, - {file = "rpds_py-0.10.3-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:a4d9bfda3f84fc563868fe25ca160c8ff0e69bc4443c5647f960d59400ce6557"}, - {file = "rpds_py-0.10.3-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:d27aa6bbc1f33be920bb7adbb95581452cdf23005d5611b29a12bb6a3468cc95"}, - {file = "rpds_py-0.10.3-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:ed8313809571a5463fd7db43aaca68ecb43ca7a58f5b23b6e6c6c5d02bdc7882"}, - {file = "rpds_py-0.10.3-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:e10e6a1ed2b8661201e79dff5531f8ad4cdd83548a0f81c95cf79b3184b20c33"}, - {file = "rpds_py-0.10.3-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:015de2ce2af1586ff5dc873e804434185199a15f7d96920ce67e50604592cae9"}, - {file = "rpds_py-0.10.3-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ae87137951bb3dc08c7d8bfb8988d8c119f3230731b08a71146e84aaa919a7a9"}, - {file = "rpds_py-0.10.3-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0bb4f48bd0dd18eebe826395e6a48b7331291078a879295bae4e5d053be50d4c"}, - {file = "rpds_py-0.10.3-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:09362f86ec201288d5687d1dc476b07bf39c08478cde837cb710b302864e7ec9"}, - {file = "rpds_py-0.10.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:821392559d37759caa67d622d0d2994c7a3f2fb29274948ac799d496d92bca73"}, - {file = "rpds_py-0.10.3-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7170cbde4070dc3c77dec82abf86f3b210633d4f89550fa0ad2d4b549a05572a"}, - {file = "rpds_py-0.10.3-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:5de11c041486681ce854c814844f4ce3282b6ea1656faae19208ebe09d31c5b8"}, - {file = "rpds_py-0.10.3-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:4ed172d0c79f156c1b954e99c03bc2e3033c17efce8dd1a7c781bc4d5793dfac"}, - {file = "rpds_py-0.10.3-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:11fdd1192240dda8d6c5d18a06146e9045cb7e3ba7c06de6973000ff035df7c6"}, - {file = "rpds_py-0.10.3-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:f602881d80ee4228a2355c68da6b296a296cd22bbb91e5418d54577bbf17fa7c"}, - {file = "rpds_py-0.10.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:691d50c99a937709ac4c4cd570d959a006bd6a6d970a484c84cc99543d4a5bbb"}, - {file = "rpds_py-0.10.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:24cd91a03543a0f8d09cb18d1cb27df80a84b5553d2bd94cba5979ef6af5c6e7"}, - {file = "rpds_py-0.10.3-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fc2200e79d75b5238c8d69f6a30f8284290c777039d331e7340b6c17cad24a5a"}, - {file = "rpds_py-0.10.3-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea65b59882d5fa8c74a23f8960db579e5e341534934f43f3b18ec1839b893e41"}, - {file = "rpds_py-0.10.3-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:829e91f3a8574888b73e7a3feb3b1af698e717513597e23136ff4eba0bc8387a"}, - {file = "rpds_py-0.10.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eab75a8569a095f2ad470b342f2751d9902f7944704f0571c8af46bede438475"}, - {file = "rpds_py-0.10.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:061c3ff1f51ecec256e916cf71cc01f9975af8fb3af9b94d3c0cc8702cfea637"}, - {file = "rpds_py-0.10.3-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:39d05e65f23a0fe897b6ac395f2a8d48c56ac0f583f5d663e0afec1da89b95da"}, - {file = "rpds_py-0.10.3-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:4eca20917a06d2fca7628ef3c8b94a8c358f6b43f1a621c9815243462dcccf97"}, - {file = "rpds_py-0.10.3-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:e8d0f0eca087630d58b8c662085529781fd5dc80f0a54eda42d5c9029f812599"}, - {file = "rpds_py-0.10.3.tar.gz", hash = "sha256:fcc1ebb7561a3e24a6588f7c6ded15d80aec22c66a070c757559b57b17ffd1cb"}, + {file = "rpds_py-0.17.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:4128980a14ed805e1b91a7ed551250282a8ddf8201a4e9f8f5b7e6225f54170d"}, + {file = "rpds_py-0.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ff1dcb8e8bc2261a088821b2595ef031c91d499a0c1b031c152d43fe0a6ecec8"}, + {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d65e6b4f1443048eb7e833c2accb4fa7ee67cc7d54f31b4f0555b474758bee55"}, + {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a71169d505af63bb4d20d23a8fbd4c6ce272e7bce6cc31f617152aa784436f29"}, + {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:436474f17733c7dca0fbf096d36ae65277e8645039df12a0fa52445ca494729d"}, + {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:10162fe3f5f47c37ebf6d8ff5a2368508fe22007e3077bf25b9c7d803454d921"}, + {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:720215373a280f78a1814becb1312d4e4d1077b1202a56d2b0815e95ccb99ce9"}, + {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:70fcc6c2906cfa5c6a552ba7ae2ce64b6c32f437d8f3f8eea49925b278a61453"}, + {file = "rpds_py-0.17.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:91e5a8200e65aaac342a791272c564dffcf1281abd635d304d6c4e6b495f29dc"}, + {file = "rpds_py-0.17.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:99f567dae93e10be2daaa896e07513dd4bf9c2ecf0576e0533ac36ba3b1d5394"}, + {file = "rpds_py-0.17.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:24e4900a6643f87058a27320f81336d527ccfe503984528edde4bb660c8c8d59"}, + {file = "rpds_py-0.17.1-cp310-none-win32.whl", hash = "sha256:0bfb09bf41fe7c51413f563373e5f537eaa653d7adc4830399d4e9bdc199959d"}, + {file = "rpds_py-0.17.1-cp310-none-win_amd64.whl", hash = "sha256:20de7b7179e2031a04042e85dc463a93a82bc177eeba5ddd13ff746325558aa6"}, + {file = "rpds_py-0.17.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:65dcf105c1943cba45d19207ef51b8bc46d232a381e94dd38719d52d3980015b"}, + {file = "rpds_py-0.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:01f58a7306b64e0a4fe042047dd2b7d411ee82e54240284bab63e325762c1147"}, + {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:071bc28c589b86bc6351a339114fb7a029f5cddbaca34103aa573eba7b482382"}, + {file = 
"rpds_py-0.17.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ae35e8e6801c5ab071b992cb2da958eee76340e6926ec693b5ff7d6381441745"}, + {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149c5cd24f729e3567b56e1795f74577aa3126c14c11e457bec1b1c90d212e38"}, + {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e796051f2070f47230c745d0a77a91088fbee2cc0502e9b796b9c6471983718c"}, + {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60e820ee1004327609b28db8307acc27f5f2e9a0b185b2064c5f23e815f248f8"}, + {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1957a2ab607f9added64478a6982742eb29f109d89d065fa44e01691a20fc20a"}, + {file = "rpds_py-0.17.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8587fd64c2a91c33cdc39d0cebdaf30e79491cc029a37fcd458ba863f8815383"}, + {file = "rpds_py-0.17.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4dc889a9d8a34758d0fcc9ac86adb97bab3fb7f0c4d29794357eb147536483fd"}, + {file = "rpds_py-0.17.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:2953937f83820376b5979318840f3ee47477d94c17b940fe31d9458d79ae7eea"}, + {file = "rpds_py-0.17.1-cp311-none-win32.whl", hash = "sha256:1bfcad3109c1e5ba3cbe2f421614e70439f72897515a96c462ea657261b96518"}, + {file = "rpds_py-0.17.1-cp311-none-win_amd64.whl", hash = "sha256:99da0a4686ada4ed0f778120a0ea8d066de1a0a92ab0d13ae68492a437db78bf"}, + {file = "rpds_py-0.17.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:1dc29db3900cb1bb40353772417800f29c3d078dbc8024fd64655a04ee3c4bdf"}, + {file = "rpds_py-0.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:82ada4a8ed9e82e443fcef87e22a3eed3654dd3adf6e3b3a0deb70f03e86142a"}, + {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d36b2b59e8cc6e576f8f7b671e32f2ff43153f0ad6d0201250a7c07f25d570e"}, + {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3677fcca7fb728c86a78660c7fb1b07b69b281964673f486ae72860e13f512ad"}, + {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:516fb8c77805159e97a689e2f1c80655c7658f5af601c34ffdb916605598cda2"}, + {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df3b6f45ba4515632c5064e35ca7f31d51d13d1479673185ba8f9fefbbed58b9"}, + {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a967dd6afda7715d911c25a6ba1517975acd8d1092b2f326718725461a3d33f9"}, + {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dbbb95e6fc91ea3102505d111b327004d1c4ce98d56a4a02e82cd451f9f57140"}, + {file = "rpds_py-0.17.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:02866e060219514940342a1f84303a1ef7a1dad0ac311792fbbe19b521b489d2"}, + {file = "rpds_py-0.17.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:2528ff96d09f12e638695f3a2e0c609c7b84c6df7c5ae9bfeb9252b6fa686253"}, + {file = "rpds_py-0.17.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:bd345a13ce06e94c753dab52f8e71e5252aec1e4f8022d24d56decd31e1b9b23"}, + {file = "rpds_py-0.17.1-cp312-none-win32.whl", hash = "sha256:2a792b2e1d3038daa83fa474d559acfd6dc1e3650ee93b2662ddc17dbff20ad1"}, + {file = "rpds_py-0.17.1-cp312-none-win_amd64.whl", hash = 
"sha256:292f7344a3301802e7c25c53792fae7d1593cb0e50964e7bcdcc5cf533d634e3"}, + {file = "rpds_py-0.17.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:8ffe53e1d8ef2520ebcf0c9fec15bb721da59e8ef283b6ff3079613b1e30513d"}, + {file = "rpds_py-0.17.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4341bd7579611cf50e7b20bb8c2e23512a3dc79de987a1f411cb458ab670eb90"}, + {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f4eb548daf4836e3b2c662033bfbfc551db58d30fd8fe660314f86bf8510b93"}, + {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b686f25377f9c006acbac63f61614416a6317133ab7fafe5de5f7dc8a06d42eb"}, + {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4e21b76075c01d65d0f0f34302b5a7457d95721d5e0667aea65e5bb3ab415c25"}, + {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b86b21b348f7e5485fae740d845c65a880f5d1eda1e063bc59bef92d1f7d0c55"}, + {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f175e95a197f6a4059b50757a3dca33b32b61691bdbd22c29e8a8d21d3914cae"}, + {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1701fc54460ae2e5efc1dd6350eafd7a760f516df8dbe51d4a1c79d69472fbd4"}, + {file = "rpds_py-0.17.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:9051e3d2af8f55b42061603e29e744724cb5f65b128a491446cc029b3e2ea896"}, + {file = "rpds_py-0.17.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:7450dbd659fed6dd41d1a7d47ed767e893ba402af8ae664c157c255ec6067fde"}, + {file = "rpds_py-0.17.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:5a024fa96d541fd7edaa0e9d904601c6445e95a729a2900c5aec6555fe921ed6"}, + {file = "rpds_py-0.17.1-cp38-none-win32.whl", hash = "sha256:da1ead63368c04a9bded7904757dfcae01eba0e0f9bc41d3d7f57ebf1c04015a"}, + {file = "rpds_py-0.17.1-cp38-none-win_amd64.whl", hash = "sha256:841320e1841bb53fada91c9725e766bb25009cfd4144e92298db296fb6c894fb"}, + {file = "rpds_py-0.17.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:f6c43b6f97209e370124baf2bf40bb1e8edc25311a158867eb1c3a5d449ebc7a"}, + {file = "rpds_py-0.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5e7d63ec01fe7c76c2dbb7e972fece45acbb8836e72682bde138e7e039906e2c"}, + {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81038ff87a4e04c22e1d81f947c6ac46f122e0c80460b9006e6517c4d842a6ec"}, + {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:810685321f4a304b2b55577c915bece4c4a06dfe38f6e62d9cc1d6ca8ee86b99"}, + {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:25f071737dae674ca8937a73d0f43f5a52e92c2d178330b4c0bb6ab05586ffa6"}, + {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa5bfb13f1e89151ade0eb812f7b0d7a4d643406caaad65ce1cbabe0a66d695f"}, + {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfe07308b311a8293a0d5ef4e61411c5c20f682db6b5e73de6c7c8824272c256"}, + {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a000133a90eea274a6f28adc3084643263b1e7c1a5a66eb0a0a7a36aa757ed74"}, + {file = "rpds_py-0.17.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d0e8a6434a3fbf77d11448c9c25b2f25244226cfbec1a5159947cac5b8c5fa4"}, + {file = 
"rpds_py-0.17.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:efa767c220d94aa4ac3a6dd3aeb986e9f229eaf5bce92d8b1b3018d06bed3772"}, + {file = "rpds_py-0.17.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:dbc56680ecf585a384fbd93cd42bc82668b77cb525343170a2d86dafaed2a84b"}, + {file = "rpds_py-0.17.1-cp39-none-win32.whl", hash = "sha256:270987bc22e7e5a962b1094953ae901395e8c1e1e83ad016c5cfcfff75a15a3f"}, + {file = "rpds_py-0.17.1-cp39-none-win_amd64.whl", hash = "sha256:2a7b2f2f56a16a6d62e55354dd329d929560442bd92e87397b7a9586a32e3e76"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a3264e3e858de4fc601741498215835ff324ff2482fd4e4af61b46512dd7fc83"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:f2f3b28b40fddcb6c1f1f6c88c6f3769cd933fa493ceb79da45968a21dccc920"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9584f8f52010295a4a417221861df9bea4c72d9632562b6e59b3c7b87a1522b7"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c64602e8be701c6cfe42064b71c84ce62ce66ddc6422c15463fd8127db3d8066"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:060f412230d5f19fc8c8b75f315931b408d8ebf56aec33ef4168d1b9e54200b1"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b9412abdf0ba70faa6e2ee6c0cc62a8defb772e78860cef419865917d86c7342"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9737bdaa0ad33d34c0efc718741abaafce62fadae72c8b251df9b0c823c63b22"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9f0e4dc0f17dcea4ab9d13ac5c666b6b5337042b4d8f27e01b70fae41dd65c57"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:1db228102ab9d1ff4c64148c96320d0be7044fa28bd865a9ce628ce98da5973d"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:d8bbd8e56f3ba25a7d0cf980fc42b34028848a53a0e36c9918550e0280b9d0b6"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:be22ae34d68544df293152b7e50895ba70d2a833ad9566932d750d3625918b82"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:bf046179d011e6114daf12a534d874958b039342b347348a78b7cdf0dd9d6041"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:1a746a6d49665058a5896000e8d9d2f1a6acba8a03b389c1e4c06e11e0b7f40d"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0b8bf5b8db49d8fd40f54772a1dcf262e8be0ad2ab0206b5a2ec109c176c0a4"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f7f4cb1f173385e8a39c29510dd11a78bf44e360fb75610594973f5ea141028b"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7fbd70cb8b54fe745301921b0816c08b6d917593429dfc437fd024b5ba713c58"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9bdf1303df671179eaf2cb41e8515a07fc78d9d00f111eadbe3e14262f59c3d0"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fad059a4bd14c45776600d223ec194e77db6c20255578bb5bcdd7c18fd169361"}, + 
{file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3664d126d3388a887db44c2e293f87d500c4184ec43d5d14d2d2babdb4c64cad"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:698ea95a60c8b16b58be9d854c9f993c639f5c214cf9ba782eca53a8789d6b19"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:c3d2010656999b63e628a3c694f23020322b4178c450dc478558a2b6ef3cb9bb"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:938eab7323a736533f015e6069a7d53ef2dcc841e4e533b782c2bfb9fb12d84b"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1e626b365293a2142a62b9a614e1f8e331b28f3ca57b9f05ebbf4cf2a0f0bdc5"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:380e0df2e9d5d5d339803cfc6d183a5442ad7ab3c63c2a0982e8c824566c5ccc"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b760a56e080a826c2e5af09002c1a037382ed21d03134eb6294812dda268c811"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5576ee2f3a309d2bb403ec292d5958ce03953b0e57a11d224c1f134feaf8c40f"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f3c3461ebb4c4f1bbc70b15d20b565759f97a5aaf13af811fcefc892e9197ba"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:637b802f3f069a64436d432117a7e58fab414b4e27a7e81049817ae94de45d8d"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffee088ea9b593cc6160518ba9bd319b5475e5f3e578e4552d63818773c6f56a"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3ac732390d529d8469b831949c78085b034bff67f584559340008d0f6041a049"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:93432e747fb07fa567ad9cc7aaadd6e29710e515aabf939dfbed8046041346c6"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:7b7d9ca34542099b4e185b3c2a2b2eda2e318a7dbde0b0d83357a6d4421b5296"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:0387ce69ba06e43df54e43968090f3626e231e4bc9150e4c3246947567695f68"}, + {file = "rpds_py-0.17.1.tar.gz", hash = "sha256:0210b2668f24c078307260bf88bdac9d6f1093635df5123789bfee4d8d7fc8e7"}, ] [[package]] name = "rsa" version = "4.9" description = "Pure-Python RSA implementation" -category = "main" optional = false python-versions = ">=3.6,<4" files = [ @@ -3743,48 +3685,45 @@ pyasn1 = ">=0.1.3" [[package]] name = "s3fs" -version = "2023.9.1" +version = "2023.12.2" description = "Convenient Filesystem interface over S3" -category = "main" optional = true python-versions = ">= 3.8" files = [ - {file = "s3fs-2023.9.1-py3-none-any.whl", hash = "sha256:3bd1f9f33e4ad090d150301c3b386061cb7085fc8bda3a9ec9198dccca765d6c"}, - {file = "s3fs-2023.9.1.tar.gz", hash = "sha256:42e1821ed94c1607c848853d1d715ebcd25c13380b6f510c2cb498c7e5b3e674"}, + {file = "s3fs-2023.12.2-py3-none-any.whl", hash = "sha256:0d5a99039665f30b2dbee5495de3b299a022d51b3195a9440f5df47c2621b777"}, + {file = "s3fs-2023.12.2.tar.gz", hash = "sha256:b5ec07062481bbb45cb061b31984c7188d106e292c27033039e024e4ba5740dc"}, ] [package.dependencies] -aiobotocore = ">=2.5.4,<2.6.0" +aiobotocore = ">=2.5.4,<3.0.0" aiohttp = "<4.0.0a0 || 
>4.0.0a0,<4.0.0a1 || >4.0.0a1" -fsspec = "2023.9.1" +fsspec = "2023.12.2" [package.extras] -awscli = ["aiobotocore[awscli] (>=2.5.4,<2.6.0)"] -boto3 = ["aiobotocore[boto3] (>=2.5.4,<2.6.0)"] +awscli = ["aiobotocore[awscli] (>=2.5.4,<3.0.0)"] +boto3 = ["aiobotocore[boto3] (>=2.5.4,<3.0.0)"] [[package]] name = "s3transfer" -version = "0.6.2" +version = "0.10.0" description = "An Amazon S3 Transfer Manager" -category = "main" optional = false -python-versions = ">= 3.7" +python-versions = ">= 3.8" files = [ - {file = "s3transfer-0.6.2-py3-none-any.whl", hash = "sha256:b014be3a8a2aab98cfe1abc7229cc5a9a0cf05eb9c1f2b86b230fd8df3f78084"}, - {file = "s3transfer-0.6.2.tar.gz", hash = "sha256:cab66d3380cca3e70939ef2255d01cd8aece6a4907a9528740f668c4b0611861"}, + {file = "s3transfer-0.10.0-py3-none-any.whl", hash = "sha256:3cdb40f5cfa6966e812209d0994f2a4709b561c88e90cf00c2696d2df4e56b2e"}, + {file = "s3transfer-0.10.0.tar.gz", hash = "sha256:d0c8bbf672d5eebbe4e57945e23b972d963f07d82f661cabf678a5c88831595b"}, ] [package.dependencies] -botocore = ">=1.12.36,<2.0a.0" +botocore = ">=1.33.2,<2.0a.0" [package.extras] -crt = ["botocore[crt] (>=1.20.29,<2.0a.0)"] +crt = ["botocore[crt] (>=1.33.2,<2.0a.0)"] [[package]] name = "sarif-om" version = "1.0.4" description = "Classes implementing the SARIF 2.1.0 object model." -category = "dev" optional = false python-versions = ">= 2.7" files = [ @@ -3798,18 +3737,17 @@ pbr = "*" [[package]] name = "setuptools" -version = "68.2.2" +version = "69.0.3" description = "Easily download, build, install, upgrade, and uninstall Python packages" -category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-68.2.2-py3-none-any.whl", hash = "sha256:b454a35605876da60632df1a60f736524eb73cc47bbc9f3f1ef1b644de74fd2a"}, - {file = "setuptools-68.2.2.tar.gz", hash = "sha256:4ac1475276d2f1c48684874089fefcd83bd7162ddaafb81fac866ba0db282a87"}, + {file = "setuptools-69.0.3-py3-none-any.whl", hash = "sha256:385eb4edd9c9d5c17540511303e39a147ce2fc04bc55289c322b9e5904fe2c05"}, + {file = "setuptools-69.0.3.tar.gz", hash = "sha256:be1af57fc409f93647f2e8e4573a142ed38724b8cdd389706a867bb4efcf1e78"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] @@ -3817,7 +3755,6 @@ testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock 
(>=3.4.0)", "jar name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -3829,7 +3766,6 @@ files = [ name = "sortedcontainers" version = "2.4.0" description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set" -category = "main" optional = false python-versions = "*" files = [ @@ -3841,7 +3777,6 @@ files = [ name = "sqlalchemy" version = "2.0.25" description = "Database Abstraction Library" -category = "main" optional = true python-versions = ">=3.7" files = [ @@ -3929,7 +3864,6 @@ sqlcipher = ["sqlcipher3_binary"] name = "sshpubkeys" version = "3.3.1" description = "SSH public key parser" -category = "dev" optional = false python-versions = ">=3" files = [ @@ -3948,7 +3882,6 @@ dev = ["twine", "wheel", "yapf"] name = "strictyaml" version = "1.7.3" description = "Strict, typed YAML parser" -category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -3963,7 +3896,6 @@ python-dateutil = ">=2.6.0" name = "sympy" version = "1.12" description = "Computer algebra system (CAS) in Python" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -3978,7 +3910,6 @@ mpmath = ">=0.19" name = "thrift" version = "0.16.0" description = "Python bindings for the Apache Thrift RPC system" -category = "main" optional = true python-versions = "*" files = [ @@ -3997,7 +3928,6 @@ twisted = ["twisted"] name = "tomli" version = "2.0.1" description = "A lil' TOML parser" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -4005,23 +3935,10 @@ files = [ {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] -[[package]] -name = "types-pyyaml" -version = "6.0.12.11" -description = "Typing stubs for PyYAML" -category = "dev" -optional = false -python-versions = "*" -files = [ - {file = "types-PyYAML-6.0.12.11.tar.gz", hash = "sha256:7d340b19ca28cddfdba438ee638cd4084bde213e501a3978738543e27094775b"}, - {file = "types_PyYAML-6.0.12.11-py3-none-any.whl", hash = "sha256:a461508f3096d1d5810ec5ab95d7eeecb651f3a15b71959999988942063bf01d"}, -] - [[package]] name = "typing-extensions" version = "4.9.0" description = "Backported and Experimental Type Hints for Python 3.8+" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -4031,21 +3948,19 @@ files = [ [[package]] name = "tzdata" -version = "2023.3" +version = "2023.4" description = "Provider of IANA time zone data" -category = "main" optional = true python-versions = ">=2" files = [ - {file = "tzdata-2023.3-py2.py3-none-any.whl", hash = "sha256:7e65763eef3120314099b6939b5546db7adce1e7d6f2e179e3df563c70511eda"}, - {file = "tzdata-2023.3.tar.gz", hash = "sha256:11ef1e08e54acb0d4f95bdb1be05da659673de4acbd21bf9c69e94cc5e907a3a"}, + {file = "tzdata-2023.4-py2.py3-none-any.whl", hash = "sha256:aa3ace4329eeacda5b7beb7ea08ece826c28d761cda36e747cfbf97996d39bf3"}, + {file = "tzdata-2023.4.tar.gz", hash = "sha256:dd54c94f294765522c77399649b4fefd95522479a664a0cec87f41bebc6148c9"}, ] [[package]] name = "urllib3" version = "1.26.18" description = "HTTP library with thread-safe connection pooling, file post, and more." 
-category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ @@ -4058,22 +3973,38 @@ brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotl secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] +[[package]] +name = "urllib3" +version = "2.0.7" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.7" +files = [ + {file = "urllib3-2.0.7-py3-none-any.whl", hash = "sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e"}, + {file = "urllib3-2.0.7.tar.gz", hash = "sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + [[package]] name = "virtualenv" -version = "20.24.5" +version = "20.25.0" description = "Virtual Python Environment builder" -category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.24.5-py3-none-any.whl", hash = "sha256:b80039f280f4919c77b30f1c23294ae357c4c8701042086e3fc005963e4e537b"}, - {file = "virtualenv-20.24.5.tar.gz", hash = "sha256:e8361967f6da6fbdf1426483bfe9fca8287c242ac0bc30429905721cefbff752"}, + {file = "virtualenv-20.25.0-py3-none-any.whl", hash = "sha256:4238949c5ffe6876362d9c0180fc6c3a824a7b12b80604eeb8085f2ed7460de3"}, + {file = "virtualenv-20.25.0.tar.gz", hash = "sha256:bf51c0d9c7dd63ea8e44086fa1e4fb1093a31e963b86959257378aef020e1f1b"}, ] [package.dependencies] distlib = ">=0.3.7,<1" filelock = ">=3.12.2,<4" -platformdirs = ">=3.9.1,<4" +platformdirs = ">=3.9.1,<5" [package.extras] docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] @@ -4083,7 +4014,6 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess name = "werkzeug" version = "3.0.1" description = "The comprehensive WSGI web application library." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -4099,94 +4029,87 @@ watchdog = ["watchdog (>=2.3)"] [[package]] name = "wrapt" -version = "1.15.0" +version = "1.16.0" description = "Module for decorators, wrappers and monkey patching." 
-category = "main" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" -files = [ - {file = "wrapt-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:ca1cccf838cd28d5a0883b342474c630ac48cac5df0ee6eacc9c7290f76b11c1"}, - {file = "wrapt-1.15.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:e826aadda3cae59295b95343db8f3d965fb31059da7de01ee8d1c40a60398b29"}, - {file = "wrapt-1.15.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5fc8e02f5984a55d2c653f5fea93531e9836abbd84342c1d1e17abc4a15084c2"}, - {file = "wrapt-1.15.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:96e25c8603a155559231c19c0349245eeb4ac0096fe3c1d0be5c47e075bd4f46"}, - {file = "wrapt-1.15.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:40737a081d7497efea35ab9304b829b857f21558acfc7b3272f908d33b0d9d4c"}, - {file = "wrapt-1.15.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:f87ec75864c37c4c6cb908d282e1969e79763e0d9becdfe9fe5473b7bb1e5f09"}, - {file = "wrapt-1.15.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:1286eb30261894e4c70d124d44b7fd07825340869945c79d05bda53a40caa079"}, - {file = "wrapt-1.15.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:493d389a2b63c88ad56cdc35d0fa5752daac56ca755805b1b0c530f785767d5e"}, - {file = "wrapt-1.15.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:58d7a75d731e8c63614222bcb21dd992b4ab01a399f1f09dd82af17bbfc2368a"}, - {file = "wrapt-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:21f6d9a0d5b3a207cdf7acf8e58d7d13d463e639f0c7e01d82cdb671e6cb7923"}, - {file = "wrapt-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ce42618f67741d4697684e501ef02f29e758a123aa2d669e2d964ff734ee00ee"}, - {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41d07d029dd4157ae27beab04d22b8e261eddfc6ecd64ff7000b10dc8b3a5727"}, - {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54accd4b8bc202966bafafd16e69da9d5640ff92389d33d28555c5fd4f25ccb7"}, - {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fbfbca668dd15b744418265a9607baa970c347eefd0db6a518aaf0cfbd153c0"}, - {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:76e9c727a874b4856d11a32fb0b389afc61ce8aaf281ada613713ddeadd1cfec"}, - {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e20076a211cd6f9b44a6be58f7eeafa7ab5720eb796975d0c03f05b47d89eb90"}, - {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a74d56552ddbde46c246b5b89199cb3fd182f9c346c784e1a93e4dc3f5ec9975"}, - {file = "wrapt-1.15.0-cp310-cp310-win32.whl", hash = "sha256:26458da5653aa5b3d8dc8b24192f574a58984c749401f98fff994d41d3f08da1"}, - {file = "wrapt-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:75760a47c06b5974aa5e01949bf7e66d2af4d08cb8c1d6516af5e39595397f5e"}, - {file = "wrapt-1.15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ba1711cda2d30634a7e452fc79eabcadaffedf241ff206db2ee93dd2c89a60e7"}, - {file = "wrapt-1.15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:56374914b132c702aa9aa9959c550004b8847148f95e1b824772d453ac204a72"}, - {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a89ce3fd220ff144bd9d54da333ec0de0399b52c9ac3d2ce34b569cf1a5748fb"}, - {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:3bbe623731d03b186b3d6b0d6f51865bf598587c38d6f7b0be2e27414f7f214e"}, - {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3abbe948c3cbde2689370a262a8d04e32ec2dd4f27103669a45c6929bcdbfe7c"}, - {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b67b819628e3b748fd3c2192c15fb951f549d0f47c0449af0764d7647302fda3"}, - {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7eebcdbe3677e58dd4c0e03b4f2cfa346ed4049687d839adad68cc38bb559c92"}, - {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:74934ebd71950e3db69960a7da29204f89624dde411afbfb3b4858c1409b1e98"}, - {file = "wrapt-1.15.0-cp311-cp311-win32.whl", hash = "sha256:bd84395aab8e4d36263cd1b9308cd504f6cf713b7d6d3ce25ea55670baec5416"}, - {file = "wrapt-1.15.0-cp311-cp311-win_amd64.whl", hash = "sha256:a487f72a25904e2b4bbc0817ce7a8de94363bd7e79890510174da9d901c38705"}, - {file = "wrapt-1.15.0-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:4ff0d20f2e670800d3ed2b220d40984162089a6e2c9646fdb09b85e6f9a8fc29"}, - {file = "wrapt-1.15.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9ed6aa0726b9b60911f4aed8ec5b8dd7bf3491476015819f56473ffaef8959bd"}, - {file = "wrapt-1.15.0-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:896689fddba4f23ef7c718279e42f8834041a21342d95e56922e1c10c0cc7afb"}, - {file = "wrapt-1.15.0-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:75669d77bb2c071333417617a235324a1618dba66f82a750362eccbe5b61d248"}, - {file = "wrapt-1.15.0-cp35-cp35m-win32.whl", hash = "sha256:fbec11614dba0424ca72f4e8ba3c420dba07b4a7c206c8c8e4e73f2e98f4c559"}, - {file = "wrapt-1.15.0-cp35-cp35m-win_amd64.whl", hash = "sha256:fd69666217b62fa5d7c6aa88e507493a34dec4fa20c5bd925e4bc12fce586639"}, - {file = "wrapt-1.15.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b0724f05c396b0a4c36a3226c31648385deb6a65d8992644c12a4963c70326ba"}, - {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bbeccb1aa40ab88cd29e6c7d8585582c99548f55f9b2581dfc5ba68c59a85752"}, - {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38adf7198f8f154502883242f9fe7333ab05a5b02de7d83aa2d88ea621f13364"}, - {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:578383d740457fa790fdf85e6d346fda1416a40549fe8db08e5e9bd281c6a475"}, - {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:a4cbb9ff5795cd66f0066bdf5947f170f5d63a9274f99bdbca02fd973adcf2a8"}, - {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:af5bd9ccb188f6a5fdda9f1f09d9f4c86cc8a539bd48a0bfdc97723970348418"}, - {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b56d5519e470d3f2fe4aa7585f0632b060d532d0696c5bdfb5e8319e1d0f69a2"}, - {file = "wrapt-1.15.0-cp36-cp36m-win32.whl", hash = "sha256:77d4c1b881076c3ba173484dfa53d3582c1c8ff1f914c6461ab70c8428b796c1"}, - {file = "wrapt-1.15.0-cp36-cp36m-win_amd64.whl", hash = "sha256:077ff0d1f9d9e4ce6476c1a924a3332452c1406e59d90a2cf24aeb29eeac9420"}, - {file = "wrapt-1.15.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5c5aa28df055697d7c37d2099a7bc09f559d5053c3349b1ad0c39000e611d317"}, - {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a8564f283394634a7a7054b7983e47dbf39c07712d7b177b37e03f2467a024e"}, - {file 
= "wrapt-1.15.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780c82a41dc493b62fc5884fb1d3a3b81106642c5c5c78d6a0d4cbe96d62ba7e"}, - {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e169e957c33576f47e21864cf3fc9ff47c223a4ebca8960079b8bd36cb014fd0"}, - {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b02f21c1e2074943312d03d243ac4388319f2456576b2c6023041c4d57cd7019"}, - {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f2e69b3ed24544b0d3dbe2c5c0ba5153ce50dcebb576fdc4696d52aa22db6034"}, - {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d787272ed958a05b2c86311d3a4135d3c2aeea4fc655705f074130aa57d71653"}, - {file = "wrapt-1.15.0-cp37-cp37m-win32.whl", hash = "sha256:02fce1852f755f44f95af51f69d22e45080102e9d00258053b79367d07af39c0"}, - {file = "wrapt-1.15.0-cp37-cp37m-win_amd64.whl", hash = "sha256:abd52a09d03adf9c763d706df707c343293d5d106aea53483e0ec8d9e310ad5e"}, - {file = "wrapt-1.15.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cdb4f085756c96a3af04e6eca7f08b1345e94b53af8921b25c72f096e704e145"}, - {file = "wrapt-1.15.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:230ae493696a371f1dbffaad3dafbb742a4d27a0afd2b1aecebe52b740167e7f"}, - {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63424c681923b9f3bfbc5e3205aafe790904053d42ddcc08542181a30a7a51bd"}, - {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6bcbfc99f55655c3d93feb7ef3800bd5bbe963a755687cbf1f490a71fb7794b"}, - {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c99f4309f5145b93eca6e35ac1a988f0dc0a7ccf9ccdcd78d3c0adf57224e62f"}, - {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b130fe77361d6771ecf5a219d8e0817d61b236b7d8b37cc045172e574ed219e6"}, - {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:96177eb5645b1c6985f5c11d03fc2dbda9ad24ec0f3a46dcce91445747e15094"}, - {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5fe3e099cf07d0fb5a1e23d399e5d4d1ca3e6dfcbe5c8570ccff3e9208274f7"}, - {file = "wrapt-1.15.0-cp38-cp38-win32.whl", hash = "sha256:abd8f36c99512755b8456047b7be10372fca271bf1467a1caa88db991e7c421b"}, - {file = "wrapt-1.15.0-cp38-cp38-win_amd64.whl", hash = "sha256:b06fa97478a5f478fb05e1980980a7cdf2712015493b44d0c87606c1513ed5b1"}, - {file = "wrapt-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2e51de54d4fb8fb50d6ee8327f9828306a959ae394d3e01a1ba8b2f937747d86"}, - {file = "wrapt-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0970ddb69bba00670e58955f8019bec4a42d1785db3faa043c33d81de2bf843c"}, - {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76407ab327158c510f44ded207e2f76b657303e17cb7a572ffe2f5a8a48aa04d"}, - {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd525e0e52a5ff16653a3fc9e3dd827981917d34996600bbc34c05d048ca35cc"}, - {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d37ac69edc5614b90516807de32d08cb8e7b12260a285ee330955604ed9dd29"}, - {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_aarch64.whl", 
hash = "sha256:078e2a1a86544e644a68422f881c48b84fef6d18f8c7a957ffd3f2e0a74a0d4a"}, - {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2cf56d0e237280baed46f0b5316661da892565ff58309d4d2ed7dba763d984b8"}, - {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7dc0713bf81287a00516ef43137273b23ee414fe41a3c14be10dd95ed98a2df9"}, - {file = "wrapt-1.15.0-cp39-cp39-win32.whl", hash = "sha256:46ed616d5fb42f98630ed70c3529541408166c22cdfd4540b88d5f21006b0eff"}, - {file = "wrapt-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:eef4d64c650f33347c1f9266fa5ae001440b232ad9b98f1f43dfe7a79435c0a6"}, - {file = "wrapt-1.15.0-py3-none-any.whl", hash = "sha256:64b1df0f83706b4ef4cfb4fb0e4c2669100fd7ecacfb59e091fad300d4e04640"}, - {file = "wrapt-1.15.0.tar.gz", hash = "sha256:d06730c6aed78cee4126234cf2d071e01b44b915e725a6cb439a879ec9754a3a"}, +python-versions = ">=3.6" +files = [ + {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, + {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, + {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, + {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = 
"sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, + {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, + {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, + {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, + {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, + {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, + {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, + {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, + {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, + {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, + {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, + {file = 
"wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, + {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, + {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, + {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, + {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, ] [[package]] name = "xmltodict" version = "0.13.0" description = "Makes working with XML feel like you are working with JSON" -category = "dev" optional = false python-versions = ">=3.4" files = [ @@ -4196,86 +4119,101 @@ files = [ [[package]] name = "yarl" -version = "1.9.2" +version = "1.9.4" description = "Yet another URL library" -category = "main" optional = true python-versions = ">=3.7" files = [ - {file = "yarl-1.9.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c2ad583743d16ddbdf6bb14b5cd76bf43b0d0006e918809d5d4ddf7bde8dd82"}, - {file = "yarl-1.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:82aa6264b36c50acfb2424ad5ca537a2060ab6de158a5bd2a72a032cc75b9eb8"}, - {file = "yarl-1.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c0c77533b5ed4bcc38e943178ccae29b9bcf48ffd1063f5821192f23a1bd27b9"}, - {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee4afac41415d52d53a9833ebae7e32b344be72835bbb589018c9e938045a560"}, - {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9bf345c3a4f5ba7f766430f97f9cc1320786f19584acc7086491f45524a551ac"}, - {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a96c19c52ff442a808c105901d0bdfd2e28575b3d5f82e2f5fd67e20dc5f4ea"}, - {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:891c0e3ec5ec881541f6c5113d8df0315ce5440e244a716b95f2525b7b9f3608"}, - {file = "yarl-1.9.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c3a53ba34a636a256d767c086ceb111358876e1fb6b50dfc4d3f4951d40133d5"}, - {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:566185e8ebc0898b11f8026447eacd02e46226716229cea8db37496c8cdd26e0"}, - {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:2b0738fb871812722a0ac2154be1f049c6223b9f6f22eec352996b69775b36d4"}, - {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:32f1d071b3f362c80f1a7d322bfd7b2d11e33d2adf395cc1dd4df36c9c243095"}, - {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:e9fdc7ac0d42bc3ea78818557fab03af6181e076a2944f43c38684b4b6bed8e3"}, - {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:56ff08ab5df8429901ebdc5d15941b59f6253393cb5da07b4170beefcf1b2528"}, - 
{file = "yarl-1.9.2-cp310-cp310-win32.whl", hash = "sha256:8ea48e0a2f931064469bdabca50c2f578b565fc446f302a79ba6cc0ee7f384d3"}, - {file = "yarl-1.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:50f33040f3836e912ed16d212f6cc1efb3231a8a60526a407aeb66c1c1956dde"}, - {file = "yarl-1.9.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:646d663eb2232d7909e6601f1a9107e66f9791f290a1b3dc7057818fe44fc2b6"}, - {file = "yarl-1.9.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:aff634b15beff8902d1f918012fc2a42e0dbae6f469fce134c8a0dc51ca423bb"}, - {file = "yarl-1.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a83503934c6273806aed765035716216cc9ab4e0364f7f066227e1aaea90b8d0"}, - {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b25322201585c69abc7b0e89e72790469f7dad90d26754717f3310bfe30331c2"}, - {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:22a94666751778629f1ec4280b08eb11815783c63f52092a5953faf73be24191"}, - {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ec53a0ea2a80c5cd1ab397925f94bff59222aa3cf9c6da938ce05c9ec20428d"}, - {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:159d81f22d7a43e6eabc36d7194cb53f2f15f498dbbfa8edc8a3239350f59fe7"}, - {file = "yarl-1.9.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:832b7e711027c114d79dffb92576acd1bd2decc467dec60e1cac96912602d0e6"}, - {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:95d2ecefbcf4e744ea952d073c6922e72ee650ffc79028eb1e320e732898d7e8"}, - {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d4e2c6d555e77b37288eaf45b8f60f0737c9efa3452c6c44626a5455aeb250b9"}, - {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:783185c75c12a017cc345015ea359cc801c3b29a2966c2655cd12b233bf5a2be"}, - {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:b8cc1863402472f16c600e3e93d542b7e7542a540f95c30afd472e8e549fc3f7"}, - {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:822b30a0f22e588b32d3120f6d41e4ed021806418b4c9f0bc3048b8c8cb3f92a"}, - {file = "yarl-1.9.2-cp311-cp311-win32.whl", hash = "sha256:a60347f234c2212a9f0361955007fcf4033a75bf600a33c88a0a8e91af77c0e8"}, - {file = "yarl-1.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:be6b3fdec5c62f2a67cb3f8c6dbf56bbf3f61c0f046f84645cd1ca73532ea051"}, - {file = "yarl-1.9.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:38a3928ae37558bc1b559f67410df446d1fbfa87318b124bf5032c31e3447b74"}, - {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac9bb4c5ce3975aeac288cfcb5061ce60e0d14d92209e780c93954076c7c4367"}, - {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3da8a678ca8b96c8606bbb8bfacd99a12ad5dd288bc6f7979baddd62f71c63ef"}, - {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:13414591ff516e04fcdee8dc051c13fd3db13b673c7a4cb1350e6b2ad9639ad3"}, - {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf74d08542c3a9ea97bb8f343d4fcbd4d8f91bba5ec9d5d7f792dbe727f88938"}, - {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e7221580dc1db478464cfeef9b03b95c5852cc22894e418562997df0d074ccc"}, - {file = 
"yarl-1.9.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:494053246b119b041960ddcd20fd76224149cfea8ed8777b687358727911dd33"}, - {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:52a25809fcbecfc63ac9ba0c0fb586f90837f5425edfd1ec9f3372b119585e45"}, - {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:e65610c5792870d45d7b68c677681376fcf9cc1c289f23e8e8b39c1485384185"}, - {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:1b1bba902cba32cdec51fca038fd53f8beee88b77efc373968d1ed021024cc04"}, - {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:662e6016409828ee910f5d9602a2729a8a57d74b163c89a837de3fea050c7582"}, - {file = "yarl-1.9.2-cp37-cp37m-win32.whl", hash = "sha256:f364d3480bffd3aa566e886587eaca7c8c04d74f6e8933f3f2c996b7f09bee1b"}, - {file = "yarl-1.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6a5883464143ab3ae9ba68daae8e7c5c95b969462bbe42e2464d60e7e2698368"}, - {file = "yarl-1.9.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5610f80cf43b6202e2c33ba3ec2ee0a2884f8f423c8f4f62906731d876ef4fac"}, - {file = "yarl-1.9.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b9a4e67ad7b646cd6f0938c7ebfd60e481b7410f574c560e455e938d2da8e0f4"}, - {file = "yarl-1.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:83fcc480d7549ccebe9415d96d9263e2d4226798c37ebd18c930fce43dfb9574"}, - {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fcd436ea16fee7d4207c045b1e340020e58a2597301cfbcfdbe5abd2356c2fb"}, - {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84e0b1599334b1e1478db01b756e55937d4614f8654311eb26012091be109d59"}, - {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3458a24e4ea3fd8930e934c129b676c27452e4ebda80fbe47b56d8c6c7a63a9e"}, - {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:838162460b3a08987546e881a2bfa573960bb559dfa739e7800ceeec92e64417"}, - {file = "yarl-1.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4e2d08f07a3d7d3e12549052eb5ad3eab1c349c53ac51c209a0e5991bbada78"}, - {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:de119f56f3c5f0e2fb4dee508531a32b069a5f2c6e827b272d1e0ff5ac040333"}, - {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:149ddea5abf329752ea5051b61bd6c1d979e13fbf122d3a1f9f0c8be6cb6f63c"}, - {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:674ca19cbee4a82c9f54e0d1eee28116e63bc6fd1e96c43031d11cbab8b2afd5"}, - {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:9b3152f2f5677b997ae6c804b73da05a39daa6a9e85a512e0e6823d81cdad7cc"}, - {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5415d5a4b080dc9612b1b63cba008db84e908b95848369aa1da3686ae27b6d2b"}, - {file = "yarl-1.9.2-cp38-cp38-win32.whl", hash = "sha256:f7a3d8146575e08c29ed1cd287068e6d02f1c7bdff8970db96683b9591b86ee7"}, - {file = "yarl-1.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:63c48f6cef34e6319a74c727376e95626f84ea091f92c0250a98e53e62c77c72"}, - {file = "yarl-1.9.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:75df5ef94c3fdc393c6b19d80e6ef1ecc9ae2f4263c09cacb178d871c02a5ba9"}, - {file = "yarl-1.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c027a6e96ef77d401d8d5a5c8d6bc478e8042f1e448272e8d9752cb0aff8b5c8"}, - {file = "yarl-1.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:f3b078dbe227f79be488ffcfc7a9edb3409d018e0952cf13f15fd6512847f3f7"}, - {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59723a029760079b7d991a401386390c4be5bfec1e7dd83e25a6a0881859e716"}, - {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b03917871bf859a81ccb180c9a2e6c1e04d2f6a51d953e6a5cdd70c93d4e5a2a"}, - {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c1012fa63eb6c032f3ce5d2171c267992ae0c00b9e164efe4d73db818465fac3"}, - {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a74dcbfe780e62f4b5a062714576f16c2f3493a0394e555ab141bf0d746bb955"}, - {file = "yarl-1.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8c56986609b057b4839968ba901944af91b8e92f1725d1a2d77cbac6972b9ed1"}, - {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2c315df3293cd521033533d242d15eab26583360b58f7ee5d9565f15fee1bef4"}, - {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:b7232f8dfbd225d57340e441d8caf8652a6acd06b389ea2d3222b8bc89cbfca6"}, - {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:53338749febd28935d55b41bf0bcc79d634881195a39f6b2f767870b72514caf"}, - {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:066c163aec9d3d073dc9ffe5dd3ad05069bcb03fcaab8d221290ba99f9f69ee3"}, - {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8288d7cd28f8119b07dd49b7230d6b4562f9b61ee9a4ab02221060d21136be80"}, - {file = "yarl-1.9.2-cp39-cp39-win32.whl", hash = "sha256:b124e2a6d223b65ba8768d5706d103280914d61f5cae3afbc50fc3dfcc016623"}, - {file = "yarl-1.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:61016e7d582bc46a5378ffdd02cd0314fb8ba52f40f9cf4d9a5e7dbef88dee18"}, - {file = "yarl-1.9.2.tar.gz", hash = "sha256:04ab9d4b9f587c06d801c2abfe9317b77cdf996c65a90d5e84ecc45010823571"}, + {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e"}, + {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2"}, + {file = "yarl-1.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c38c9ddb6103ceae4e4498f9c08fac9b590c5c71b0370f98714768e22ac6fa66"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9e09c9d74f4566e905a0b8fa668c58109f7624db96a2171f21747abc7524234"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8477c1ee4bd47c57d49621a062121c3023609f7a13b8a46953eb6c9716ca392"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5ff2c858f5f6a42c2a8e751100f237c5e869cbde669a724f2062d4c4ef93551"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:357495293086c5b6d34ca9616a43d329317feab7917518bc97a08f9e55648455"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54525ae423d7b7a8ee81ba189f131054defdb122cde31ff17477951464c1691c"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:801e9264d19643548651b9db361ce3287176671fb0117f96b5ac0ee1c3530d53"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = 
"sha256:e516dc8baf7b380e6c1c26792610230f37147bb754d6426462ab115a02944385"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7d5aaac37d19b2904bb9dfe12cdb08c8443e7ba7d2852894ad448d4b8f442863"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:54beabb809ffcacbd9d28ac57b0db46e42a6e341a030293fb3185c409e626b8b"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bac8d525a8dbc2a1507ec731d2867025d11ceadcb4dd421423a5d42c56818541"}, + {file = "yarl-1.9.4-cp310-cp310-win32.whl", hash = "sha256:7855426dfbddac81896b6e533ebefc0af2f132d4a47340cee6d22cac7190022d"}, + {file = "yarl-1.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:848cd2a1df56ddbffeb375535fb62c9d1645dde33ca4d51341378b3f5954429b"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:35a2b9396879ce32754bd457d31a51ff0a9d426fd9e0e3c33394bf4b9036b099"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c7d56b293cc071e82532f70adcbd8b61909eec973ae9d2d1f9b233f3d943f2c"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8a1c6c0be645c745a081c192e747c5de06e944a0d21245f4cf7c05e457c36e0"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b3c1ffe10069f655ea2d731808e76e0f452fc6c749bea04781daf18e6039525"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:549d19c84c55d11687ddbd47eeb348a89df9cb30e1993f1b128f4685cd0ebbf8"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7409f968456111140c1c95301cadf071bd30a81cbd7ab829169fb9e3d72eae9"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e23a6d84d9d1738dbc6e38167776107e63307dfc8ad108e580548d1f2c587f42"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8b889777de69897406c9fb0b76cdf2fd0f31267861ae7501d93003d55f54fbe"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:03caa9507d3d3c83bca08650678e25364e1843b484f19986a527630ca376ecce"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e9035df8d0880b2f1c7f5031f33f69e071dfe72ee9310cfc76f7b605958ceb9"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:c0ec0ed476f77db9fb29bca17f0a8fcc7bc97ad4c6c1d8959c507decb22e8572"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:ee04010f26d5102399bd17f8df8bc38dc7ccd7701dc77f4a68c5b8d733406958"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:49a180c2e0743d5d6e0b4d1a9e5f633c62eca3f8a86ba5dd3c471060e352ca98"}, + {file = "yarl-1.9.4-cp311-cp311-win32.whl", hash = "sha256:81eb57278deb6098a5b62e88ad8281b2ba09f2f1147c4767522353eaa6260b31"}, + {file = "yarl-1.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d1d2532b340b692880261c15aee4dc94dd22ca5d61b9db9a8a361953d36410b1"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0d2454f0aef65ea81037759be5ca9947539667eecebca092733b2eb43c965a81"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:44d8ffbb9c06e5a7f529f38f53eda23e50d1ed33c6c869e01481d3fafa6b8142"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aaaea1e536f98754a6e5c56091baa1b6ce2f2700cc4a00b0d49eca8dea471074"}, + {file = 
"yarl-1.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3777ce5536d17989c91696db1d459574e9a9bd37660ea7ee4d3344579bb6f129"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fc5fc1eeb029757349ad26bbc5880557389a03fa6ada41703db5e068881e5f2"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea65804b5dc88dacd4a40279af0cdadcfe74b3e5b4c897aa0d81cf86927fee78"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa102d6d280a5455ad6a0f9e6d769989638718e938a6a0a2ff3f4a7ff8c62cc4"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09efe4615ada057ba2d30df871d2f668af661e971dfeedf0c159927d48bbeff0"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:008d3e808d03ef28542372d01057fd09168419cdc8f848efe2804f894ae03e51"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6f5cb257bc2ec58f437da2b37a8cd48f666db96d47b8a3115c29f316313654ff"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:992f18e0ea248ee03b5a6e8b3b4738850ae7dbb172cc41c966462801cbf62cf7"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0e9d124c191d5b881060a9e5060627694c3bdd1fe24c5eecc8d5d7d0eb6faabc"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3986b6f41ad22988e53d5778f91855dc0399b043fc8946d4f2e68af22ee9ff10"}, + {file = "yarl-1.9.4-cp312-cp312-win32.whl", hash = "sha256:4b21516d181cd77ebd06ce160ef8cc2a5e9ad35fb1c5930882baff5ac865eee7"}, + {file = "yarl-1.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:a9bd00dc3bc395a662900f33f74feb3e757429e545d831eef5bb280252631984"}, + {file = "yarl-1.9.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:63b20738b5aac74e239622d2fe30df4fca4942a86e31bf47a81a0e94c14df94f"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d7f7de27b8944f1fee2c26a88b4dabc2409d2fea7a9ed3df79b67277644e17"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c74018551e31269d56fab81a728f683667e7c28c04e807ba08f8c9e3bba32f14"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca06675212f94e7a610e85ca36948bb8fc023e458dd6c63ef71abfd482481aa5"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aef935237d60a51a62b86249839b51345f47564208c6ee615ed2a40878dccdd"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b134fd795e2322b7684155b7855cc99409d10b2e408056db2b93b51a52accc7"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d25039a474c4c72a5ad4b52495056f843a7ff07b632c1b92ea9043a3d9950f6e"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f7d6b36dd2e029b6bcb8a13cf19664c7b8e19ab3a58e0fefbb5b8461447ed5ec"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:957b4774373cf6f709359e5c8c4a0af9f6d7875db657adb0feaf8d6cb3c3964c"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d7eeb6d22331e2fd42fce928a81c697c9ee2d51400bd1a28803965883e13cead"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6a962e04b8f91f8c4e5917e518d17958e3bdee71fd1d8b88cdce74dd0ebbf434"}, + {file 
= "yarl-1.9.4-cp37-cp37m-win32.whl", hash = "sha256:f3bc6af6e2b8f92eced34ef6a96ffb248e863af20ef4fde9448cc8c9b858b749"}, + {file = "yarl-1.9.4-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4d7a90a92e528aadf4965d685c17dacff3df282db1121136c382dc0b6014d2"}, + {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ec61d826d80fc293ed46c9dd26995921e3a82146feacd952ef0757236fc137be"}, + {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8be9e837ea9113676e5754b43b940b50cce76d9ed7d2461df1af39a8ee674d9f"}, + {file = "yarl-1.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bef596fdaa8f26e3d66af846bbe77057237cb6e8efff8cd7cc8dff9a62278bbf"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d47552b6e52c3319fede1b60b3de120fe83bde9b7bddad11a69fb0af7db32f1"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84fc30f71689d7fc9168b92788abc977dc8cefa806909565fc2951d02f6b7d57"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4aa9741085f635934f3a2583e16fcf62ba835719a8b2b28fb2917bb0537c1dfa"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:206a55215e6d05dbc6c98ce598a59e6fbd0c493e2de4ea6cc2f4934d5a18d130"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07574b007ee20e5c375a8fe4a0789fad26db905f9813be0f9fef5a68080de559"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5a2e2433eb9344a163aced6a5f6c9222c0786e5a9e9cac2c89f0b28433f56e23"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6ad6d10ed9b67a382b45f29ea028f92d25bc0bc1daf6c5b801b90b5aa70fb9ec"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6fe79f998a4052d79e1c30eeb7d6c1c1056ad33300f682465e1b4e9b5a188b78"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a825ec844298c791fd28ed14ed1bffc56a98d15b8c58a20e0e08c1f5f2bea1be"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8619d6915b3b0b34420cf9b2bb6d81ef59d984cb0fde7544e9ece32b4b3043c3"}, + {file = "yarl-1.9.4-cp38-cp38-win32.whl", hash = "sha256:686a0c2f85f83463272ddffd4deb5e591c98aac1897d65e92319f729c320eece"}, + {file = "yarl-1.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:a00862fb23195b6b8322f7d781b0dc1d82cb3bcac346d1e38689370cc1cc398b"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:604f31d97fa493083ea21bd9b92c419012531c4e17ea6da0f65cacdcf5d0bd27"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8a854227cf581330ffa2c4824d96e52ee621dd571078a252c25e3a3b3d94a1b1"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ba6f52cbc7809cd8d74604cce9c14868306ae4aa0282016b641c661f981a6e91"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6327976c7c2f4ee6816eff196e25385ccc02cb81427952414a64811037bbc8b"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8397a3817d7dcdd14bb266283cd1d6fc7264a48c186b986f32e86d86d35fbac5"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0381b4ce23ff92f8170080c97678040fc5b08da85e9e292292aba67fdac6c34"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:23d32a2594cb5d565d358a92e151315d1b2268bc10f4610d098f96b147370136"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ddb2a5c08a4eaaba605340fdee8fc08e406c56617566d9643ad8bf6852778fc7"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:26a1dc6285e03f3cc9e839a2da83bcbf31dcb0d004c72d0730e755b33466c30e"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:18580f672e44ce1238b82f7fb87d727c4a131f3a9d33a5e0e82b793362bf18b4"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:29e0f83f37610f173eb7e7b5562dd71467993495e568e708d99e9d1944f561ec"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:1f23e4fe1e8794f74b6027d7cf19dc25f8b63af1483d91d595d4a07eca1fb26c"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db8e58b9d79200c76956cefd14d5c90af54416ff5353c5bfd7cbe58818e26ef0"}, + {file = "yarl-1.9.4-cp39-cp39-win32.whl", hash = "sha256:c7224cab95645c7ab53791022ae77a4509472613e839dab722a72abe5a684575"}, + {file = "yarl-1.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:824d6c50492add5da9374875ce72db7a0733b29c2394890aef23d533106e2b15"}, + {file = "yarl-1.9.4-py3-none-any.whl", hash = "sha256:928cecb0ef9d5a7946eb6ff58417ad2fe9375762382f1bf5c55e61645f2c43ad"}, + {file = "yarl-1.9.4.tar.gz", hash = "sha256:566db86717cf8080b99b58b083b773a908ae40f06681e87e589a976faf8246bf"}, ] [package.dependencies] @@ -4286,7 +4224,6 @@ multidict = ">=4.0" name = "zipp" version = "3.17.0" description = "Backport of pathlib-compatible object wrapper for zip files" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -4302,7 +4239,6 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p name = "zstandard" version = "0.22.0" description = "Zstandard bindings for Python" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -4379,4 +4315,4 @@ zstandard = ["zstandard"] [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "aca2a5ecf024067cf7ab8123908b7dedf1853f8a3b078429591ddb04ec558f9c" +content-hash = "32b2b8186f77ccc5b67cfc8f1cc04795f0b8cb27dff43c3fad851cbed8d3f386" diff --git a/pyproject.toml b/pyproject.toml index 059fd8c0bb..2ffb6c925b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -60,7 +60,7 @@ zstandard = ">=0.13.0,<1.0.0" pyarrow = { version = ">=9.0.0,<16.0.0", optional = true } pandas = { version = ">=1.0.0,<3.0.0", optional = true } duckdb = { version = ">=0.5.0,<1.0.0", optional = true } -ray = { version = ">=2.0.0,<3.0.0", optional = true } +ray = { version = ">=2.0.0,<2.8.0", optional = true } python-snappy = { version = ">=0.6.0,<1.0.0", optional = true } thrift = { version = ">=0.13.0,<1.0.0", optional = true } mypy-boto3-glue = { version = ">=1.28.18", optional = true } From 43919199ea961db25aa855b6bbe5b370d252fe21 Mon Sep 17 00:00:00 2001 From: Michael Marino Date: Thu, 25 Jan 2024 02:44:34 +0100 Subject: [PATCH 039/169] Set Glue Table Information when creating/updating tables (#288) * Set Glue Table Information when creating/updating tables * Add integration tests for glue/Athena --- pyiceberg/catalog/glue.py | 113 ++++++++++++++++- tests/catalog/integration_test_glue.py | 162 ++++++++++++++++++++++++- tests/catalog/test_glue.py | 45 ++++++- 3 files changed, 312 insertions(+), 8 deletions(-) diff --git a/pyiceberg/catalog/glue.py b/pyiceberg/catalog/glue.py index 6cf9462b71..bccbfa4f0a 100644 --- 
a/pyiceberg/catalog/glue.py +++ b/pyiceberg/catalog/glue.py @@ -18,6 +18,7 @@ from typing import ( Any, + Dict, List, Optional, Set, @@ -28,6 +29,7 @@ import boto3 from mypy_boto3_glue.client import GlueClient from mypy_boto3_glue.type_defs import ( + ColumnTypeDef, DatabaseInputTypeDef, DatabaseTypeDef, StorageDescriptorTypeDef, @@ -59,12 +61,32 @@ ) from pyiceberg.io import load_file_io from pyiceberg.partitioning import UNPARTITIONED_PARTITION_SPEC, PartitionSpec -from pyiceberg.schema import Schema +from pyiceberg.schema import Schema, SchemaVisitor, visit from pyiceberg.serializers import FromInputFile from pyiceberg.table import CommitTableRequest, CommitTableResponse, Table, update_table_metadata -from pyiceberg.table.metadata import new_table_metadata +from pyiceberg.table.metadata import TableMetadata, new_table_metadata from pyiceberg.table.sorting import UNSORTED_SORT_ORDER, SortOrder from pyiceberg.typedef import EMPTY_DICT +from pyiceberg.types import ( + BinaryType, + BooleanType, + DateType, + DecimalType, + DoubleType, + FixedType, + FloatType, + IntegerType, + ListType, + LongType, + MapType, + NestedField, + PrimitiveType, + StringType, + StructType, + TimestampType, + TimeType, + UUIDType, +) # If Glue should skip archiving an old table version when creating a new version in a commit. By # default, Glue archives all old table versions after an UpdateTable call, but Glue has a default @@ -73,6 +95,10 @@ GLUE_SKIP_ARCHIVE = "glue.skip-archive" GLUE_SKIP_ARCHIVE_DEFAULT = True +ICEBERG_FIELD_ID = "iceberg.field.id" +ICEBERG_FIELD_OPTIONAL = "iceberg.field.optional" +ICEBERG_FIELD_CURRENT = "iceberg.field.current" + def _construct_parameters( metadata_location: str, glue_table: Optional[TableTypeDef] = None, prev_metadata_location: Optional[str] = None @@ -84,10 +110,86 @@ def _construct_parameters( return new_parameters +GLUE_PRIMITIVE_TYPES = { + BooleanType: "boolean", + IntegerType: "int", + LongType: "bigint", + FloatType: "float", + DoubleType: "double", + DateType: "date", + TimeType: "string", + StringType: "string", + UUIDType: "string", + TimestampType: "timestamp", + FixedType: "binary", + BinaryType: "binary", +} + + +class _IcebergSchemaToGlueType(SchemaVisitor[str]): + def schema(self, schema: Schema, struct_result: str) -> str: + return struct_result + + def struct(self, struct: StructType, field_results: List[str]) -> str: + return f"struct<{','.join(field_results)}>" + + def field(self, field: NestedField, field_result: str) -> str: + return f"{field.name}:{field_result}" + + def list(self, list_type: ListType, element_result: str) -> str: + return f"array<{element_result}>" + + def map(self, map_type: MapType, key_result: str, value_result: str) -> str: + return f"map<{key_result},{value_result}>" + + def primitive(self, primitive: PrimitiveType) -> str: + if isinstance(primitive, DecimalType): + return f"decimal({primitive.precision},{primitive.scale})" + if (primitive_type := type(primitive)) not in GLUE_PRIMITIVE_TYPES: + raise ValueError(f"Unknown primitive type: {primitive}") + return GLUE_PRIMITIVE_TYPES[primitive_type] + + +def _to_columns(metadata: TableMetadata) -> List[ColumnTypeDef]: + results: Dict[str, ColumnTypeDef] = {} + + def _append_to_results(field: NestedField, is_current: bool) -> None: + if field.name in results: + return + + results[field.name] = cast( + ColumnTypeDef, + { + "Name": field.name, + "Type": visit(field.field_type, _IcebergSchemaToGlueType()), + "Parameters": { + ICEBERG_FIELD_ID: str(field.field_id), + 
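+                    # the two entries below record whether the field is nullable and whether it belongs to the table's current schema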
ICEBERG_FIELD_OPTIONAL: str(field.optional).lower(), + ICEBERG_FIELD_CURRENT: str(is_current).lower(), + }, + }, + ) + if field.doc: + results[field.name]["Comment"] = field.doc + + if current_schema := metadata.schema_by_id(metadata.current_schema_id): + for field in current_schema.columns: + _append_to_results(field, True) + + for schema in metadata.schemas: + if schema.schema_id == metadata.current_schema_id: + continue + for field in schema.columns: + _append_to_results(field, False) + + return list(results.values()) + + def _construct_table_input( table_name: str, metadata_location: str, properties: Properties, + metadata: TableMetadata, glue_table: Optional[TableTypeDef] = None, prev_metadata_location: Optional[str] = None, ) -> TableInputTypeDef: @@ -95,6 +197,10 @@ def _construct_table_input( "Name": table_name, "TableType": EXTERNAL_TABLE, "Parameters": _construct_parameters(metadata_location, glue_table, prev_metadata_location), + "StorageDescriptor": { + "Columns": _to_columns(metadata), + "Location": metadata.location, + }, } if "Description" in properties: @@ -258,7 +364,7 @@ def create_table( io = load_file_io(properties=self.properties, location=metadata_location) self._write_metadata(metadata, io, metadata_location) - table_input = _construct_table_input(table_name, metadata_location, properties) + table_input = _construct_table_input(table_name, metadata_location, properties, metadata) database_name, table_name = self.identifier_to_database_and_table(identifier) self._create_glue_table(database_name=database_name, table_name=table_name, table_input=table_input) @@ -322,6 +428,7 @@ def _commit_table(self, table_request: CommitTableRequest) -> CommitTableRespons table_name=table_name, metadata_location=new_metadata_location, properties=current_table.properties, + metadata=updated_metadata, glue_table=current_glue_table, prev_metadata_location=current_table.metadata_location, ) diff --git a/tests/catalog/integration_test_glue.py b/tests/catalog/integration_test_glue.py index 24401cae39..a56e4c6aaa 100644 --- a/tests/catalog/integration_test_glue.py +++ b/tests/catalog/integration_test_glue.py @@ -15,9 +15,12 @@ # specific language governing permissions and limitations # under the License. 
-from typing import Generator, List +import time +from typing import Any, Dict, Generator, List +from uuid import uuid4 import boto3 +import pyarrow as pa import pytest from botocore.exceptions import ClientError @@ -30,6 +33,7 @@ NoSuchTableError, TableAlreadyExistsError, ) +from pyiceberg.io.pyarrow import schema_to_pyarrow from pyiceberg.schema import Schema from pyiceberg.types import IntegerType from tests.conftest import clean_up, get_bucket_name, get_s3_path @@ -52,8 +56,62 @@ def fixture_test_catalog() -> Generator[Catalog, None, None]: clean_up(test_catalog) +class AthenaQueryHelper: + _athena_client: boto3.client + _s3_resource: boto3.resource + _output_bucket: str + _output_path: str + + def __init__(self) -> None: + self._s3_resource = boto3.resource("s3") + self._athena_client = boto3.client("athena") + self._output_bucket = get_bucket_name() + self._output_path = f"athena_results_{uuid4()}" + + def get_query_results(self, query: str) -> List[Dict[str, Any]]: + query_execution_id = self._athena_client.start_query_execution( + QueryString=query, ResultConfiguration={"OutputLocation": f"s3://{self._output_bucket}/{self._output_path}"} + )["QueryExecutionId"] + + while True: + result = self._athena_client.get_query_execution(QueryExecutionId=query_execution_id)["QueryExecution"]["Status"] + query_status = result["State"] + assert query_status not in [ + "FAILED", + "CANCELLED", + ], f""" + Athena query with the string failed or was cancelled: + Query: {query} + Status: {query_status} + Reason: {result["StateChangeReason"]}""" + + if query_status not in ["QUEUED", "RUNNING"]: + break + time.sleep(0.5) + + # No pagination for now, assume that we are not doing large queries + return self._athena_client.get_query_results(QueryExecutionId=query_execution_id)["ResultSet"]["Rows"] + + def clean_up(self) -> None: + bucket = self._s3_resource.Bucket(self._output_bucket) + for obj in bucket.objects.filter(Prefix=f"{self._output_path}/"): + self._s3_resource.Object(bucket.name, obj.key).delete() + + +@pytest.fixture(name="athena", scope="module") +def fixture_athena_helper() -> Generator[AthenaQueryHelper, None, None]: + query_helper = AthenaQueryHelper() + yield query_helper + query_helper.clean_up() + + def test_create_table( - test_catalog: Catalog, s3: boto3.client, table_schema_nested: Schema, table_name: str, database_name: str + test_catalog: Catalog, + s3: boto3.client, + table_schema_nested: Schema, + table_name: str, + database_name: str, + athena: AthenaQueryHelper, ) -> None: identifier = (database_name, table_name) test_catalog.create_namespace(database_name) @@ -64,6 +122,48 @@ def test_create_table( s3.head_object(Bucket=get_bucket_name(), Key=metadata_location) assert test_catalog._parse_metadata_version(table.metadata_location) == 0 + table.append( + pa.Table.from_pylist( + [ + { + "foo": "foo_val", + "bar": 1, + "baz": False, + "qux": ["x", "y"], + "quux": {"key": {"subkey": 2}}, + "location": [{"latitude": 1.1}], + "person": {"name": "some_name", "age": 23}, + } + ], + schema=schema_to_pyarrow(table.schema()), + ), + ) + + assert athena.get_query_results(f'SELECT * FROM "{database_name}"."{table_name}"') == [ + { + "Data": [ + {"VarCharValue": "foo"}, + {"VarCharValue": "bar"}, + {"VarCharValue": "baz"}, + {"VarCharValue": "qux"}, + {"VarCharValue": "quux"}, + {"VarCharValue": "location"}, + {"VarCharValue": "person"}, + ] + }, + { + "Data": [ + {"VarCharValue": "foo_val"}, + {"VarCharValue": "1"}, + {"VarCharValue": "false"}, + {"VarCharValue": "[x, y]"}, + 
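+                # Athena renders nested values as Hive-style text rather than JSON: the array above prints as [x, y], the map that follows as {key={subkey=2}}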
{"VarCharValue": "{key={subkey=2}}"}, + {"VarCharValue": "[{latitude=1.1, longitude=null}]"}, + {"VarCharValue": "{name=some_name, age=23}"}, + ] + }, + ] + def test_create_table_with_invalid_location(table_schema_nested: Schema, table_name: str, database_name: str) -> None: identifier = (database_name, table_name) @@ -269,7 +369,7 @@ def test_update_namespace_properties(test_catalog: Catalog, database_name: str) def test_commit_table_update_schema( - test_catalog: Catalog, table_schema_nested: Schema, database_name: str, table_name: str + test_catalog: Catalog, table_schema_nested: Schema, database_name: str, table_name: str, athena: AthenaQueryHelper ) -> None: identifier = (database_name, table_name) test_catalog.create_namespace(namespace=database_name) @@ -279,6 +379,20 @@ def test_commit_table_update_schema( assert test_catalog._parse_metadata_version(table.metadata_location) == 0 assert original_table_metadata.current_schema_id == 0 + assert athena.get_query_results(f'SELECT * FROM "{database_name}"."{table_name}"') == [ + { + "Data": [ + {"VarCharValue": "foo"}, + {"VarCharValue": "bar"}, + {"VarCharValue": "baz"}, + {"VarCharValue": "qux"}, + {"VarCharValue": "quux"}, + {"VarCharValue": "location"}, + {"VarCharValue": "person"}, + ] + } + ] + transaction = table.transaction() update = transaction.update_schema() update.add_column(path="b", field_type=IntegerType()) @@ -295,6 +409,48 @@ def test_commit_table_update_schema( assert new_schema == update._apply() assert new_schema.find_field("b").field_type == IntegerType() + table.append( + pa.Table.from_pylist( + [ + { + "foo": "foo_val", + "bar": 1, + "location": [{"latitude": 1.1}], + "person": {"name": "some_name", "age": 23}, + "b": 2, + } + ], + schema=schema_to_pyarrow(new_schema), + ), + ) + + assert athena.get_query_results(f'SELECT * FROM "{database_name}"."{table_name}"') == [ + { + "Data": [ + {"VarCharValue": "foo"}, + {"VarCharValue": "bar"}, + {"VarCharValue": "baz"}, + {"VarCharValue": "qux"}, + {"VarCharValue": "quux"}, + {"VarCharValue": "location"}, + {"VarCharValue": "person"}, + {"VarCharValue": "b"}, + ] + }, + { + "Data": [ + {"VarCharValue": "foo_val"}, + {"VarCharValue": "1"}, + {}, + {"VarCharValue": "[]"}, + {"VarCharValue": "{}"}, + {"VarCharValue": "[{latitude=1.1, longitude=null}]"}, + {"VarCharValue": "{name=some_name, age=23}"}, + {"VarCharValue": "2"}, + ] + }, + ] + def test_commit_table_properties(test_catalog: Catalog, table_schema_nested: Schema, database_name: str, table_name: str) -> None: identifier = (database_name, table_name) diff --git a/tests/catalog/test_glue.py b/tests/catalog/test_glue.py index bf6d11784f..b1f1371a04 100644 --- a/tests/catalog/test_glue.py +++ b/tests/catalog/test_glue.py @@ -38,7 +38,12 @@ @mock_glue def test_create_table_with_database_location( - _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str + _glue: boto3.client, + _bucket_initialize: None, + moto_endpoint_url: str, + table_schema_nested: Schema, + database_name: str, + table_name: str, ) -> None: catalog_name = "glue" identifier = (database_name, table_name) @@ -49,6 +54,22 @@ def test_create_table_with_database_location( assert TABLE_METADATA_LOCATION_REGEX.match(table.metadata_location) assert test_catalog._parse_metadata_version(table.metadata_location) == 0 + # Ensure schema is also pushed to Glue + table_info = _glue.get_table( + DatabaseName=database_name, + Name=table_name, + ) + storage_descriptor = table_info["Table"]["StorageDescriptor"] + 
columns = storage_descriptor["Columns"] + assert len(columns) == len(table_schema_nested.fields) + assert columns[0] == { + "Name": "foo", + "Type": "string", + "Parameters": {"iceberg.field.id": "1", "iceberg.field.optional": "true", "iceberg.field.current": "true"}, + } + + assert storage_descriptor["Location"] == f"s3://{BUCKET_NAME}/{database_name}.db/{table_name}" + @mock_glue def test_create_table_with_default_warehouse( @@ -524,7 +545,12 @@ def test_passing_profile_name() -> None: @mock_glue def test_commit_table_update_schema( - _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str + _glue: boto3.client, + _bucket_initialize: None, + moto_endpoint_url: str, + table_schema_nested: Schema, + database_name: str, + table_name: str, ) -> None: catalog_name = "glue" identifier = (database_name, table_name) @@ -554,6 +580,21 @@ def test_commit_table_update_schema( assert new_schema == update._apply() assert new_schema.find_field("b").field_type == IntegerType() + # Ensure schema is also pushed to Glue + table_info = _glue.get_table( + DatabaseName=database_name, + Name=table_name, + ) + storage_descriptor = table_info["Table"]["StorageDescriptor"] + columns = storage_descriptor["Columns"] + assert len(columns) == len(table_schema_nested.fields) + 1 + assert columns[-1] == { + "Name": "b", + "Type": "int", + "Parameters": {"iceberg.field.id": "18", "iceberg.field.optional": "true", "iceberg.field.current": "true"}, + } + assert storage_descriptor["Location"] == f"s3://{BUCKET_NAME}/{database_name}.db/{table_name}" + @mock_glue def test_commit_table_properties( From bc7ed1c3ad2f55be4a0b6a9c67aff7401de3be6f Mon Sep 17 00:00:00 2001 From: Drew Gallardo Date: Wed, 24 Jan 2024 17:53:56 -0800 Subject: [PATCH 040/169] Remove redundant API call to Glue (#300) --- pyiceberg/catalog/glue.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/pyiceberg/catalog/glue.py b/pyiceberg/catalog/glue.py index bccbfa4f0a..645568f80a 100644 --- a/pyiceberg/catalog/glue.py +++ b/pyiceberg/catalog/glue.py @@ -597,7 +597,6 @@ def list_tables(self, namespace: Union[str, Identifier]) -> List[Identifier]: table_list: List[TableTypeDef] = [] next_token: Optional[str] = None try: - table_list_response = self.glue.get_tables(DatabaseName=database_name) while True: table_list_response = ( self.glue.get_tables(DatabaseName=database_name) @@ -624,7 +623,6 @@ def list_namespaces(self, namespace: Union[str, Identifier] = ()) -> List[Identi return [] database_list: List[DatabaseTypeDef] = [] - databases_response = self.glue.get_databases() next_token: Optional[str] = None while True: From 0f08806d4431d5d60998dac1bca5780b6d2e2785 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 24 Jan 2024 18:38:21 -0800 Subject: [PATCH 041/169] Build: Bump mkdocs-material from 9.5.4 to 9.5.5 (#302) --- mkdocs/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mkdocs/requirements.txt b/mkdocs/requirements.txt index 52ac9bef14..fd5c182572 100644 --- a/mkdocs/requirements.txt +++ b/mkdocs/requirements.txt @@ -23,6 +23,6 @@ mkdocstrings-python==1.8.0 mkdocs-literate-nav==0.6.1 mkdocs-autorefs==0.5.0 mkdocs-gen-files==0.5.0 -mkdocs-material==9.5.4 +mkdocs-material==9.5.5 mkdocs-material-extensions==1.3.1 mkdocs-section-index==0.3.8 From 8464d713d69e6e2f4fa6f629362a27c940a097e6 Mon Sep 17 00:00:00 2001 From: Honah J Date: Thu, 25 Jan 2024 01:59:35 -0800 Subject: [PATCH 042/169] Bump PySpark to 
3.5.0 (#303) --- poetry.lock | 18 +++++++++--------- pyproject.toml | 2 +- tests/__init__.py | 16 ---------------- tests/integration/__init__.py | 16 ---------------- tests/integration/test_writes.py | 8 +++++++- 5 files changed, 17 insertions(+), 43 deletions(-) delete mode 100644 tests/__init__.py delete mode 100644 tests/integration/__init__.py diff --git a/poetry.lock b/poetry.lock index 88ac9115d2..cab01e4ce3 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. [[package]] name = "adlfs" @@ -2456,8 +2456,8 @@ files = [ [package.dependencies] numpy = [ {version = ">=1.20.3", markers = "python_version < \"3.10\""}, - {version = ">=1.21.0", markers = "python_version >= \"3.10\" and python_version < \"3.11\""}, {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, + {version = ">=1.21.0", markers = "python_version >= \"3.10\" and python_version < \"3.11\""}, ] python-dateutil = ">=2.8.2" pytz = ">=2020.1" @@ -2994,23 +2994,23 @@ tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} [[package]] name = "pyspark" -version = "3.4.2" +version = "3.5.0" description = "Apache Spark Python API" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pyspark-3.4.2.tar.gz", hash = "sha256:088db1b8ff33a748b802f1710ff6f6dcef0e0f2cca7d69bbbe55b187a0d55c3f"}, + {file = "pyspark-3.5.0.tar.gz", hash = "sha256:d41a9b76bd2aca370a6100d075c029e22ba44c5940927877e9435a3a9c566558"}, ] [package.dependencies] py4j = "0.10.9.7" [package.extras] -connect = ["googleapis-common-protos (>=1.56.4)", "grpcio (>=1.48.1)", "grpcio-status (>=1.48.1)", "numpy (>=1.15)", "pandas (>=1.0.5)", "pyarrow (>=1.0.0)"] +connect = ["googleapis-common-protos (>=1.56.4)", "grpcio (>=1.56.0)", "grpcio-status (>=1.56.0)", "numpy (>=1.15)", "pandas (>=1.0.5)", "pyarrow (>=4.0.0)"] ml = ["numpy (>=1.15)"] mllib = ["numpy (>=1.15)"] -pandas-on-spark = ["numpy (>=1.15)", "pandas (>=1.0.5)", "pyarrow (>=1.0.0)"] -sql = ["numpy (>=1.15)", "pandas (>=1.0.5)", "pyarrow (>=1.0.0)"] +pandas-on-spark = ["numpy (>=1.15)", "pandas (>=1.0.5)", "pyarrow (>=4.0.0)"] +sql = ["numpy (>=1.15)", "pandas (>=1.0.5)", "pyarrow (>=4.0.0)"] [[package]] name = "pytest" @@ -4315,4 +4315,4 @@ zstandard = ["zstandard"] [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "32b2b8186f77ccc5b67cfc8f1cc04795f0b8cb27dff43c3fad851cbed8d3f386" +content-hash = "8cbb637a0be18ca7ddacdc91e6cb78d6e71607b54c2cbdc89f903d2997c4f7e2" diff --git a/pyproject.toml b/pyproject.toml index 2ffb6c925b..505fedf813 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -82,7 +82,7 @@ requests-mock = "1.11.0" moto = { version = "^4.2.13", extras = ["server"] } typing-extensions = "4.9.0" pytest-mock = "3.12.0" -pyspark = "3.4.2" +pyspark = "3.5.0" cython = "3.0.8" [[tool.mypy.overrides]] diff --git a/tests/__init__.py b/tests/__init__.py deleted file mode 100644 index 13a83393a9..0000000000 --- a/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License.
diff --git a/tests/integration/__init__.py b/tests/integration/__init__.py deleted file mode 100644 index 13a83393a9..0000000000 --- a/tests/integration/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License.
diff --git a/tests/integration/test_writes.py b/tests/integration/test_writes.py index f8317e481d..a095c13315 100644 --- a/tests/integration/test_writes.py +++ b/tests/integration/test_writes.py @@ -230,10 +230,16 @@ def table_v1_v2_appended_with_null(session_catalog: Catalog, arrow_table_with_nu @pytest.fixture(scope="session") def spark() -> SparkSession: + import importlib.metadata import os + spark_version = ".".join(importlib.metadata.version("pyspark").split(".")[:2]) + scala_version = "2.12" + iceberg_version = "1.4.3" + os.environ["PYSPARK_SUBMIT_ARGS"] = ( - "--packages org.apache.iceberg:iceberg-spark-runtime-3.4_2.12:1.4.0,org.apache.iceberg:iceberg-aws-bundle:1.4.0 pyspark-shell" + f"--packages org.apache.iceberg:iceberg-spark-runtime-{spark_version}_{scala_version}:{iceberg_version}," + f"org.apache.iceberg:iceberg-aws-bundle:{iceberg_version} pyspark-shell" ) os.environ["AWS_REGION"] = "us-east-1" os.environ["AWS_ACCESS_KEY_ID"] = "admin"
From 2a27f2b66ef76efcf0996cd0fcc52eaad58b9cca Mon Sep 17 00:00:00 2001 From: Fokko Driesprong Date: Fri, 26 Jan 2024 08:52:44 +0100 Subject: [PATCH 043/169] Arrow: Don't copy the list/map when not needed (#252) --- dev/provision.py | 22 ++++++ pyiceberg/io/pyarrow.py | 51 ++++++++----- tests/integration/test_reads.py | 13 ++++ tests/io/test_pyarrow.py | 129 ++++++++++++++++++++++++++++---- 4 files changed, 183 insertions(+), 32 deletions(-)
diff --git a/dev/provision.py b/dev/provision.py index e5048d2fa5..44086caf20 100644 --- a/dev/provision.py +++ b/dev/provision.py @@ -320,3 +320,25 @@ spark.sql(f"ALTER TABLE {catalog_name}.default.test_table_add_column ADD COLUMN b string") spark.sql(f"INSERT INTO {catalog_name}.default.test_table_add_column VALUES ('2', '2')") + + spark.sql( + f""" + CREATE TABLE {catalog_name}.default.test_table_empty_list_and_map ( + col_list array<int>, + col_map map<int, int>, + col_list_with_struct array<struct<test: int>> + ) + USING iceberg + TBLPROPERTIES ( + 'format-version'='1' + ); + """ + ) + + spark.sql( + f""" + INSERT INTO {catalog_name}.default.test_table_empty_list_and_map + VALUES (null, null, null), + (array(), map(), array(struct(1))) + """ + ) diff
--git a/pyiceberg/io/pyarrow.py b/pyiceberg/io/pyarrow.py index 035f5e8031..cbfb9f6d5a 100644 --- a/pyiceberg/io/pyarrow.py +++ b/pyiceberg/io/pyarrow.py @@ -168,6 +168,7 @@ LIST_ELEMENT_NAME = "element" MAP_KEY_NAME = "key" MAP_VALUE_NAME = "value" +DOC = "doc" T = TypeVar("T") @@ -1118,12 +1119,20 @@ class ArrowProjectionVisitor(SchemaWithPartnerVisitor[pa.Array, Optional[pa.Arra def __init__(self, file_schema: Schema): self.file_schema = file_schema - def cast_if_needed(self, field: NestedField, values: pa.Array) -> pa.Array: + def _cast_if_needed(self, field: NestedField, values: pa.Array) -> pa.Array: file_field = self.file_schema.find_field(field.field_id) if field.field_type.is_primitive and field.field_type != file_field.field_type: return values.cast(schema_to_pyarrow(promote(file_field.field_type, field.field_type))) return values + def _construct_field(self, field: NestedField, arrow_type: pa.DataType) -> pa.Field: + return pa.field( + name=field.name, + type=arrow_type, + nullable=field.optional, + metadata={DOC: field.doc} if field.doc is not None else None, + ) + def schema(self, schema: Schema, schema_partner: Optional[pa.Array], struct_result: Optional[pa.Array]) -> Optional[pa.Array]: return struct_result @@ -1136,13 +1145,13 @@ def struct( fields: List[pa.Field] = [] for field, field_array in zip(struct.fields, field_results): if field_array is not None: - array = self.cast_if_needed(field, field_array) + array = self._cast_if_needed(field, field_array) field_arrays.append(array) - fields.append(pa.field(field.name, array.type, field.optional)) + fields.append(self._construct_field(field, array.type)) elif field.optional: arrow_type = schema_to_pyarrow(field.field_type) field_arrays.append(pa.nulls(len(struct_array), type=arrow_type)) - fields.append(pa.field(field.name, arrow_type, field.optional)) + fields.append(self._construct_field(field, arrow_type)) else: raise ResolveError(f"Field is required, and could not be found in the file: {field}") @@ -1152,24 +1161,32 @@ def field(self, field: NestedField, _: Optional[pa.Array], field_array: Optional return field_array def list(self, list_type: ListType, list_array: Optional[pa.Array], value_array: Optional[pa.Array]) -> Optional[pa.Array]: - return ( - pa.ListArray.from_arrays(list_array.offsets, self.cast_if_needed(list_type.element_field, value_array)) - if isinstance(list_array, pa.ListArray) - else None - ) + if isinstance(list_array, pa.ListArray) and value_array is not None: + if isinstance(value_array, pa.StructArray): + # This can be removed once this has been fixed: + # https://github.com/apache/arrow/issues/38809 + list_array = pa.ListArray.from_arrays(list_array.offsets, value_array) + + arrow_field = pa.list_(self._construct_field(list_type.element_field, value_array.type)) + return list_array.cast(arrow_field) + else: + return None def map( self, map_type: MapType, map_array: Optional[pa.Array], key_result: Optional[pa.Array], value_result: Optional[pa.Array] ) -> Optional[pa.Array]: - return ( - pa.MapArray.from_arrays( - map_array.offsets, - self.cast_if_needed(map_type.key_field, key_result), - self.cast_if_needed(map_type.value_field, value_result), + if isinstance(map_array, pa.MapArray) and key_result is not None and value_result is not None: + arrow_field = pa.map_( + self._construct_field(map_type.key_field, key_result.type), + self._construct_field(map_type.value_field, value_result.type), ) - if isinstance(map_array, pa.MapArray) - else None - ) + if isinstance(value_result, pa.StructArray): + # 
Arrow does not allow reordering of fields, therefore we have to copy the array :( + return pa.MapArray.from_arrays(map_array.offsets, key_result, value_result, arrow_field) + else: + return map_array.cast(arrow_field) + else: + return None def primitive(self, _: PrimitiveType, array: Optional[pa.Array]) -> Optional[pa.Array]: return array
diff --git a/tests/integration/test_reads.py b/tests/integration/test_reads.py index e7c8b74cf0..3fc06fbddc 100644 --- a/tests/integration/test_reads.py +++ b/tests/integration/test_reads.py @@ -428,3 +428,16 @@ def test_sanitize_character(catalog: Catalog) -> None: assert len(arrow_table.schema.names), 1 assert len(table_test_table_sanitized_character.schema().fields), 1 assert arrow_table.schema.names[0] == table_test_table_sanitized_character.schema().fields[0].name + + +@pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_null_list_and_map(catalog: Catalog) -> None: + table_test_empty_list_and_map = catalog.load_table("default.test_table_empty_list_and_map") + arrow_table = table_test_empty_list_and_map.scan().to_arrow() + assert arrow_table["col_list"].to_pylist() == [None, []] + assert arrow_table["col_map"].to_pylist() == [None, []] + # This should be: + # assert arrow_table["col_list_with_struct"].to_pylist() == [None, [{'test': 1}]] + # Once https://github.com/apache/arrow/issues/38809 has been fixed + assert arrow_table["col_list_with_struct"].to_pylist() == [[], [{'test': 1}]]
diff --git a/tests/io/test_pyarrow.py b/tests/io/test_pyarrow.py index 5efeb42ed8..e6f4de3596 100644 --- a/tests/io/test_pyarrow.py +++ b/tests/io/test_pyarrow.py @@ -682,6 +682,24 @@ def schema_list_of_structs() -> Schema: ) +@pytest.fixture +def schema_map_of_structs() -> Schema: + return Schema( + NestedField( + 5, + "locations", + MapType( + key_id=51, + value_id=52, + key_type=StringType(), + value_type=StructType(NestedField(511, "lat", DoubleType()), NestedField(512, "long", DoubleType())), + element_required=False, + ), + required=False, + ), + ) + + @pytest.fixture def schema_map() -> Schema: return Schema(
@@ -793,6 +811,25 @@ def file_list_of_structs(schema_list_of_structs: Schema, tmpdir: str) -> str: ) + +@pytest.fixture +def file_map_of_structs(schema_map_of_structs: Schema, tmpdir: str) -> str: + pyarrow_schema = schema_to_pyarrow( + schema_map_of_structs, metadata={ICEBERG_SCHEMA: bytes(schema_map_of_structs.model_dump_json(), UTF8)} + ) + return _write_table_to_file( + f"file:{tmpdir}/e.parquet", + pyarrow_schema, + pa.Table.from_pylist( + [ + {"locations": {"1": {"lat": 52.371807, "long": 4.896029}, "2": {"lat": 52.387386, "long": 4.646219}}}, + {"locations": {}}, + {"locations": {"3": {"lat": 52.078663, "long": 4.288788}, "4": {"lat": 52.387386, "long": 4.646219}}}, + ], + schema=pyarrow_schema, + ), + ) + + @pytest.fixture def file_map(schema_map: Schema, tmpdir: str) -> str: pyarrow_schema = schema_to_pyarrow(schema_map, metadata={ICEBERG_SCHEMA: bytes(schema_map.model_dump_json(), UTF8)})
@@ -914,7 +951,11 @@ def test_read_list(schema_list: Schema, file_list: str) -> None: for actual, expected in zip(result_table.columns[0], [list(range(1, 10)), list(range(2, 20)), list(range(3, 30))]): assert actual.as_py() == expected - assert repr(result_table.schema) == "ids: list<item: int32>\n child 0, item: int32" + assert ( + repr(result_table.schema) + == """ids: list<element: int32> + child 0, element: int32""" + ) def test_read_map(schema_map: Schema, file_map: str) -> None: @@ -927,9 +968,9 @@ def test_read_map(schema_map: Schema, file_map: str) -> None: assert ( repr(result_table.schema) == """properties: map<string, string> - child 0, entries: struct<key: string not null, value: string> not null + child 0, entries: struct<key: string not null, value: string not null> not null child 0, key: string not null - child 1, value: string + child 1, value: string not null""" )
@@ -1063,7 +1104,11 @@ def test_projection_nested_struct_subset(file_struct: str) -> None: assert actual.as_py() == {"lat": expected} assert len(result_table.columns[0]) == 3 - assert repr(result_table.schema) == "location: struct<lat: double not null> not null\n child 0, lat: double not null" + assert ( + repr(result_table.schema) + == """location: struct<lat: double not null> not null + child 0, lat: double not null""" + ) def test_projection_nested_new_field(file_struct: str) -> None: @@ -1082,7 +1127,11 @@ def test_projection_nested_new_field(file_struct: str) -> None: for actual, expected in zip(result_table.columns[0], [None, None, None]): assert actual.as_py() == {"null": expected} assert len(result_table.columns[0]) == 3 - assert repr(result_table.schema) == "location: struct<null: double> not null\n child 0, null: double" + assert ( + repr(result_table.schema) + == """location: struct<null: double> not null + child 0, null: double""" + )
def test_projection_nested_struct(schema_struct: Schema, file_struct: str) -> None: @@ -1111,7 +1160,10 @@ def test_projection_nested_struct(schema_struct: Schema, file_struct: str) -> No assert len(result_table.columns[0]) == 3 assert ( repr(result_table.schema) - == "location: struct<lat: double, null: double, long: double> not null\n child 0, lat: double\n child 1, null: double\n child 2, long: double" + == """location: struct<lat: double, null: double, long: double> not null + child 0, lat: double + child 1, null: double + child 2, long: double""" )
@@ -1136,28 +1188,75 @@ def test_projection_list_of_structs(schema_list_of_structs: Schema, file_list_of result_table = project(schema, [file_list_of_structs]) assert len(result_table.columns) == 1 assert len(result_table.columns[0]) == 3 + results = [row.as_py() for row in result_table.columns[0]] + assert results == [ + [ + {'latitude': 52.371807, 'longitude': 4.896029, 'altitude': None}, + {'latitude': 52.387386, 'longitude': 4.646219, 'altitude': None}, + ], + [], + [ + {'latitude': 52.078663, 'longitude': 4.288788, 'altitude': None}, + {'latitude': 52.387386, 'longitude': 4.646219, 'altitude': None}, + ], + ] + assert ( + repr(result_table.schema) + == """locations: list<element: struct<latitude: double not null, longitude: double not null, altitude: double>> + child 0, element: struct<latitude: double not null, longitude: double not null, altitude: double> + child 0, latitude: double not null + child 1, longitude: double not null + child 2, altitude: double""" + ) + + +def test_projection_maps_of_structs(schema_map_of_structs: Schema, file_map_of_structs: str) -> None: + schema = Schema( + NestedField( + 5, + "locations", + MapType( + key_id=51, + value_id=52, + key_type=StringType(), + value_type=StructType( + NestedField(511, "latitude", DoubleType()), + NestedField(512, "longitude", DoubleType()), + NestedField(513, "altitude", DoubleType(), required=False), + ), + element_required=False, + ), + required=False, + ), + ) + + result_table = project(schema, [file_map_of_structs]) + assert len(result_table.columns) == 1 + assert len(result_table.columns[0]) == 3 for actual, expected in zip( result_table.columns[0], [ [ - {"latitude": 52.371807, "longitude": 4.896029, "altitude": None}, - {"latitude": 52.387386, "longitude": 4.646219, "altitude": None}, + ("1", {"latitude": 52.371807, "longitude": 4.896029, "altitude": None}), + ("2", {"latitude": 52.387386, "longitude": 4.646219, "altitude": None}), ], [], [ - {"latitude": 52.078663, "longitude": 4.288788, "altitude": None}, - {"latitude": 52.387386, "longitude": 4.646219, "altitude": None}, + ("3", {"latitude": 52.078663, "longitude": 4.288788, "altitude": None}), + ("4", {"latitude": 52.387386, "longitude": 4.646219, "altitude": None}), ], ], ): assert actual.as_py() == expected assert ( repr(result_table.schema) - == """locations: list<item: struct<latitude: double not null, longitude: double not null, altitude: double>> - child 0, item: struct<latitude: double not null, longitude: double not null, altitude: double> - child 0, latitude: double not null - child 1, longitude: double not null - child 2, altitude: double""" + == """locations: map<string, struct<latitude: double not null, longitude: double not null, altitude: double>> + child 0, entries: struct<key: string not null, value: struct<latitude: double not null, longitude: double not null, altitude: double> not null> not null + child 0, key: string not null + child 1, value: struct<latitude: double not null, longitude: double not null, altitude: double> not null + child 0, latitude: double not null + child 1, longitude: double not null + child 2, altitude: double""" )
From cd7fb502900a717d6b902a398b267eb10e4faa9b Mon Sep 17 00:00:00 2001 From: Fokko Driesprong Date: Fri, 26 Jan 2024 14:36:20 +0100 Subject: [PATCH 044/169] Add UnionByName functionality (#296) * Add UnionByName functionality * Thanks Honah! * Add `_id` Co-authored-by: Honah J. * Fix --------- Co-authored-by: Honah J. --- mkdocs/docs/api.md | 41 +++ pyiceberg/table/__init__.py | 188 +++++++++- pyiceberg/types.py | 12 + tests/test_schema.py | 671 +++++++++++++++++++++++++++++++++++- 4 files changed, 905 insertions(+), 7 deletions(-)
diff --git a/mkdocs/docs/api.md b/mkdocs/docs/api.md index 9d97d4f676..6f79835db0 100644 --- a/mkdocs/docs/api.md +++ b/mkdocs/docs/api.md @@ -293,6 +293,47 @@ with table.transaction() as transaction: # ... Update properties etc ``` +### Union by Name + +Using `.union_by_name()` you can merge another schema into an existing schema without having to worry about field-IDs: + +```python +from pyiceberg.catalog import load_catalog +from pyiceberg.schema import Schema +from pyiceberg.types import NestedField, StringType, DoubleType, LongType + +catalog = load_catalog() + +schema = Schema( + NestedField(1, "city", StringType(), required=False), + NestedField(2, "lat", DoubleType(), required=False), + NestedField(3, "long", DoubleType(), required=False), +) + +table = catalog.create_table("default.locations", schema) + +new_schema = Schema( + NestedField(1, "city", StringType(), required=False), + NestedField(2, "lat", DoubleType(), required=False), + NestedField(3, "long", DoubleType(), required=False), + NestedField(10, "population", LongType(), required=False), +) + +with table.update_schema() as update: + update.union_by_name(new_schema) +``` + +Now the table has the union of the two schemas `print(table.schema())`: + +``` +table { + 1: city: optional string + 2: lat: optional double + 3: long: optional double + 4: population: optional long +} +``` + ### Add column Using `add_column` you can add a column, without having to worry about the field-id:
diff --git a/pyiceberg/table/__init__.py b/pyiceberg/table/__init__.py index 057dd8427c..221a609e5c 100644 --- a/pyiceberg/table/__init__.py +++ b/pyiceberg/table/__init__.py @@ -69,11 +69,14 @@ ) from pyiceberg.partitioning import PartitionSpec from pyiceberg.schema import ( + PartnerAccessor, Schema, SchemaVisitor, + SchemaWithPartnerVisitor, assign_fresh_schema_ids, promote, visit, + visit_with_partner, ) from pyiceberg.table.metadata import ( INITIAL_SEQUENCE_NUMBER, @@ -1379,7 +1382,7 @@ class Move: class UpdateSchema: - _table: Table + _table: Optional[Table] _schema: Schema _last_column_id: itertools.count[int] _identifier_field_names: Set[str] @@ -1398,14 +1401,23 @@ class UpdateSchema: def __init__( self, - table: Table, + table: Optional[Table], transaction: Optional[Transaction] = None, allow_incompatible_changes: bool = False, case_sensitive: bool = True, + schema:
Optional[Schema] = None, ) -> None: self._table = table - self._schema = table.schema() - self._last_column_id = itertools.count(table.metadata.last_column_id + 1) + + if isinstance(schema, Schema): + self._schema = schema + self._last_column_id = itertools.count(1 + schema.highest_field_id) + elif table is not None: + self._schema = table.schema() + self._last_column_id = itertools.count(1 + table.metadata.last_column_id) + else: + raise ValueError("Either provide a table or a schema") + self._identifier_field_names = self._schema.identifier_field_names() self._adds = {} @@ -1449,6 +1461,15 @@ def case_sensitive(self, case_sensitive: bool) -> UpdateSchema: self._case_sensitive = case_sensitive return self + def union_by_name(self, new_schema: Schema) -> UpdateSchema: + visit_with_partner( + new_schema, + -1, + UnionByNameVisitor(update_schema=self, existing_schema=self._schema, case_sensitive=self._case_sensitive), # type: ignore + PartnerIdByNameAccessor(partner_schema=self._schema, case_sensitive=self._case_sensitive), + ) + return self + def add_column( self, path: Union[str, Tuple[str, ...]], field_type: IcebergType, doc: Optional[str] = None, required: bool = False ) -> UpdateSchema: @@ -1816,6 +1837,9 @@ def move_after(self, path: Union[str, Tuple[str, ...]], after_name: Union[str, T def commit(self) -> None: """Apply the pending changes and commit.""" + if self._table is None: + raise ValueError("Requires a table to commit to") + new_schema = self._apply() existing_schema_id = next((schema.schema_id for schema in self._table.metadata.schemas if schema == new_schema), None) @@ -1862,7 +1886,8 @@ def _apply(self) -> Schema: field_ids.add(field.field_id) - return Schema(*struct.fields, schema_id=1 + max(self._table.schemas().keys()), identifier_field_ids=field_ids) + next_schema_id = 1 + (max(self._table.schemas().keys()) if self._table is not None else self._schema.schema_id) + return Schema(*struct.fields, schema_id=next_schema_id, identifier_field_ids=field_ids) def assign_new_column_id(self) -> int: return next(self._last_column_id) @@ -1995,6 +2020,159 @@ def primitive(self, primitive: PrimitiveType) -> Optional[IcebergType]: return primitive +class UnionByNameVisitor(SchemaWithPartnerVisitor[int, bool]): + update_schema: UpdateSchema + existing_schema: Schema + case_sensitive: bool + + def __init__(self, update_schema: UpdateSchema, existing_schema: Schema, case_sensitive: bool) -> None: + self.update_schema = update_schema + self.existing_schema = existing_schema + self.case_sensitive = case_sensitive + + def schema(self, schema: Schema, partner_id: Optional[int], struct_result: bool) -> bool: + return struct_result + + def struct(self, struct: StructType, partner_id: Optional[int], missing_positions: List[bool]) -> bool: + if partner_id is None: + return True + + fields = struct.fields + partner_struct = self._find_field_type(partner_id) + + if not partner_struct.is_struct: + raise ValueError(f"Expected a struct, got: {partner_struct}") + + for pos, missing in enumerate(missing_positions): + if missing: + self._add_column(partner_id, fields[pos]) + else: + field = fields[pos] + if nested_field := partner_struct.field_by_name(field.name, case_sensitive=self.case_sensitive): + self._update_column(field, nested_field) + + return False + + def _add_column(self, parent_id: int, field: NestedField) -> None: + if parent_name := self.existing_schema.find_column_name(parent_id): + path: Tuple[str, ...] 
= (parent_name, field.name) + else: + path = (field.name,) + + self.update_schema.add_column(path=path, field_type=field.field_type, required=field.required, doc=field.doc) + + def _update_column(self, field: NestedField, existing_field: NestedField) -> None: + full_name = self.existing_schema.find_column_name(existing_field.field_id) + + if full_name is None: + raise ValueError(f"Could not find field: {existing_field}") + + if field.optional and existing_field.required: + self.update_schema.make_column_optional(full_name) + + if field.field_type.is_primitive and field.field_type != existing_field.field_type: + self.update_schema.update_column(full_name, field_type=field.field_type) + + if field.doc is not None and field.doc != existing_field.doc: + self.update_schema.update_column(full_name, doc=field.doc)
+ + def _find_field_type(self, field_id: int) -> IcebergType: + if field_id == -1: + return self.existing_schema.as_struct() + else: + return self.existing_schema.find_field(field_id).field_type + + def field(self, field: NestedField, partner_id: Optional[int], field_result: bool) -> bool: + return partner_id is None + + def list(self, list_type: ListType, list_partner_id: Optional[int], element_missing: bool) -> bool: + if list_partner_id is None: + return True + + if element_missing: + raise ValueError("Error traversing schemas: element is missing, but list is present") + + partner_list_type = self._find_field_type(list_partner_id) + if not isinstance(partner_list_type, ListType): + raise ValueError(f"Expected list-type, got: {partner_list_type}") + + self._update_column(list_type.element_field, partner_list_type.element_field) + + return False
+ + def map(self, map_type: MapType, map_partner_id: Optional[int], key_missing: bool, value_missing: bool) -> bool: + if map_partner_id is None: + return True + + if key_missing: + raise ValueError("Error traversing schemas: key is missing, but map is present") + + if value_missing: + raise ValueError("Error traversing schemas: value is missing, but map is present") + + partner_map_type = self._find_field_type(map_partner_id) + if not isinstance(partner_map_type, MapType): + raise ValueError(f"Expected map-type, got: {partner_map_type}") + + self._update_column(map_type.key_field, partner_map_type.key_field) + self._update_column(map_type.value_field, partner_map_type.value_field) + + return False + + def primitive(self, primitive: PrimitiveType, primitive_partner_id: Optional[int]) -> bool: + return primitive_partner_id is None
+ + +class PartnerIdByNameAccessor(PartnerAccessor[int]): + partner_schema: Schema + case_sensitive: bool + + def __init__(self, partner_schema: Schema, case_sensitive: bool) -> None: + self.partner_schema = partner_schema + self.case_sensitive = case_sensitive + + def schema_partner(self, partner: Optional[int]) -> Optional[int]: + return -1 + + def field_partner(self, partner_field_id: Optional[int], field_id: int, field_name: str) -> Optional[int]: + if partner_field_id is not None: + if partner_field_id == -1: + struct = self.partner_schema.as_struct() + else: + struct = self.partner_schema.find_field(partner_field_id).field_type + if not struct.is_struct: + raise ValueError(f"Expected StructType: {struct}") + + if field := struct.field_by_name(name=field_name, case_sensitive=self.case_sensitive): + return field.field_id + + return None
+ + def list_element_partner(self, partner_list_id: Optional[int]) -> Optional[int]: + if partner_list_id is not None and (field := self.partner_schema.find_field(partner_list_id)): + if not isinstance(field.field_type, ListType): + raise ValueError(f"Expected ListType: {field}") + return field.field_type.element_field.field_id + else: + return None + + def map_key_partner(self, partner_map_id: Optional[int]) -> Optional[int]: + if partner_map_id is not None and (field := self.partner_schema.find_field(partner_map_id)): + if not isinstance(field.field_type, MapType): + raise ValueError(f"Expected MapType: {field}") + return field.field_type.key_field.field_id + else: + return None + + def map_value_partner(self, partner_map_id: Optional[int]) -> Optional[int]: + if partner_map_id is not None and (field := self.partner_schema.find_field(partner_map_id)): + if not isinstance(field.field_type, MapType): + raise ValueError(f"Expected MapType: {field}") + return field.field_type.value_field.field_id + else: + return None + + def _add_fields(fields: Tuple[NestedField, ...], adds: Optional[List[NestedField]]) -> Tuple[NestedField, ...]: adds = adds or [] return fields + tuple(adds)
diff --git a/pyiceberg/types.py b/pyiceberg/types.py index 5e7c193295..eb215121dc 100644 --- a/pyiceberg/types.py +++ b/pyiceberg/types.py @@ -350,6 +350,18 @@ def field(self, field_id: int) -> Optional[NestedField]: return field return None + def field_by_name(self, name: str, case_sensitive: bool = True) -> Optional[NestedField]: + if case_sensitive: + for field in self.fields: + if field.name == name: + return field + else: + name_lower = name.lower() + for field in self.fields: + if field.name.lower() == name_lower: + return field + return None + def __str__(self) -> str: """Return the string representation of the StructType class.""" return f"struct<{', '.join(map(str, self.fields))}>"
diff --git a/tests/test_schema.py b/tests/test_schema.py index 8e34423cf9..a5487b7fd9 100644 --- a/tests/test_schema.py +++ b/tests/test_schema.py @@ -16,12 +16,12 @@ # under the License.
from textwrap import dedent -from typing import Any, Dict +from typing import Any, Dict, List import pytest from pyiceberg import schema -from pyiceberg.exceptions import ResolveError +from pyiceberg.exceptions import ResolveError, ValidationError from pyiceberg.expressions import Accessor from pyiceberg.schema import ( Schema, @@ -30,6 +30,7 @@ prune_columns, sanitize_column_names, ) +from pyiceberg.table import UpdateSchema from pyiceberg.typedef import EMPTY_DICT, StructProtocol from pyiceberg.types import ( BinaryType, @@ -45,6 +46,7 @@ LongType, MapType, NestedField, + PrimitiveType, StringType, StructType, TimestampType, @@ -912,3 +914,668 @@ def test_promotion(file_type: IcebergType, read_type: IcebergType) -> None: else: with pytest.raises(ResolveError): promote(file_type, read_type) + + +@pytest.fixture() +def primitive_fields() -> List[NestedField]: + return [ + NestedField(field_id=1, name=str(primitive_type), field_type=primitive_type, required=False) + for primitive_type in TEST_PRIMITIVE_TYPES + ] + + +def test_add_top_level_primitives(primitive_fields: NestedField) -> None: + for primitive_field in primitive_fields: + new_schema = Schema(primitive_field) + applied = UpdateSchema(None, schema=Schema()).union_by_name(new_schema)._apply() + assert applied == new_schema + + +def test_add_top_level_list_of_primitives(primitive_fields: NestedField) -> None: + for primitive_type in TEST_PRIMITIVE_TYPES: + new_schema = Schema( + NestedField( + field_id=1, + name="aList", + field_type=ListType(element_id=2, element_type=primitive_type, element_required=False), + required=False, + ) + ) + applied = UpdateSchema(None, schema=Schema()).union_by_name(new_schema)._apply() + assert applied.as_struct() == new_schema.as_struct() + + +def test_add_top_level_map_of_primitives(primitive_fields: NestedField) -> None: + for primitive_type in TEST_PRIMITIVE_TYPES: + new_schema = Schema( + NestedField( + field_id=1, + name="aMap", + field_type=MapType( + key_id=2, key_type=primitive_type, value_id=3, value_type=primitive_type, value_required=False + ), + required=False, + ) + ) + applied = UpdateSchema(None, schema=Schema()).union_by_name(new_schema)._apply() + assert applied.as_struct() == new_schema.as_struct() + + +def test_add_top_struct_of_primitives(primitive_fields: NestedField) -> None: + for primitive_type in TEST_PRIMITIVE_TYPES: + new_schema = Schema( + NestedField( + field_id=1, + name="aStruct", + field_type=StructType(NestedField(field_id=2, name="primitive", field_type=primitive_type, required=False)), + required=False, + ) + ) + applied = UpdateSchema(None, schema=Schema()).union_by_name(new_schema)._apply() + assert applied.as_struct() == new_schema.as_struct() + + +def test_add_nested_primitive(primitive_fields: NestedField) -> None: + for primitive_type in TEST_PRIMITIVE_TYPES: + current_schema = Schema(NestedField(field_id=1, name="aStruct", field_type=StructType(), required=False)) + new_schema = Schema( + NestedField( + field_id=1, + name="aStruct", + field_type=StructType(NestedField(field_id=2, name="primitive", field_type=primitive_type, required=False)), + required=False, + ) + ) + applied = UpdateSchema(None, schema=current_schema).union_by_name(new_schema)._apply() + assert applied.as_struct() == new_schema.as_struct() + + +def _primitive_fields(types: List[PrimitiveType], start_id: int = 0) -> List[NestedField]: + fields = [] + for iceberg_type in types: + fields.append(NestedField(field_id=start_id, name=str(iceberg_type), field_type=iceberg_type, required=False)) + 
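# advance the counter so each generated field receives a unique, consecutive field id +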
start_id = start_id + 1 + + return fields + + +def test_add_nested_primitives(primitive_fields: NestedField) -> None: + current_schema = Schema(NestedField(field_id=1, name="aStruct", field_type=StructType(), required=False)) + new_schema = Schema( + NestedField( + field_id=1, name="aStruct", field_type=StructType(*_primitive_fields(TEST_PRIMITIVE_TYPES, 2)), required=False + ) + ) + applied = UpdateSchema(None, schema=current_schema).union_by_name(new_schema)._apply() + assert applied.as_struct() == new_schema.as_struct() + + +def test_add_nested_lists(primitive_fields: NestedField) -> None: + new_schema = Schema( + NestedField( + field_id=1, + name="aList", + type=ListType( + element_id=2, + element_type=ListType( + element_id=3, + element_type=ListType( + element_id=4, + element_type=ListType( + element_id=5, + element_type=ListType( + element_id=6, + element_type=ListType( + element_id=7, + element_type=ListType( + element_id=8, + element_type=ListType(element_id=9, element_type=DecimalType(precision=11, scale=20)), + element_required=False, + ), + element_required=False, + ), + element_required=False, + ), + element_required=False, + ), + element_required=False, + ), + element_required=False, + ), + element_required=False, + ), + required=False, + ) + ) + applied = UpdateSchema(None, schema=Schema()).union_by_name(new_schema)._apply() + assert applied.as_struct() == new_schema.as_struct() + + +def test_add_nested_struct(primitive_fields: NestedField) -> None: + new_schema = Schema( + NestedField( + field_id=1, + name="struct1", + type=StructType( + NestedField( + field_id=2, + name="struct2", + type=StructType( + NestedField( + field_id=3, + name="struct3", + type=StructType( + NestedField( + field_id=4, + name="struct4", + type=StructType( + NestedField( + field_id=5, + name="struct5", + type=StructType( + NestedField( + field_id=6, + name="struct6", + type=StructType( + NestedField(field_id=7, name="aString", field_type=StringType()) + ), + required=False, + ) + ), + required=False, + ) + ), + required=False, + ) + ), + required=False, + ) + ), + required=False, + ) + ), + required=False, + ) + ) + applied = UpdateSchema(None, schema=Schema()).union_by_name(new_schema)._apply() + assert applied.as_struct() == new_schema.as_struct() + + +def test_add_nested_maps(primitive_fields: NestedField) -> None: + new_schema = Schema( + NestedField( + field_id=1, + name="struct", + field_type=MapType( + key_id=2, + value_id=3, + key_type=StringType(), + value_type=MapType( + key_id=4, + value_id=5, + key_type=StringType(), + value_type=MapType( + key_id=6, + value_id=7, + key_type=StringType(), + value_type=MapType( + key_id=8, + value_id=9, + key_type=StringType(), + value_type=MapType( + key_id=10, + value_id=11, + key_type=StringType(), + value_type=MapType(key_id=12, value_id=13, key_type=StringType(), value_type=StringType()), + value_required=False, + ), + value_required=False, + ), + value_required=False, + ), + value_required=False, + ), + value_required=False, + ), + required=False, + ) + ) + applied = UpdateSchema(None, schema=Schema()).union_by_name(new_schema)._apply() + assert applied.as_struct() == new_schema.as_struct() + + +def test_detect_invalid_top_level_list() -> None: + current_schema = Schema( + NestedField( + field_id=1, + name="aList", + field_type=ListType(element_id=2, element_type=StringType(), element_required=False), + required=False, + ) + ) + new_schema = Schema( + NestedField( + field_id=1, + name="aList", + field_type=ListType(element_id=2, 
element_type=DoubleType(), element_required=False), + required=False, + ) + ) + + with pytest.raises(ValidationError, match="Cannot change column type: aList.element: string -> double"): + _ = UpdateSchema(None, schema=current_schema).union_by_name(new_schema)._apply() + + +def test_detect_invalid_top_level_maps() -> None: + current_schema = Schema( + NestedField( + field_id=1, + name="aMap", + field_type=MapType(key_id=2, value_id=3, key_type=StringType(), value_type=StringType(), value_required=False), + required=False, + ) + ) + new_schema = Schema( + NestedField( + field_id=1, + name="aMap", + field_type=MapType(key_id=2, value_id=3, key_type=UUIDType(), value_type=StringType(), value_required=False), + required=False, + ) + ) + + with pytest.raises(ValidationError, match="Cannot change column type: aMap.key: string -> uuid"): + _ = UpdateSchema(None, schema=current_schema).union_by_name(new_schema)._apply() + + +def test_promote_float_to_double() -> None: + current_schema = Schema(NestedField(field_id=1, name="aCol", field_type=FloatType(), required=False)) + new_schema = Schema(NestedField(field_id=1, name="aCol", field_type=DoubleType(), required=False)) + + applied = UpdateSchema(None, schema=current_schema).union_by_name(new_schema)._apply() + + assert applied.as_struct() == new_schema.as_struct() + assert len(applied.fields) == 1 + assert isinstance(applied.fields[0].field_type, DoubleType) + + +def test_detect_invalid_promotion_double_to_float() -> None: + current_schema = Schema(NestedField(field_id=1, name="aCol", field_type=DoubleType(), required=False)) + new_schema = Schema(NestedField(field_id=1, name="aCol", field_type=FloatType(), required=False)) + + with pytest.raises(ValidationError, match="Cannot change column type: aCol: double -> float"): + _ = UpdateSchema(None, schema=current_schema).union_by_name(new_schema)._apply() + + +# decimal(P,S) Fixed-point decimal; precision P, scale S -> Scale is fixed [1], +# precision must be 38 or less +def test_type_promote_decimal_to_fixed_scale_with_wider_precision() -> None: + current_schema = Schema(NestedField(field_id=1, name="aCol", field_type=DecimalType(precision=20, scale=1), required=False)) + new_schema = Schema(NestedField(field_id=1, name="aCol", field_type=DecimalType(precision=22, scale=1), required=False)) + + applied = UpdateSchema(None, schema=current_schema).union_by_name(new_schema)._apply() + + assert applied.as_struct() == new_schema.as_struct() + assert len(applied.fields) == 1 + field = applied.fields[0] + decimal_type = field.field_type + assert isinstance(decimal_type, DecimalType) + assert decimal_type.precision == 22 + assert decimal_type.scale == 1 + + +def test_add_nested_structs(primitive_fields: NestedField) -> None: + schema = Schema( + NestedField( + field_id=1, + name="struct1", + field_type=StructType( + NestedField( + field_id=2, + name="struct2", + field_type=StructType( + NestedField( + field_id=3, + name="list", + field_type=ListType( + element_id=4, + element_type=StructType( + NestedField(field_id=5, name="value", field_type=StringType(), required=False) + ), + element_required=False, + ), + required=False, + ) + ), + required=False, + ) + ), + required=False, + ) + ) + new_schema = Schema( + NestedField( + field_id=1, + name="struct1", + field_type=StructType( + NestedField( + field_id=2, + name="struct2", + field_type=StructType( + NestedField( + field_id=3, + name="list", + field_type=ListType( + element_id=4, + element_type=StructType( + NestedField(field_id=5, name="time", 
field_type=TimeType(), required=False) + ), + element_required=False, + ), + required=False, + ) + ), + required=False, + ) + ), + required=False, + ) + ) + applied = UpdateSchema(None, schema=schema).union_by_name(new_schema)._apply() + + expected = Schema( + NestedField( + field_id=1, + name="struct1", + field_type=StructType( + NestedField( + field_id=2, + name="struct2", + field_type=StructType( + NestedField( + field_id=3, + name="list", + field_type=ListType( + element_id=4, + element_type=StructType( + NestedField(field_id=5, name="value", field_type=StringType(), required=False), + NestedField(field_id=6, name="time", field_type=TimeType(), required=False), + ), + element_required=False, + ), + required=False, + ) + ), + required=False, + ) + ), + required=False, + ) + ) + + assert applied.as_struct() == expected.as_struct() + + +def test_replace_list_with_primitive() -> None: + current_schema = Schema(NestedField(field_id=1, name="aCol", field_type=ListType(element_id=2, element_type=StringType()))) + new_schema = Schema(NestedField(field_id=1, name="aCol", field_type=StringType())) + + with pytest.raises(ValidationError, match="Cannot change column type: list is not a primitive"): + _ = UpdateSchema(None, schema=current_schema).union_by_name(new_schema)._apply() + + +def test_mirrored_schemas() -> None: + current_schema = Schema( + NestedField(9, "struct1", StructType(NestedField(8, "string1", StringType(), required=False)), required=False), + NestedField(6, "list1", ListType(element_id=7, element_type=StringType(), element_required=False), required=False), + NestedField(5, "string2", StringType(), required=False), + NestedField(4, "string3", StringType(), required=False), + NestedField(3, "string4", StringType(), required=False), + NestedField(2, "string5", StringType(), required=False), + NestedField(1, "string6", StringType(), required=False), + ) + mirrored_schema = Schema( + NestedField(1, "struct1", StructType(NestedField(2, "string1", StringType(), required=False))), + NestedField(3, "list1", ListType(element_id=4, element_type=StringType(), element_required=False), required=False), + NestedField(5, "string2", StringType(), required=False), + NestedField(6, "string3", StringType(), required=False), + NestedField(7, "string4", StringType(), required=False), + NestedField(8, "string5", StringType(), required=False), + NestedField(9, "string6", StringType(), required=False), + ) + + applied = UpdateSchema(None, schema=current_schema).union_by_name(mirrored_schema)._apply() + + assert applied.as_struct() == current_schema.as_struct() + + +def test_add_new_top_level_struct() -> None: + current_schema = Schema( + NestedField( + 1, + "map1", + MapType( + key_id=2, + value_id=3, + key_type=StringType(), + value_type=ListType( + element_id=4, + element_type=StructType(NestedField(field_id=5, name="string", field_type=StringType(), required=False)), + ), + value_required=False, + ), + ) + ) + observed_schema = Schema( + NestedField( + 1, + "map1", + MapType( + key_id=2, + value_id=3, + key_type=StringType(), + value_type=ListType( + element_id=4, + element_type=StructType(NestedField(field_id=5, name="string", field_type=StringType(), required=False)), + ), + value_required=False, + ), + ), + NestedField( + field_id=6, + name="struct1", + field_type=StructType( + NestedField( + field_id=7, + name="d1", + field_type=StructType(NestedField(field_id=8, name="d2", field_type=StringType(), required=False)), + required=False, + ) + ), + required=False, + ), + ) + + applied = 
UpdateSchema(None, schema=current_schema).union_by_name(observed_schema)._apply() + + assert applied.as_struct() == observed_schema.as_struct() + + +def test_append_nested_struct() -> None: + current_schema = Schema( + NestedField( + field_id=1, + name="s1", + field_type=StructType( + NestedField( + field_id=2, + name="s2", + field_type=StructType( + NestedField( + field_id=3, + name="s3", + field_type=StructType(NestedField(field_id=4, name="s4", field_type=StringType(), required=False)), + ) + ), + required=False, + ) + ), + ) + ) + observed_schema = Schema( + NestedField( + field_id=1, + name="s1", + field_type=StructType( + NestedField( + field_id=2, + name="s2", + field_type=StructType( + NestedField( + field_id=3, + name="s3", + field_type=StructType(NestedField(field_id=4, name="s4", field_type=StringType(), required=False)), + required=False, + ), + NestedField( + field_id=5, + name="repeat", + field_type=StructType( + NestedField( + field_id=6, + name="s1", + field_type=StructType( + NestedField( + field_id=7, + name="s2", + field_type=StructType( + NestedField( + field_id=8, + name="s3", + field_type=StructType( + NestedField(field_id=9, name="s4", field_type=StringType()) + ), + required=False, + ) + ), + required=False, + ) + ), + required=False, + ) + ), + required=False, + ), + required=False, + ), + required=False, + ) + ), + required=False, + ) + ) + + applied = UpdateSchema(None, schema=current_schema).union_by_name(observed_schema)._apply() + + assert applied.as_struct() == observed_schema.as_struct() + + +def test_append_nested_lists() -> None: + current_schema = Schema( + NestedField( + field_id=1, + name="s1", + field_type=StructType( + NestedField( + field_id=2, + name="s2", + field_type=StructType( + NestedField( + field_id=3, + name="s3", + field_type=StructType( + NestedField( + field_id=4, + name="list1", + field_type=ListType(element_id=5, element_type=StringType(), element_required=False), + required=False, + ) + ), + required=False, + ) + ), + required=False, + ) + ), + required=False, + ) + ) + + observed_schema = Schema( + NestedField( + field_id=1, + name="s1", + field_type=StructType( + NestedField( + field_id=2, + name="s2", + field_type=StructType( + NestedField( + field_id=3, + name="s3", + field_type=StructType( + NestedField( + field_id=4, + name="list2", + field_type=ListType(element_id=5, element_type=StringType(), element_required=False), + required=False, + ) + ), + required=False, + ) + ), + required=False, + ) + ), + required=False, + ) + ) + union = UpdateSchema(None, schema=current_schema).union_by_name(observed_schema)._apply() + + expected = Schema( + NestedField( + field_id=1, + name="s1", + field_type=StructType( + NestedField( + field_id=2, + name="s2", + field_type=StructType( + NestedField( + field_id=3, + name="s3", + field_type=StructType( + NestedField( + field_id=4, + name="list1", + field_type=ListType(element_id=5, element_type=StringType(), element_required=False), + required=False, + ), + NestedField( + field_id=6, + name="list2", + field_type=ListType(element_id=7, element_type=StringType(), element_required=False), + required=False, + ), + ), + required=False, + ) + ), + required=False, + ) + ), + required=False, + ) + ) + + assert union.as_struct() == expected.as_struct() From 5f01ee7fd70e164a916d5d25ba74209a1fca9a3a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 27 Jan 2024 08:59:46 +0100 Subject: [PATCH 045/169] Build: Bump pypa/cibuildwheel from 2.16.2 to 
2.16.3 (#309) Bumps [pypa/cibuildwheel](https://github.com/pypa/cibuildwheel) from 2.16.2 to 2.16.3. - [Release notes](https://github.com/pypa/cibuildwheel/releases) - [Changelog](https://github.com/pypa/cibuildwheel/blob/main/docs/changelog.md) - [Commits](https://github.com/pypa/cibuildwheel/compare/v2.16.2...v2.16.3) --- updated-dependencies: - dependency-name: pypa/cibuildwheel dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/python-release.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/python-release.yml b/.github/workflows/python-release.yml index 9c4d1c848b..219434664e 100644 --- a/.github/workflows/python-release.yml +++ b/.github/workflows/python-release.yml @@ -59,7 +59,7 @@ jobs: if: startsWith(matrix.os, 'ubuntu') - name: Build wheels - uses: pypa/cibuildwheel@v2.16.2 + uses: pypa/cibuildwheel@v2.16.3 with: output-dir: wheelhouse config-file: "pyproject.toml" From b03570bec1550ad061490439f17f9a13d451f47e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 27 Jan 2024 09:02:19 +0100 Subject: [PATCH 046/169] Build: Bump boto3 from 1.34.22 to 1.34.27 (#308) Bumps [boto3](https://github.com/boto/boto3) from 1.34.22 to 1.34.27. - [Release notes](https://github.com/boto/boto3/releases) - [Changelog](https://github.com/boto/boto3/blob/develop/CHANGELOG.rst) - [Commits](https://github.com/boto/boto3/compare/1.34.22...1.34.27) --- updated-dependencies: - dependency-name: boto3 dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 32 ++++++++++++++++++-------------- 1 file changed, 18 insertions(+), 14 deletions(-) diff --git a/poetry.lock b/poetry.lock index cab01e4ce3..1df3729eff 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. 
[[package]] name = "adlfs" @@ -25,24 +25,24 @@ tests = ["arrow", "dask[dataframe]", "docker", "pytest", "pytest-mock"] [[package]] name = "aiobotocore" -version = "2.11.0" +version = "2.11.1" description = "Async client for aws services using botocore and aiohttp" optional = true python-versions = ">=3.8" files = [ - {file = "aiobotocore-2.11.0-py3-none-any.whl", hash = "sha256:6eaf48a6ccd3943ce7d26f75dc8fa0292b7a1a069bd5a9557d37fae5adf14d2d"}, - {file = "aiobotocore-2.11.0.tar.gz", hash = "sha256:4c5b1bf01e7aab74a29bd783159517ecf6d45b5a1647e53302e9554c064e420a"}, + {file = "aiobotocore-2.11.1-py3-none-any.whl", hash = "sha256:904a7ad7cc8671d662cfd596906dafe839118ea2a66332c37908e3dcfdee1e45"}, + {file = "aiobotocore-2.11.1.tar.gz", hash = "sha256:0b095af50da2d6f94e93ca959e2a4876f0f0d84d534b61b21d8e050832d04ab6"}, ] [package.dependencies] aiohttp = ">=3.7.4.post0,<4.0.0" aioitertools = ">=0.5.1,<1.0.0" -botocore = ">=1.33.2,<1.34.23" +botocore = ">=1.33.2,<1.34.28" wrapt = ">=1.10.10,<2.0.0" [package.extras] -awscli = ["awscli (>=1.31.2,<1.32.23)"] -boto3 = ["boto3 (>=1.33.2,<1.34.23)"] +awscli = ["awscli (>=1.31.2,<1.32.28)"] +boto3 = ["boto3 (>=1.33.2,<1.34.28)"] [[package]] name = "aiohttp" @@ -332,17 +332,17 @@ files = [ [[package]] name = "boto3" -version = "1.34.22" +version = "1.34.27" description = "The AWS SDK for Python" optional = false python-versions = ">= 3.8" files = [ - {file = "boto3-1.34.22-py3-none-any.whl", hash = "sha256:5909cd1393143576265c692e908a9ae495492c04a0ffd4bae8578adc2e44729e"}, - {file = "boto3-1.34.22.tar.gz", hash = "sha256:a98c0b86f6044ff8314cc2361e1ef574d674318313ab5606ccb4a6651c7a3f8c"}, + {file = "boto3-1.34.27-py3-none-any.whl", hash = "sha256:3626db4ba9fbb1b58c8fe923da5ed670873b3d881a102956ea19d3b69cd097cc"}, + {file = "boto3-1.34.27.tar.gz", hash = "sha256:ebdd938019f3df2e7b50585353963d4553faf3fbb7b2085c440107fa6caa233b"}, ] [package.dependencies] -botocore = ">=1.34.22,<1.35.0" +botocore = ">=1.34.27,<1.35.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.10.0,<0.11.0" @@ -351,13 +351,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.34.22" +version = "1.34.27" description = "Low-level, data-driven core of boto 3." 
optional = false python-versions = ">= 3.8" files = [ - {file = "botocore-1.34.22-py3-none-any.whl", hash = "sha256:e5f7775975b9213507fbcf846a96b7a2aec2a44fc12a44585197b014a4ab0889"}, - {file = "botocore-1.34.22.tar.gz", hash = "sha256:c47ba4286c576150d1b6ca6df69a87b5deff3d23bd84da8bcf8431ebac3c40ba"}, + {file = "botocore-1.34.27-py3-none-any.whl", hash = "sha256:1c10f247136ad17b6ef1588c1e043e294dbaebdebe9ce84dc56713029f515c53"}, + {file = "botocore-1.34.27.tar.gz", hash = "sha256:a0e68ba264275b358b8c1cca604161f4d9465cf7847d73e929543a9f30ff22d1"}, ] [package.dependencies] @@ -2639,6 +2639,7 @@ files = [ {file = "psycopg2_binary-2.9.9-cp311-cp311-win32.whl", hash = "sha256:dc4926288b2a3e9fd7b50dc6a1909a13bbdadfc67d93f3374d984e56f885579d"}, {file = "psycopg2_binary-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:b76bedd166805480ab069612119ea636f5ab8f8771e640ae103e05a4aae3e417"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:8532fd6e6e2dc57bcb3bc90b079c60de896d2128c5d9d6f24a63875a95a088cf"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b0605eaed3eb239e87df0d5e3c6489daae3f7388d455d0c0b4df899519c6a38d"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f8544b092a29a6ddd72f3556a9fcf249ec412e10ad28be6a0c0d948924f2212"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2d423c8d8a3c82d08fe8af900ad5b613ce3632a1249fd6a223941d0735fce493"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e5afae772c00980525f6d6ecf7cbca55676296b580c0e6abb407f15f3706996"}, @@ -2647,6 +2648,8 @@ files = [ {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:cb16c65dcb648d0a43a2521f2f0a2300f40639f6f8c1ecbc662141e4e3e1ee07"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:911dda9c487075abd54e644ccdf5e5c16773470a6a5d3826fda76699410066fb"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:57fede879f08d23c85140a360c6a77709113efd1c993923c59fde17aa27599fe"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-win32.whl", hash = "sha256:64cf30263844fa208851ebb13b0732ce674d8ec6a0c86a4e160495d299ba3c93"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-win_amd64.whl", hash = "sha256:81ff62668af011f9a48787564ab7eded4e9fb17a4a6a74af5ffa6a457400d2ab"}, {file = "psycopg2_binary-2.9.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2293b001e319ab0d869d660a704942c9e2cce19745262a8aba2115ef41a0a42a"}, {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03ef7df18daf2c4c07e2695e8cfd5ee7f748a1d54d802330985a78d2a5a6dca9"}, {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a602ea5aff39bb9fac6308e9c9d82b9a35c2bf288e184a816002c9fae930b77"}, @@ -3237,6 +3240,7 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, From 9e0394939bab8d6b26cdde6f71173bdd42b55b2e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 27 Jan 2024 09:02:42 +0100 Subject: [PATCH 047/169] Build: Bump coverage from 7.4.0 to 7.4.1 (#307) Bumps [coverage](https://github.com/nedbat/coveragepy) from 7.4.0 to 7.4.1. - [Release notes](https://github.com/nedbat/coveragepy/releases) - [Changelog](https://github.com/nedbat/coveragepy/blob/master/CHANGES.rst) - [Commits](https://github.com/nedbat/coveragepy/compare/7.4.0...7.4.1) --- updated-dependencies: - dependency-name: coverage dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 108 ++++++++++++++++++++++++------------------------- pyproject.toml | 2 +- 2 files changed, 55 insertions(+), 55 deletions(-) diff --git a/poetry.lock b/poetry.lock index 1df3729eff..41f13a7c23 100644 --- a/poetry.lock +++ b/poetry.lock @@ -641,63 +641,63 @@ files = [ [[package]] name = "coverage" -version = "7.4.0" +version = "7.4.1" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36b0ea8ab20d6a7564e89cb6135920bc9188fb5f1f7152e94e8300b7b189441a"}, - {file = "coverage-7.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0676cd0ba581e514b7f726495ea75aba3eb20899d824636c6f59b0ed2f88c471"}, - {file = "coverage-7.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0ca5c71a5a1765a0f8f88022c52b6b8be740e512980362f7fdbb03725a0d6b9"}, - {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7c97726520f784239f6c62506bc70e48d01ae71e9da128259d61ca5e9788516"}, - {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:815ac2d0f3398a14286dc2cea223a6f338109f9ecf39a71160cd1628786bc6f5"}, - {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:80b5ee39b7f0131ebec7968baa9b2309eddb35b8403d1869e08f024efd883566"}, - {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5b2ccb7548a0b65974860a78c9ffe1173cfb5877460e5a229238d985565574ae"}, - {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:995ea5c48c4ebfd898eacb098164b3cc826ba273b3049e4a889658548e321b43"}, - {file = "coverage-7.4.0-cp310-cp310-win32.whl", hash = "sha256:79287fd95585ed36e83182794a57a46aeae0b64ca53929d1176db56aacc83451"}, - {file = "coverage-7.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:5b14b4f8760006bfdb6e08667af7bc2d8d9bfdb648351915315ea17645347137"}, - {file = "coverage-7.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:04387a4a6ecb330c1878907ce0dc04078ea72a869263e53c72a1ba5bbdf380ca"}, - {file = 
"coverage-7.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea81d8f9691bb53f4fb4db603203029643caffc82bf998ab5b59ca05560f4c06"}, - {file = "coverage-7.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74775198b702868ec2d058cb92720a3c5a9177296f75bd97317c787daf711505"}, - {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76f03940f9973bfaee8cfba70ac991825611b9aac047e5c80d499a44079ec0bc"}, - {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:485e9f897cf4856a65a57c7f6ea3dc0d4e6c076c87311d4bc003f82cfe199d25"}, - {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6ae8c9d301207e6856865867d762a4b6fd379c714fcc0607a84b92ee63feff70"}, - {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bf477c355274a72435ceb140dc42de0dc1e1e0bf6e97195be30487d8eaaf1a09"}, - {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:83c2dda2666fe32332f8e87481eed056c8b4d163fe18ecc690b02802d36a4d26"}, - {file = "coverage-7.4.0-cp311-cp311-win32.whl", hash = "sha256:697d1317e5290a313ef0d369650cfee1a114abb6021fa239ca12b4849ebbd614"}, - {file = "coverage-7.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:26776ff6c711d9d835557ee453082025d871e30b3fd6c27fcef14733f67f0590"}, - {file = "coverage-7.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:13eaf476ec3e883fe3e5fe3707caeb88268a06284484a3daf8250259ef1ba143"}, - {file = "coverage-7.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846f52f46e212affb5bcf131c952fb4075b55aae6b61adc9856222df89cbe3e2"}, - {file = "coverage-7.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26f66da8695719ccf90e794ed567a1549bb2644a706b41e9f6eae6816b398c4a"}, - {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:164fdcc3246c69a6526a59b744b62e303039a81e42cfbbdc171c91a8cc2f9446"}, - {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:316543f71025a6565677d84bc4df2114e9b6a615aa39fb165d697dba06a54af9"}, - {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bb1de682da0b824411e00a0d4da5a784ec6496b6850fdf8c865c1d68c0e318dd"}, - {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:0e8d06778e8fbffccfe96331a3946237f87b1e1d359d7fbe8b06b96c95a5407a"}, - {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a56de34db7b7ff77056a37aedded01b2b98b508227d2d0979d373a9b5d353daa"}, - {file = "coverage-7.4.0-cp312-cp312-win32.whl", hash = "sha256:51456e6fa099a8d9d91497202d9563a320513fcf59f33991b0661a4a6f2ad450"}, - {file = "coverage-7.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:cd3c1e4cb2ff0083758f09be0f77402e1bdf704adb7f89108007300a6da587d0"}, - {file = "coverage-7.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e9d1bf53c4c8de58d22e0e956a79a5b37f754ed1ffdbf1a260d9dcfa2d8a325e"}, - {file = "coverage-7.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:109f5985182b6b81fe33323ab4707011875198c41964f014579cf82cebf2bb85"}, - {file = "coverage-7.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cc9d4bc55de8003663ec94c2f215d12d42ceea128da8f0f4036235a119c88ac"}, - {file = 
"coverage-7.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc6d65b21c219ec2072c1293c505cf36e4e913a3f936d80028993dd73c7906b1"}, - {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a10a4920def78bbfff4eff8a05c51be03e42f1c3735be42d851f199144897ba"}, - {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b8e99f06160602bc64da35158bb76c73522a4010f0649be44a4e167ff8555952"}, - {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7d360587e64d006402b7116623cebf9d48893329ef035278969fa3bbf75b697e"}, - {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:29f3abe810930311c0b5d1a7140f6395369c3db1be68345638c33eec07535105"}, - {file = "coverage-7.4.0-cp38-cp38-win32.whl", hash = "sha256:5040148f4ec43644702e7b16ca864c5314ccb8ee0751ef617d49aa0e2d6bf4f2"}, - {file = "coverage-7.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:9864463c1c2f9cb3b5db2cf1ff475eed2f0b4285c2aaf4d357b69959941aa555"}, - {file = "coverage-7.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:936d38794044b26c99d3dd004d8af0035ac535b92090f7f2bb5aa9c8e2f5cd42"}, - {file = "coverage-7.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:799c8f873794a08cdf216aa5d0531c6a3747793b70c53f70e98259720a6fe2d7"}, - {file = "coverage-7.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7defbb9737274023e2d7af02cac77043c86ce88a907c58f42b580a97d5bcca9"}, - {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a1526d265743fb49363974b7aa8d5899ff64ee07df47dd8d3e37dcc0818f09ed"}, - {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf635a52fc1ea401baf88843ae8708591aa4adff875e5c23220de43b1ccf575c"}, - {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:756ded44f47f330666843b5781be126ab57bb57c22adbb07d83f6b519783b870"}, - {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0eb3c2f32dabe3a4aaf6441dde94f35687224dfd7eb2a7f47f3fd9428e421058"}, - {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bfd5db349d15c08311702611f3dccbef4b4e2ec148fcc636cf8739519b4a5c0f"}, - {file = "coverage-7.4.0-cp39-cp39-win32.whl", hash = "sha256:53d7d9158ee03956e0eadac38dfa1ec8068431ef8058fe6447043db1fb40d932"}, - {file = "coverage-7.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:cfd2a8b6b0d8e66e944d47cdec2f47c48fef2ba2f2dff5a9a75757f64172857e"}, - {file = "coverage-7.4.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:c530833afc4707fe48524a44844493f36d8727f04dcce91fb978c414a8556cc6"}, - {file = "coverage-7.4.0.tar.gz", hash = "sha256:707c0f58cb1712b8809ece32b68996ee1e609f71bd14615bd8f87a1293cb610e"}, + {file = "coverage-7.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:077d366e724f24fc02dbfe9d946534357fda71af9764ff99d73c3c596001bbd7"}, + {file = "coverage-7.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0193657651f5399d433c92f8ae264aff31fc1d066deee4b831549526433f3f61"}, + {file = "coverage-7.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d17bbc946f52ca67adf72a5ee783cd7cd3477f8f8796f59b4974a9b59cacc9ee"}, + {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:a3277f5fa7483c927fe3a7b017b39351610265308f5267ac6d4c2b64cc1d8d25"}, + {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dceb61d40cbfcf45f51e59933c784a50846dc03211054bd76b421a713dcdf19"}, + {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6008adeca04a445ea6ef31b2cbaf1d01d02986047606f7da266629afee982630"}, + {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c61f66d93d712f6e03369b6a7769233bfda880b12f417eefdd4f16d1deb2fc4c"}, + {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9bb62fac84d5f2ff523304e59e5c439955fb3b7f44e3d7b2085184db74d733b"}, + {file = "coverage-7.4.1-cp310-cp310-win32.whl", hash = "sha256:f86f368e1c7ce897bf2457b9eb61169a44e2ef797099fb5728482b8d69f3f016"}, + {file = "coverage-7.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:869b5046d41abfea3e381dd143407b0d29b8282a904a19cb908fa24d090cc018"}, + {file = "coverage-7.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b8ffb498a83d7e0305968289441914154fb0ef5d8b3157df02a90c6695978295"}, + {file = "coverage-7.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3cacfaefe6089d477264001f90f55b7881ba615953414999c46cc9713ff93c8c"}, + {file = "coverage-7.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d6850e6e36e332d5511a48a251790ddc545e16e8beaf046c03985c69ccb2676"}, + {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18e961aa13b6d47f758cc5879383d27b5b3f3dcd9ce8cdbfdc2571fe86feb4dd"}, + {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfd1e1b9f0898817babf840b77ce9fe655ecbe8b1b327983df485b30df8cc011"}, + {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6b00e21f86598b6330f0019b40fb397e705135040dbedc2ca9a93c7441178e74"}, + {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:536d609c6963c50055bab766d9951b6c394759190d03311f3e9fcf194ca909e1"}, + {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7ac8f8eb153724f84885a1374999b7e45734bf93a87d8df1e7ce2146860edef6"}, + {file = "coverage-7.4.1-cp311-cp311-win32.whl", hash = "sha256:f3771b23bb3675a06f5d885c3630b1d01ea6cac9e84a01aaf5508706dba546c5"}, + {file = "coverage-7.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:9d2f9d4cc2a53b38cabc2d6d80f7f9b7e3da26b2f53d48f05876fef7956b6968"}, + {file = "coverage-7.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f68ef3660677e6624c8cace943e4765545f8191313a07288a53d3da188bd8581"}, + {file = "coverage-7.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:23b27b8a698e749b61809fb637eb98ebf0e505710ec46a8aa6f1be7dc0dc43a6"}, + {file = "coverage-7.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e3424c554391dc9ef4a92ad28665756566a28fecf47308f91841f6c49288e66"}, + {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0860a348bf7004c812c8368d1fc7f77fe8e4c095d661a579196a9533778e156"}, + {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe558371c1bdf3b8fa03e097c523fb9645b8730399c14fe7721ee9c9e2a545d3"}, + {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = 
"sha256:3468cc8720402af37b6c6e7e2a9cdb9f6c16c728638a2ebc768ba1ef6f26c3a1"}, + {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:02f2edb575d62172aa28fe00efe821ae31f25dc3d589055b3fb64d51e52e4ab1"}, + {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ca6e61dc52f601d1d224526360cdeab0d0712ec104a2ce6cc5ccef6ed9a233bc"}, + {file = "coverage-7.4.1-cp312-cp312-win32.whl", hash = "sha256:ca7b26a5e456a843b9b6683eada193fc1f65c761b3a473941efe5a291f604c74"}, + {file = "coverage-7.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:85ccc5fa54c2ed64bd91ed3b4a627b9cce04646a659512a051fa82a92c04a448"}, + {file = "coverage-7.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8bdb0285a0202888d19ec6b6d23d5990410decb932b709f2b0dfe216d031d218"}, + {file = "coverage-7.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:918440dea04521f499721c039863ef95433314b1db00ff826a02580c1f503e45"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:379d4c7abad5afbe9d88cc31ea8ca262296480a86af945b08214eb1a556a3e4d"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b094116f0b6155e36a304ff912f89bbb5067157aff5f94060ff20bbabdc8da06"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2f5968608b1fe2a1d00d01ad1017ee27efd99b3437e08b83ded9b7af3f6f766"}, + {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:10e88e7f41e6197ea0429ae18f21ff521d4f4490aa33048f6c6f94c6045a6a75"}, + {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a4a3907011d39dbc3e37bdc5df0a8c93853c369039b59efa33a7b6669de04c60"}, + {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6d224f0c4c9c98290a6990259073f496fcec1b5cc613eecbd22786d398ded3ad"}, + {file = "coverage-7.4.1-cp38-cp38-win32.whl", hash = "sha256:23f5881362dcb0e1a92b84b3c2809bdc90db892332daab81ad8f642d8ed55042"}, + {file = "coverage-7.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:a07f61fc452c43cd5328b392e52555f7d1952400a1ad09086c4a8addccbd138d"}, + {file = "coverage-7.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8e738a492b6221f8dcf281b67129510835461132b03024830ac0e554311a5c54"}, + {file = "coverage-7.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:46342fed0fff72efcda77040b14728049200cbba1279e0bf1188f1f2078c1d70"}, + {file = "coverage-7.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9641e21670c68c7e57d2053ddf6c443e4f0a6e18e547e86af3fad0795414a628"}, + {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aeb2c2688ed93b027eb0d26aa188ada34acb22dceea256d76390eea135083950"}, + {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d12c923757de24e4e2110cf8832d83a886a4cf215c6e61ed506006872b43a6d1"}, + {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0491275c3b9971cdbd28a4595c2cb5838f08036bca31765bad5e17edf900b2c7"}, + {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8dfc5e195bbef80aabd81596ef52a1277ee7143fe419efc3c4d8ba2754671756"}, + {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1a78b656a4d12b0490ca72651fe4d9f5e07e3c6461063a9b6265ee45eb2bdd35"}, + {file = "coverage-7.4.1-cp39-cp39-win32.whl", 
hash = "sha256:f90515974b39f4dea2f27c0959688621b46d96d5a626cf9c53dbc653a895c05c"}, + {file = "coverage-7.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:64e723ca82a84053dd7bfcc986bdb34af8d9da83c521c19d6b472bc6880e191a"}, + {file = "coverage-7.4.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:32a8d985462e37cfdab611a6f95b09d7c091d07668fdc26e47a725ee575fe166"}, + {file = "coverage-7.4.1.tar.gz", hash = "sha256:1ed4b95480952b1a26d863e546fa5094564aa0065e1e5f0d4d0041f293251d04"}, ] [package.dependencies] @@ -4319,4 +4319,4 @@ zstandard = ["zstandard"] [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "8cbb637a0be18ca7ddacdc91e6cb78d6e71607b54c2cbdc89f903d2997c4f7e2" +content-hash = "6d7f598c9ddb0e7565ef8af1b3270c9e5eb32718abfd0a3a7d5c192543150ba8" diff --git a/pyproject.toml b/pyproject.toml index 505fedf813..e7f18b5551 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -77,7 +77,7 @@ pytest-checkdocs = "2.10.1" pytest-lazy-fixture = "0.6.3" pre-commit = "3.5.0" fastavro = "1.9.3" -coverage = { version = "^7.4.0", extras = ["toml"] } +coverage = { version = "^7.4.1", extras = ["toml"] } requests-mock = "1.11.0" moto = { version = "^4.2.13", extras = ["server"] } typing-extensions = "4.9.0" From acc934fb76aa6c6e2e32b60c8a99f9e2b2c627dd Mon Sep 17 00:00:00 2001 From: Fokko Driesprong Date: Mon, 29 Jan 2024 00:21:15 +0100 Subject: [PATCH 048/169] Check the types when writing (#313) --- pyiceberg/table/__init__.py | 16 ++++++++++++++++ tests/integration/test_writes.py | 18 ++++++++++++++++++ 2 files changed, 34 insertions(+) diff --git a/pyiceberg/table/__init__.py b/pyiceberg/table/__init__.py index 221a609e5c..26eecefd0f 100644 --- a/pyiceberg/table/__init__.py +++ b/pyiceberg/table/__init__.py @@ -932,6 +932,14 @@ def append(self, df: pa.Table) -> None: Args: df: The Arrow dataframe that will be appended to overwrite the table """ + try: + import pyarrow as pa + except ModuleNotFoundError as e: + raise ModuleNotFoundError("For writes PyArrow needs to be installed") from e + + if not isinstance(df, pa.Table): + raise ValueError(f"Expected PyArrow table, got: {df}") + if len(self.spec().fields) > 0: raise ValueError("Cannot write to partitioned tables") @@ -954,6 +962,14 @@ def overwrite(self, df: pa.Table, overwrite_filter: BooleanExpression = ALWAYS_T overwrite_filter: ALWAYS_TRUE when you overwrite all the data, or a boolean expression in case of a partial overwrite """ + try: + import pyarrow as pa + except ModuleNotFoundError as e: + raise ModuleNotFoundError("For writes PyArrow needs to be installed") from e + + if not isinstance(df, pa.Table): + raise ValueError(f"Expected PyArrow table, got: {df}") + if overwrite_filter != AlwaysTrue(): raise NotImplementedError("Cannot overwrite a subset of a table") diff --git a/tests/integration/test_writes.py b/tests/integration/test_writes.py index a095c13315..17dc997163 100644 --- a/tests/integration/test_writes.py +++ b/tests/integration/test_writes.py @@ -391,3 +391,21 @@ def test_data_files(spark: SparkSession, session_catalog: Catalog, arrow_table_w assert [row.added_data_files_count for row in rows] == [1, 1, 0, 1, 1] assert [row.existing_data_files_count for row in rows] == [0, 0, 0, 0, 0] assert [row.deleted_data_files_count for row in rows] == [0, 0, 1, 0, 0] + + +@pytest.mark.integration +def test_invalid_arguments(spark: SparkSession, session_catalog: Catalog, arrow_table_with_null: pa.Table) -> None: + identifier = "default.arrow_data_files" + + try: + session_catalog.drop_table(identifier=identifier) + except 
NoSuchTableError: + pass + + tbl = session_catalog.create_table(identifier=identifier, schema=TABLE_SCHEMA, properties={'format-version': '1'}) + + with pytest.raises(ValueError, match="Expected PyArrow table, got: not a df"): + tbl.overwrite("not a df") + + with pytest.raises(ValueError, match="Expected PyArrow table, got: not a df"): + tbl.append("not a df") From f66e3652fdf9720d6c63a6fcec7bcd08d5bb186c Mon Sep 17 00:00:00 2001 From: Kevin Liu Date: Mon, 29 Jan 2024 11:05:30 -0800 Subject: [PATCH 049/169] Create directories on a local filesystem (#301) * add a test for writing data to disk * create dir first * add fsspec test * make lint * add fs tests --- pyiceberg/io/__init__.py | 2 +- pyiceberg/io/fsspec.py | 3 +- pyiceberg/io/pyarrow.py | 11 +++++-- tests/catalog/test_sql.py | 62 +++++++++++++++++++++++++++++++++++++-- tests/io/test_fsspec.py | 23 +++++++++++++++ tests/io/test_pyarrow.py | 20 +++++++++++++ 6 files changed, 114 insertions(+), 7 deletions(-) diff --git a/pyiceberg/io/__init__.py b/pyiceberg/io/__init__.py index 1b0bc71af0..f03048c8cd 100644 --- a/pyiceberg/io/__init__.py +++ b/pyiceberg/io/__init__.py @@ -275,7 +275,7 @@ def delete(self, location: Union[str, InputFile, OutputFile]) -> None: "s3a": [ARROW_FILE_IO, FSSPEC_FILE_IO], "s3n": [ARROW_FILE_IO, FSSPEC_FILE_IO], "gs": [ARROW_FILE_IO], - "file": [ARROW_FILE_IO], + "file": [ARROW_FILE_IO, FSSPEC_FILE_IO], "hdfs": [ARROW_FILE_IO], "abfs": [FSSPEC_FILE_IO], "abfss": [FSSPEC_FILE_IO], diff --git a/pyiceberg/io/fsspec.py b/pyiceberg/io/fsspec.py index 6887007c7f..957cac66f2 100644 --- a/pyiceberg/io/fsspec.py +++ b/pyiceberg/io/fsspec.py @@ -99,7 +99,7 @@ def s3v4_rest_signer(properties: Properties, request: AWSRequest, **_: Any) -> A def _file(_: Properties) -> LocalFileSystem: - return LocalFileSystem() + return LocalFileSystem(auto_mkdir=True) def _s3(properties: Properties) -> AbstractFileSystem: @@ -173,6 +173,7 @@ def _adlfs(properties: Properties) -> AbstractFileSystem: SCHEME_TO_FS = { + "": _file, "file": _file, "s3": _s3, "s3a": _s3, diff --git a/pyiceberg/io/pyarrow.py b/pyiceberg/io/pyarrow.py index cbfb9f6d5a..1d7dcbef77 100644 --- a/pyiceberg/io/pyarrow.py +++ b/pyiceberg/io/pyarrow.py @@ -173,6 +173,13 @@ T = TypeVar("T") +class PyArrowLocalFileSystem(pyarrow.fs.LocalFileSystem): + def open_output_stream(self, path: str, *args: Any, **kwargs: Any) -> pyarrow.NativeFile: + # In LocalFileSystem, parent directories must be first created before opening an output stream + self.create_dir(os.path.dirname(path), recursive=True) + return super().open_output_stream(path, *args, **kwargs) + + class PyArrowFile(InputFile, OutputFile): """A combined InputFile and OutputFile implementation that uses a pyarrow filesystem to generate pyarrow.lib.NativeFile instances. 
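A short sketch of the behavior the `PyArrowLocalFileSystem` subclass above enables: unlike pyarrow's stock `LocalFileSystem`, opening an output stream under a directory that does not exist yet succeeds, because the parent directories are created first. The path below is illustrative:

```python
import os
import tempfile

from pyiceberg.io.pyarrow import PyArrowLocalFileSystem

with tempfile.TemporaryDirectory() as tmpdir:
    # The parent directories "db/table/metadata" do not exist yet.
    path = os.path.join(tmpdir, "db", "table", "metadata", "00000.metadata.json")

    fs = PyArrowLocalFileSystem()
    with fs.open_output_stream(path) as stream:
        stream.write(b"{}")

    assert os.path.exists(path)
```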
@@ -379,9 +386,7 @@ def _initialize_fs(self, scheme: str, netloc: Optional[str] = None) -> FileSyste return GcsFileSystem(**gcs_kwargs) elif scheme == "file": - from pyarrow.fs import LocalFileSystem - - return LocalFileSystem() + return PyArrowLocalFileSystem() else: raise ValueError(f"Unrecognized filesystem type in URI: {scheme}") diff --git a/tests/catalog/test_sql.py b/tests/catalog/test_sql.py index 217ea8f535..9dbcf8f84e 100644 --- a/tests/catalog/test_sql.py +++ b/tests/catalog/test_sql.py @@ -19,6 +19,7 @@ from pathlib import Path from typing import Generator, List +import pyarrow as pa import pytest from pytest import TempPathFactory from pytest_lazyfixture import lazy_fixture @@ -35,7 +36,10 @@ NoSuchTableError, TableAlreadyExistsError, ) +from pyiceberg.io import FSSPEC_FILE_IO, PY_IO_IMPL +from pyiceberg.io.pyarrow import schema_to_pyarrow from pyiceberg.schema import Schema +from pyiceberg.table.snapshots import Operation from pyiceberg.table.sorting import ( NullOrder, SortDirection, @@ -80,7 +84,7 @@ def catalog_memory(warehouse: Path) -> Generator[SqlCatalog, None, None]: @pytest.fixture(scope="module") def catalog_sqlite(warehouse: Path) -> Generator[SqlCatalog, None, None]: props = { - "uri": "sqlite:////tmp/sql-catalog.db", + "uri": f"sqlite:////{warehouse}/sql-catalog.db", "warehouse": f"file://{warehouse}", } catalog = SqlCatalog("test_sql_catalog", **props) @@ -92,7 +96,7 @@ def catalog_sqlite(warehouse: Path) -> Generator[SqlCatalog, None, None]: @pytest.fixture(scope="module") def catalog_sqlite_without_rowcount(warehouse: Path) -> Generator[SqlCatalog, None, None]: props = { - "uri": "sqlite:////tmp/sql-catalog.db", + "uri": f"sqlite:////{warehouse}/sql-catalog.db", "warehouse": f"file://{warehouse}", } catalog = SqlCatalog("test_sql_catalog", **props) @@ -102,6 +106,19 @@ def catalog_sqlite_without_rowcount(warehouse: Path) -> Generator[SqlCatalog, No catalog.destroy_tables() +@pytest.fixture(scope="module") +def catalog_sqlite_fsspec(warehouse: Path) -> Generator[SqlCatalog, None, None]: + props = { + "uri": f"sqlite:////{warehouse}/sql-catalog.db", + "warehouse": f"file://{warehouse}", + PY_IO_IMPL: FSSPEC_FILE_IO, + } + catalog = SqlCatalog("test_sql_catalog", **props) + catalog.create_tables() + yield catalog + catalog.destroy_tables() + + def test_creation_with_no_uri() -> None: with pytest.raises(NoSuchPropertyException): SqlCatalog("test_ddb_catalog", not_uri="unused") @@ -722,6 +739,47 @@ def test_commit_table(catalog: SqlCatalog, table_schema_nested: Schema, random_i assert new_schema.find_field("b").field_type == IntegerType() +@pytest.mark.parametrize( + 'catalog', + [ + lazy_fixture('catalog_memory'), + lazy_fixture('catalog_sqlite'), + lazy_fixture('catalog_sqlite_without_rowcount'), + lazy_fixture('catalog_sqlite_fsspec'), + ], +) +def test_append_table(catalog: SqlCatalog, table_schema_simple: Schema, random_identifier: Identifier) -> None: + database_name, _table_name = random_identifier + catalog.create_namespace(database_name) + table = catalog.create_table(random_identifier, table_schema_simple) + + df = pa.Table.from_pydict( + { + "foo": ["a"], + "bar": [1], + "baz": [True], + }, + schema=schema_to_pyarrow(table_schema_simple), + ) + + table.append(df) + + # new snapshot is written in APPEND mode + assert len(table.metadata.snapshots) == 1 + assert table.metadata.snapshots[0].snapshot_id == table.metadata.current_snapshot_id + assert table.metadata.snapshots[0].parent_snapshot_id is None + assert table.metadata.snapshots[0].sequence_number == 
1 + assert table.metadata.snapshots[0].summary is not None + assert table.metadata.snapshots[0].summary.operation == Operation.APPEND + assert table.metadata.snapshots[0].summary['added-data-files'] == '1' + assert table.metadata.snapshots[0].summary['added-records'] == '1' + assert table.metadata.snapshots[0].summary['total-data-files'] == '1' + assert table.metadata.snapshots[0].summary['total-records'] == '1' + + # read back the data + assert df == table.scan().to_arrow() + + @pytest.mark.parametrize( 'catalog', [ diff --git a/tests/io/test_fsspec.py b/tests/io/test_fsspec.py index f83268b56f..9f044454b4 100644 --- a/tests/io/test_fsspec.py +++ b/tests/io/test_fsspec.py @@ -15,10 +15,13 @@ # specific language governing permissions and limitations # under the License. +import os +import tempfile import uuid import pytest from botocore.awsrequest import AWSRequest +from fsspec.implementations.local import LocalFileSystem from requests_mock import Mocker from pyiceberg.exceptions import SignError @@ -27,6 +30,26 @@ from pyiceberg.io.pyarrow import PyArrowFileIO +def test_fsspec_infer_local_fs_from_path(fsspec_fileio: FsspecFileIO) -> None: + """Test path with `file` scheme and no scheme both use LocalFileSystem""" + assert isinstance(fsspec_fileio.new_output("file://tmp/warehouse")._fs, LocalFileSystem) + assert isinstance(fsspec_fileio.new_output("/tmp/warehouse")._fs, LocalFileSystem) + + +def test_fsspec_local_fs_can_create_path_without_parent_dir(fsspec_fileio: FsspecFileIO) -> None: + """Test LocalFileSystem can create path without first creating the parent directories""" + with tempfile.TemporaryDirectory() as tmpdirname: + file_path = f"{tmpdirname}/foo/bar/baz.txt" + output_file = fsspec_fileio.new_output(file_path) + parent_path = os.path.dirname(file_path) + assert output_file._fs.exists(parent_path) is False + try: + with output_file.create() as f: + f.write(b"foo") + except Exception: + pytest.fail("Failed to write to file without parent directory") + + @pytest.mark.s3 def test_fsspec_new_input_file(fsspec_fileio: FsspecFileIO) -> None: """Test creating a new input file from a fsspec file-io""" diff --git a/tests/io/test_pyarrow.py b/tests/io/test_pyarrow.py index e6f4de3596..0628ed4893 100644 --- a/tests/io/test_pyarrow.py +++ b/tests/io/test_pyarrow.py @@ -92,6 +92,26 @@ ) +def test_pyarrow_infer_local_fs_from_path() -> None: + """Test path with `file` scheme and no scheme both use LocalFileSystem""" + assert isinstance(PyArrowFileIO().new_output("file://tmp/warehouse")._filesystem, LocalFileSystem) + assert isinstance(PyArrowFileIO().new_output("/tmp/warehouse")._filesystem, LocalFileSystem) + + +def test_pyarrow_local_fs_can_create_path_without_parent_dir() -> None: + """Test LocalFileSystem can create path without first creating the parent directories""" + with tempfile.TemporaryDirectory() as tmpdirname: + file_path = f"{tmpdirname}/foo/bar/baz.txt" + output_file = PyArrowFileIO().new_output(file_path) + parent_path = os.path.dirname(file_path) + assert output_file._filesystem.get_file_info(parent_path).type == FileType.NotFound + try: + with output_file.create() as f: + f.write(b"foo") + except Exception: + pytest.fail("Failed to write to file without parent directory") + + def test_pyarrow_input_file() -> None: """Test reading a file using PyArrowFile""" From a3e36838b05e4ccccac434ab394af758054b12d6 Mon Sep 17 00:00:00 2001 From: Fokko Driesprong Date: Mon, 29 Jan 2024 20:05:55 +0100 Subject: [PATCH 050/169] docs: Remove feature support page (#316) With the 0.6.0 
release we'll check the last box, and we have feature parity with Java according to this table. I would prefer creating tickets on Github to point out the gaps rather than this page. --- mkdocs/docs/SUMMARY.md | 1 - mkdocs/docs/feature-support.md | 90 ---------------------------------- 2 files changed, 91 deletions(-) delete mode 100644 mkdocs/docs/feature-support.md diff --git a/mkdocs/docs/SUMMARY.md b/mkdocs/docs/SUMMARY.md index 46383823ec..77dbcbbf89 100644 --- a/mkdocs/docs/SUMMARY.md +++ b/mkdocs/docs/SUMMARY.md @@ -22,7 +22,6 @@ - [CLI](cli.md) - [API](api.md) - [Contributing](contributing.md) -- [Feature support](feature-support.md) - Releases - [Verify a release](verify-release.md) - [How to release](how-to-release.md) diff --git a/mkdocs/docs/feature-support.md b/mkdocs/docs/feature-support.md deleted file mode 100644 index 535b34e128..0000000000 --- a/mkdocs/docs/feature-support.md +++ /dev/null @@ -1,90 +0,0 @@ ---- -hide: - - navigation ---- - - - -# Feature Support - -The goal is that the python library will provide a functional, performant subset of the Java library. The initial focus has been on reading table metadata and provide a convenient CLI to go through the catalog. - -## Metadata - -| Operation | Java | Python | -| :----------------------- | :--: | :----: | -| Get Schema | X | X | -| Get Snapshots | X | X | -| Plan Scan | X | X | -| Plan Scan for Snapshot | X | X | -| Update Current Snapshot | X | | -| Create Table | X | X | -| Rename Table | X | X | -| Drop Table | X | X | -| Alter Table | X | X | -| Set Table Properties | X | X | -| Create Namespace | X | X | -| Drop Namespace | X | X | -| Set Namespace Properties | X | X | - -## Types - -The types are kept in `pyiceberg.types`. - -Primitive types: - -- `BooleanType` -- `StringType` -- `IntegerType` -- `LongType` -- `FloatType` -- `DoubleType` -- `DateType` -- `TimeType` -- `TimestampType` -- `TimestamptzType` -- `BinaryType` -- `UUIDType` - -Complex types: - -- `StructType` -- `ListType` -- `MapType` -- `FixedType(16)` -- `DecimalType(8, 3)` - -## Expressions - -The expressions are kept in `pyiceberg.expressions`. 
- -- `IsNull` -- `NotNull` -- `IsNaN` -- `NotNaN` -- `In` -- `NotIn` -- `EqualTo` -- `NotEqualTo` -- `GreaterThanOrEqual` -- `GreaterThan` -- `LessThanOrEqual` -- `LessThan` -- `And` -- `Or` -- `Not` From 02e64300aee376a76c175be253a29dcd7c31f0cc Mon Sep 17 00:00:00 2001 From: Sung Yun <107272191+syun64@users.noreply.github.com> Date: Mon, 29 Jan 2024 21:10:39 -0500 Subject: [PATCH 051/169] `create_table` with a PyArrow Schema (#305) --- mkdocs/docs/api.md | 19 ++++ pyiceberg/catalog/__init__.py | 22 +++- pyiceberg/catalog/dynamodb.py | 8 +- pyiceberg/catalog/glue.py | 8 +- pyiceberg/catalog/hive.py | 9 +- pyiceberg/catalog/noop.py | 6 +- pyiceberg/catalog/rest.py | 8 +- pyiceberg/catalog/sql.py | 8 +- pyiceberg/io/pyarrow.py | 25 ++++- pyiceberg/schema.py | 53 ++++++---- pyproject.toml | 2 +- tests/catalog/test_base.py | 34 +++++- tests/catalog/test_dynamodb.py | 18 ++++ tests/catalog/test_glue.py | 23 ++++ tests/catalog/test_sql.py | 20 ++++ tests/conftest.py | 173 +++++++++++++++++++++++++++++++ tests/io/test_pyarrow_visitor.py | 120 ++++----------------- 17 files changed, 417 insertions(+), 139 deletions(-) diff --git a/mkdocs/docs/api.md b/mkdocs/docs/api.md index 6f79835db0..650d391807 100644 --- a/mkdocs/docs/api.md +++ b/mkdocs/docs/api.md @@ -146,6 +146,25 @@ catalog.create_table( ) ``` +To create a table using a pyarrow schema: + +```python +import pyarrow as pa + +schema = pa.schema( + [ + pa.field("foo", pa.string(), nullable=True), + pa.field("bar", pa.int32(), nullable=False), + pa.field("baz", pa.bool_(), nullable=True), + ] +) + +catalog.create_table( + identifier="docs_example.bids", + schema=schema, +) +``` + ## Load a table ### Catalog table diff --git a/pyiceberg/catalog/__init__.py b/pyiceberg/catalog/__init__.py index a39d0e915c..6e5dc2748f 100644 --- a/pyiceberg/catalog/__init__.py +++ b/pyiceberg/catalog/__init__.py @@ -24,6 +24,7 @@ from dataclasses import dataclass from enum import Enum from typing import ( + TYPE_CHECKING, Callable, Dict, List, @@ -56,6 +57,9 @@ ) from pyiceberg.utils.config import Config, merge_config +if TYPE_CHECKING: + import pyarrow as pa + logger = logging.getLogger(__name__) _ENV_CONFIG = Config() @@ -288,7 +292,7 @@ def _load_file_io(self, properties: Properties = EMPTY_DICT, location: Optional[ def create_table( self, identifier: Union[str, Identifier], - schema: Schema, + schema: Union[Schema, "pa.Schema"], location: Optional[str] = None, partition_spec: PartitionSpec = UNPARTITIONED_PARTITION_SPEC, sort_order: SortOrder = UNSORTED_SORT_ORDER, @@ -512,6 +516,22 @@ def _check_for_overlap(removals: Optional[Set[str]], updates: Properties) -> Non if overlap: raise ValueError(f"Updates and deletes have an overlap: {overlap}") + @staticmethod + def _convert_schema_if_needed(schema: Union[Schema, "pa.Schema"]) -> Schema: + if isinstance(schema, Schema): + return schema + try: + import pyarrow as pa + + from pyiceberg.io.pyarrow import _ConvertToIcebergWithoutIDs, visit_pyarrow + + if isinstance(schema, pa.Schema): + schema: Schema = visit_pyarrow(schema, _ConvertToIcebergWithoutIDs()) # type: ignore + return schema + except ModuleNotFoundError: + pass + raise ValueError(f"{type(schema)=}, but it must be pyiceberg.schema.Schema or pyarrow.Schema") + def _resolve_table_location(self, location: Optional[str], database_name: str, table_name: str) -> str: if not location: return self._get_default_warehouse_location(database_name, table_name) diff --git a/pyiceberg/catalog/dynamodb.py b/pyiceberg/catalog/dynamodb.py index 6c3f931bd8..d5f3b5e14c 
100644 --- a/pyiceberg/catalog/dynamodb.py +++ b/pyiceberg/catalog/dynamodb.py @@ -17,6 +17,7 @@ import uuid from time import time from typing import ( + TYPE_CHECKING, Any, Dict, List, @@ -57,6 +58,9 @@ from pyiceberg.table.sorting import UNSORTED_SORT_ORDER, SortOrder from pyiceberg.typedef import EMPTY_DICT +if TYPE_CHECKING: + import pyarrow as pa + DYNAMODB_CLIENT = "dynamodb" DYNAMODB_COL_IDENTIFIER = "identifier" @@ -127,7 +131,7 @@ def _dynamodb_table_exists(self) -> bool: def create_table( self, identifier: Union[str, Identifier], - schema: Schema, + schema: Union[Schema, "pa.Schema"], location: Optional[str] = None, partition_spec: PartitionSpec = UNPARTITIONED_PARTITION_SPEC, sort_order: SortOrder = UNSORTED_SORT_ORDER, @@ -152,6 +156,8 @@ def create_table( ValueError: If the identifier is invalid, or no path is given to store metadata. """ + schema: Schema = self._convert_schema_if_needed(schema) # type: ignore + database_name, table_name = self.identifier_to_database_and_table(identifier) location = self._resolve_table_location(location, database_name, table_name) diff --git a/pyiceberg/catalog/glue.py b/pyiceberg/catalog/glue.py index 645568f80a..8f860fabba 100644 --- a/pyiceberg/catalog/glue.py +++ b/pyiceberg/catalog/glue.py @@ -17,6 +17,7 @@ from typing import ( + TYPE_CHECKING, Any, Dict, List, @@ -88,6 +89,9 @@ UUIDType, ) +if TYPE_CHECKING: + import pyarrow as pa + # If Glue should skip archiving an old table version when creating a new version in a commit. By # default, Glue archives all old table versions after an UpdateTable call, but Glue has a default # max number of archived table versions (can be increased). So for streaming use case with lots @@ -329,7 +333,7 @@ def _get_glue_table(self, database_name: str, table_name: str) -> TableTypeDef: def create_table( self, identifier: Union[str, Identifier], - schema: Schema, + schema: Union[Schema, "pa.Schema"], location: Optional[str] = None, partition_spec: PartitionSpec = UNPARTITIONED_PARTITION_SPEC, sort_order: SortOrder = UNSORTED_SORT_ORDER, @@ -354,6 +358,8 @@ def create_table( ValueError: If the identifier is invalid, or no path is given to store metadata. """ + schema: Schema = self._convert_schema_if_needed(schema) # type: ignore + database_name, table_name = self.identifier_to_database_and_table(identifier) location = self._resolve_table_location(location, database_name, table_name) diff --git a/pyiceberg/catalog/hive.py b/pyiceberg/catalog/hive.py index 331b9ca80d..8069321095 100644 --- a/pyiceberg/catalog/hive.py +++ b/pyiceberg/catalog/hive.py @@ -18,6 +18,7 @@ import time from types import TracebackType from typing import ( + TYPE_CHECKING, Any, Dict, List, @@ -91,6 +92,10 @@ UUIDType, ) +if TYPE_CHECKING: + import pyarrow as pa + + # Replace by visitor hive_types = { BooleanType: "boolean", @@ -250,7 +255,7 @@ def _convert_hive_into_iceberg(self, table: HiveTable, io: FileIO) -> Table: def create_table( self, identifier: Union[str, Identifier], - schema: Schema, + schema: Union[Schema, "pa.Schema"], location: Optional[str] = None, partition_spec: PartitionSpec = UNPARTITIONED_PARTITION_SPEC, sort_order: SortOrder = UNSORTED_SORT_ORDER, @@ -273,6 +278,8 @@ def create_table( AlreadyExistsError: If a table with the name already exists. ValueError: If the identifier is invalid. 
""" + schema: Schema = self._convert_schema_if_needed(schema) # type: ignore + properties = {**DEFAULT_PROPERTIES, **properties} database_name, table_name = self.identifier_to_database_and_table(identifier) current_time_millis = int(time.time() * 1000) diff --git a/pyiceberg/catalog/noop.py b/pyiceberg/catalog/noop.py index 083f851d1c..a8b7154621 100644 --- a/pyiceberg/catalog/noop.py +++ b/pyiceberg/catalog/noop.py @@ -15,6 +15,7 @@ # specific language governing permissions and limitations # under the License. from typing import ( + TYPE_CHECKING, List, Optional, Set, @@ -33,12 +34,15 @@ from pyiceberg.table.sorting import UNSORTED_SORT_ORDER from pyiceberg.typedef import EMPTY_DICT, Identifier, Properties +if TYPE_CHECKING: + import pyarrow as pa + class NoopCatalog(Catalog): def create_table( self, identifier: Union[str, Identifier], - schema: Schema, + schema: Union[Schema, "pa.Schema"], location: Optional[str] = None, partition_spec: PartitionSpec = UNPARTITIONED_PARTITION_SPEC, sort_order: SortOrder = UNSORTED_SORT_ORDER, diff --git a/pyiceberg/catalog/rest.py b/pyiceberg/catalog/rest.py index de192a9e0b..34d75b5936 100644 --- a/pyiceberg/catalog/rest.py +++ b/pyiceberg/catalog/rest.py @@ -16,6 +16,7 @@ # under the License. from json import JSONDecodeError from typing import ( + TYPE_CHECKING, Any, Dict, List, @@ -68,6 +69,9 @@ from pyiceberg.table.sorting import UNSORTED_SORT_ORDER, SortOrder from pyiceberg.typedef import EMPTY_DICT, UTF8, IcebergBaseModel +if TYPE_CHECKING: + import pyarrow as pa + ICEBERG_REST_SPEC_VERSION = "0.14.1" @@ -437,12 +441,14 @@ def _response_to_table(self, identifier_tuple: Tuple[str, ...], table_response: def create_table( self, identifier: Union[str, Identifier], - schema: Schema, + schema: Union[Schema, "pa.Schema"], location: Optional[str] = None, partition_spec: PartitionSpec = UNPARTITIONED_PARTITION_SPEC, sort_order: SortOrder = UNSORTED_SORT_ORDER, properties: Properties = EMPTY_DICT, ) -> Table: + schema: Schema = self._convert_schema_if_needed(schema) # type: ignore + namespace_and_table = self._split_identifier_for_path(identifier) request = CreateTableRequest( name=namespace_and_table["table"], diff --git a/pyiceberg/catalog/sql.py b/pyiceberg/catalog/sql.py index 593c6b54a1..8a02b20dfc 100644 --- a/pyiceberg/catalog/sql.py +++ b/pyiceberg/catalog/sql.py @@ -16,6 +16,7 @@ # under the License. from typing import ( + TYPE_CHECKING, List, Optional, Set, @@ -65,6 +66,9 @@ from pyiceberg.table.sorting import UNSORTED_SORT_ORDER, SortOrder from pyiceberg.typedef import EMPTY_DICT +if TYPE_CHECKING: + import pyarrow as pa + class SqlCatalogBaseTable(MappedAsDataclass, DeclarativeBase): pass @@ -140,7 +144,7 @@ def _convert_orm_to_iceberg(self, orm_table: IcebergTables) -> Table: def create_table( self, identifier: Union[str, Identifier], - schema: Schema, + schema: Union[Schema, "pa.Schema"], location: Optional[str] = None, partition_spec: PartitionSpec = UNPARTITIONED_PARTITION_SPEC, sort_order: SortOrder = UNSORTED_SORT_ORDER, @@ -165,6 +169,8 @@ def create_table( ValueError: If the identifier is invalid, or no path is given to store metadata. 
""" + schema: Schema = self._convert_schema_if_needed(schema) # type: ignore + database_name, table_name = self.identifier_to_database_and_table(identifier) if not self._namespace_exists(database_name): raise NoSuchNamespaceError(f"Namespace does not exist: {database_name}") diff --git a/pyiceberg/io/pyarrow.py b/pyiceberg/io/pyarrow.py index 1d7dcbef77..7a94ce4c7d 100644 --- a/pyiceberg/io/pyarrow.py +++ b/pyiceberg/io/pyarrow.py @@ -26,6 +26,7 @@ from __future__ import annotations import concurrent.futures +import itertools import logging import os import re @@ -34,7 +35,6 @@ from dataclasses import dataclass from enum import Enum from functools import lru_cache, singledispatch -from itertools import chain from typing import ( TYPE_CHECKING, Any, @@ -637,7 +637,7 @@ def _combine_positional_deletes(positional_deletes: List[pa.ChunkedArray], rows: if len(positional_deletes) == 1: all_chunks = positional_deletes[0] else: - all_chunks = pa.chunked_array(chain(*[arr.chunks for arr in positional_deletes])) + all_chunks = pa.chunked_array(itertools.chain(*[arr.chunks for arr in positional_deletes])) return np.setdiff1d(np.arange(rows), all_chunks, assume_unique=False) @@ -912,6 +912,21 @@ def after_map_value(self, element: pa.Field) -> None: self._field_names.pop() +class _ConvertToIcebergWithoutIDs(_ConvertToIceberg): + """ + Converts PyArrowSchema to Iceberg Schema with all -1 ids. + + The schema generated through this visitor should always be + used in conjunction with `new_table_metadata` function to + assign new field ids in order. This is currently used only + when creating an Iceberg Schema from a PyArrow schema when + creating a new Iceberg table. + """ + + def _field_id(self, field: pa.Field) -> int: + return -1 + + def _task_to_table( fs: FileSystem, task: FileScanTask, @@ -999,7 +1014,7 @@ def _task_to_table( def _read_all_delete_files(fs: FileSystem, tasks: Iterable[FileScanTask]) -> Dict[str, List[ChunkedArray]]: deletes_per_file: Dict[str, List[ChunkedArray]] = {} - unique_deletes = set(chain.from_iterable([task.delete_files for task in tasks])) + unique_deletes = set(itertools.chain.from_iterable([task.delete_files for task in tasks])) if len(unique_deletes) > 0: executor = ExecutorFactory.get_or_create() deletes_per_files: Iterator[Dict[str, ChunkedArray]] = executor.map( @@ -1421,7 +1436,7 @@ def schema(self, schema: Schema, struct_result: Callable[[], List[StatisticsColl def struct( self, struct: StructType, field_results: List[Callable[[], List[StatisticsCollector]]] ) -> List[StatisticsCollector]: - return list(chain(*[result() for result in field_results])) + return list(itertools.chain(*[result() for result in field_results])) def field(self, field: NestedField, field_result: Callable[[], List[StatisticsCollector]]) -> List[StatisticsCollector]: self._field_id = field.field_id @@ -1513,7 +1528,7 @@ def schema(self, schema: Schema, struct_result: Callable[[], List[ID2ParquetPath return struct_result() def struct(self, struct: StructType, field_results: List[Callable[[], List[ID2ParquetPath]]]) -> List[ID2ParquetPath]: - return list(chain(*[result() for result in field_results])) + return list(itertools.chain(*[result() for result in field_results])) def field(self, field: NestedField, field_result: Callable[[], List[ID2ParquetPath]]) -> List[ID2ParquetPath]: self._field_id = field.field_id diff --git a/pyiceberg/schema.py b/pyiceberg/schema.py index b61e4678b9..6dd174f325 100644 --- a/pyiceberg/schema.py +++ b/pyiceberg/schema.py @@ -1221,50 +1221,57 @@ def 
assign_fresh_schema_ids(schema_or_type: Union[Schema, IcebergType], next_id: class _SetFreshIDs(PreOrderSchemaVisitor[IcebergType]): """Traverses the schema and assigns monotonically increasing ids.""" - reserved_ids: Dict[int, int] + old_id_to_new_id: Dict[int, int] def __init__(self, next_id_func: Optional[Callable[[], int]] = None) -> None: - self.reserved_ids = {} + self.old_id_to_new_id = {} counter = itertools.count(1) self.next_id_func = next_id_func if next_id_func is not None else lambda: next(counter) - def _get_and_increment(self) -> int: - return self.next_id_func() + def _get_and_increment(self, current_id: int) -> int: + new_id = self.next_id_func() + self.old_id_to_new_id[current_id] = new_id + return new_id def schema(self, schema: Schema, struct_result: Callable[[], StructType]) -> Schema: - # First we keep the original identifier_field_ids here, we remap afterwards - fields = struct_result().fields - return Schema(*fields, identifier_field_ids=[self.reserved_ids[field_id] for field_id in schema.identifier_field_ids]) + return Schema( + *struct_result().fields, + identifier_field_ids=[self.old_id_to_new_id[field_id] for field_id in schema.identifier_field_ids], + ) def struct(self, struct: StructType, field_results: List[Callable[[], IcebergType]]) -> StructType: - # assign IDs for this struct's fields first - self.reserved_ids.update({field.field_id: self._get_and_increment() for field in struct.fields}) - return StructType(*[field() for field in field_results]) + new_ids = [self._get_and_increment(field.field_id) for field in struct.fields] + new_fields = [] + for field_id, field, field_type in zip(new_ids, struct.fields, field_results): + new_fields.append( + NestedField( + field_id=field_id, + name=field.name, + field_type=field_type(), + required=field.required, + doc=field.doc, + ) + ) + return StructType(*new_fields) def field(self, field: NestedField, field_result: Callable[[], IcebergType]) -> IcebergType: - return NestedField( - field_id=self.reserved_ids[field.field_id], - name=field.name, - field_type=field_result(), - required=field.required, - doc=field.doc, - ) + return field_result() def list(self, list_type: ListType, element_result: Callable[[], IcebergType]) -> ListType: - self.reserved_ids[list_type.element_id] = self._get_and_increment() + element_id = self._get_and_increment(list_type.element_id) return ListType( - element_id=self.reserved_ids[list_type.element_id], + element_id=element_id, element=element_result(), element_required=list_type.element_required, ) def map(self, map_type: MapType, key_result: Callable[[], IcebergType], value_result: Callable[[], IcebergType]) -> MapType: - self.reserved_ids[map_type.key_id] = self._get_and_increment() - self.reserved_ids[map_type.value_id] = self._get_and_increment() + key_id = self._get_and_increment(map_type.key_id) + value_id = self._get_and_increment(map_type.value_id) return MapType( - key_id=self.reserved_ids[map_type.key_id], + key_id=key_id, key_type=key_result(), - value_id=self.reserved_ids[map_type.value_id], + value_id=value_id, value_type=value_result(), value_required=map_type.value_required, ) diff --git a/pyproject.toml b/pyproject.toml index e7f18b5551..d1bc82dc62 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -311,7 +311,7 @@ select = [ "I", # isort "UP", # pyupgrade ] -ignore = ["E501","E203","B024","B028"] +ignore = ["E501","E203","B024","B028","UP037"] # Allow autofix for all enabled rules (when `--fix`) is provided. 
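For context on the `pyiceberg/schema.py` change above, here is a hedged, illustrative sketch (not part of the patch) of the two-step flow it enables: the PyArrow schema is first converted with every field ID set to -1 by `_ConvertToIcebergWithoutIDs`, and fresh, monotonically increasing IDs are then assigned in pre-order. In the real `create_table` path the second step happens inside `new_table_metadata`; calling `assign_fresh_schema_ids` directly here is only to make the effect visible:

```python
# Illustrative only: convert a PyArrow schema to an Iceberg schema with
# placeholder IDs (-1), then assign fresh, monotonically increasing IDs.
import pyarrow as pa

from pyiceberg.io.pyarrow import _ConvertToIcebergWithoutIDs, visit_pyarrow
from pyiceberg.schema import assign_fresh_schema_ids

pa_schema = pa.schema([
    pa.field("foo", pa.string(), nullable=True),
    pa.field("bar", pa.int32(), nullable=False),
])

schema_without_ids = visit_pyarrow(pa_schema, _ConvertToIcebergWithoutIDs())
print([f.field_id for f in schema_without_ids.fields])  # [-1, -1]

fresh_schema = assign_fresh_schema_ids(schema_without_ids)
print([f.field_id for f in fresh_schema.fields])  # [1, 2]
```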
fixable = ["ALL"] diff --git a/tests/catalog/test_base.py b/tests/catalog/test_base.py index 911c06b27a..d15c90fee3 100644 --- a/tests/catalog/test_base.py +++ b/tests/catalog/test_base.py @@ -24,7 +24,9 @@ Union, ) +import pyarrow as pa import pytest +from pytest_lazyfixture import lazy_fixture from pyiceberg.catalog import ( Catalog, @@ -72,12 +74,14 @@ def __init__(self, name: str, **properties: str) -> None: def create_table( self, identifier: Union[str, Identifier], - schema: Schema, + schema: Union[Schema, "pa.Schema"], location: Optional[str] = None, partition_spec: PartitionSpec = UNPARTITIONED_PARTITION_SPEC, sort_order: SortOrder = UNSORTED_SORT_ORDER, properties: Properties = EMPTY_DICT, ) -> Table: + schema: Schema = self._convert_schema_if_needed(schema) # type: ignore + identifier = Catalog.identifier_to_tuple(identifier) namespace = Catalog.namespace_from(identifier) @@ -330,6 +334,34 @@ def test_create_table(catalog: InMemoryCatalog) -> None: assert catalog.load_table(TEST_TABLE_IDENTIFIER) == table +@pytest.mark.parametrize( + "schema,expected", + [ + (lazy_fixture("pyarrow_schema_simple_without_ids"), lazy_fixture("iceberg_schema_simple_no_ids")), + (lazy_fixture("iceberg_schema_simple"), lazy_fixture("iceberg_schema_simple")), + (lazy_fixture("iceberg_schema_nested"), lazy_fixture("iceberg_schema_nested")), + (lazy_fixture("pyarrow_schema_nested_without_ids"), lazy_fixture("iceberg_schema_nested_no_ids")), + ], +) +def test_convert_schema_if_needed( + schema: Union[Schema, pa.Schema], + expected: Schema, + catalog: InMemoryCatalog, +) -> None: + assert expected == catalog._convert_schema_if_needed(schema) + + +def test_create_table_pyarrow_schema(catalog: InMemoryCatalog, pyarrow_schema_simple_without_ids: pa.Schema) -> None: + table = catalog.create_table( + identifier=TEST_TABLE_IDENTIFIER, + schema=pyarrow_schema_simple_without_ids, + location=TEST_TABLE_LOCATION, + partition_spec=TEST_TABLE_PARTITION_SPEC, + properties=TEST_TABLE_PROPERTIES, + ) + assert catalog.load_table(TEST_TABLE_IDENTIFIER) == table + + def test_create_table_raises_error_when_table_already_exists(catalog: InMemoryCatalog) -> None: # Given given_catalog_has_a_table(catalog) diff --git a/tests/catalog/test_dynamodb.py b/tests/catalog/test_dynamodb.py index 5af89ef3be..bc801463c5 100644 --- a/tests/catalog/test_dynamodb.py +++ b/tests/catalog/test_dynamodb.py @@ -18,6 +18,7 @@ from unittest import mock import boto3 +import pyarrow as pa import pytest from moto import mock_dynamodb @@ -71,6 +72,23 @@ def test_create_table_with_database_location( assert TABLE_METADATA_LOCATION_REGEX.match(table.metadata_location) +@mock_dynamodb +def test_create_table_with_pyarrow_schema( + _bucket_initialize: None, + moto_endpoint_url: str, + pyarrow_schema_simple_without_ids: pa.Schema, + database_name: str, + table_name: str, +) -> None: + catalog_name = "test_ddb_catalog" + identifier = (database_name, table_name) + test_catalog = DynamoDbCatalog(catalog_name, **{"s3.endpoint": moto_endpoint_url}) + test_catalog.create_namespace(namespace=database_name, properties={"location": f"s3://{BUCKET_NAME}/{database_name}.db"}) + table = test_catalog.create_table(identifier, pyarrow_schema_simple_without_ids) + assert table.identifier == (catalog_name,) + identifier + assert TABLE_METADATA_LOCATION_REGEX.match(table.metadata_location) + + @mock_dynamodb def test_create_table_with_default_warehouse( _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str diff --git 
a/tests/catalog/test_glue.py b/tests/catalog/test_glue.py index b1f1371a04..63a213f94f 100644 --- a/tests/catalog/test_glue.py +++ b/tests/catalog/test_glue.py @@ -18,6 +18,7 @@ from unittest import mock import boto3 +import pyarrow as pa import pytest from moto import mock_glue @@ -101,6 +102,28 @@ def test_create_table_with_given_location( assert test_catalog._parse_metadata_version(table.metadata_location) == 0 +@mock_glue +def test_create_table_with_pyarrow_schema( + _bucket_initialize: None, + moto_endpoint_url: str, + pyarrow_schema_simple_without_ids: pa.Schema, + database_name: str, + table_name: str, +) -> None: + catalog_name = "glue" + identifier = (database_name, table_name) + test_catalog = GlueCatalog(catalog_name, **{"s3.endpoint": moto_endpoint_url}) + test_catalog.create_namespace(namespace=database_name) + table = test_catalog.create_table( + identifier=identifier, + schema=pyarrow_schema_simple_without_ids, + location=f"s3://{BUCKET_NAME}/{database_name}.db/{table_name}", + ) + assert table.identifier == (catalog_name,) + identifier + assert TABLE_METADATA_LOCATION_REGEX.match(table.metadata_location) + assert test_catalog._parse_metadata_version(table.metadata_location) == 0 + + @mock_glue def test_create_table_with_no_location( _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str diff --git a/tests/catalog/test_sql.py b/tests/catalog/test_sql.py index 9dbcf8f84e..1ca8fd16d2 100644 --- a/tests/catalog/test_sql.py +++ b/tests/catalog/test_sql.py @@ -158,6 +158,26 @@ def test_create_table_default_sort_order(catalog: SqlCatalog, table_schema_neste catalog.drop_table(random_identifier) +@pytest.mark.parametrize( + 'catalog', + [ + lazy_fixture('catalog_memory'), + lazy_fixture('catalog_sqlite'), + ], +) +def test_create_table_with_pyarrow_schema( + catalog: SqlCatalog, + pyarrow_schema_simple_without_ids: pa.Schema, + iceberg_table_schema_simple: Schema, + random_identifier: Identifier, +) -> None: + database_name, _table_name = random_identifier + catalog.create_namespace(database_name) + table = catalog.create_table(random_identifier, pyarrow_schema_simple_without_ids) + assert table.schema() == iceberg_table_schema_simple + catalog.drop_table(random_identifier) + + @pytest.mark.parametrize( 'catalog', [ diff --git a/tests/conftest.py b/tests/conftest.py index 9c53301776..d9a8dfdf07 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -45,6 +45,7 @@ from urllib.parse import urlparse import boto3 +import pyarrow as pa import pytest from moto import mock_dynamodb, mock_glue from moto.server import ThreadedMotoServer # type: ignore @@ -267,6 +268,178 @@ def table_schema_nested_with_struct_key_map() -> Schema: ) +@pytest.fixture(scope="session") +def pyarrow_schema_simple_without_ids() -> pa.Schema: + return pa.schema([ + pa.field('foo', pa.string(), nullable=True), + pa.field('bar', pa.int32(), nullable=False), + pa.field('baz', pa.bool_(), nullable=True), + ]) + + +@pytest.fixture(scope="session") +def pyarrow_schema_nested_without_ids() -> pa.Schema: + return pa.schema([ + pa.field('foo', pa.string(), nullable=False), + pa.field('bar', pa.int32(), nullable=False), + pa.field('baz', pa.bool_(), nullable=True), + pa.field('qux', pa.list_(pa.string()), nullable=False), + pa.field( + 'quux', + pa.map_( + pa.string(), + pa.map_(pa.string(), pa.int32()), + ), + nullable=False, + ), + pa.field( + 'location', + pa.list_( + pa.struct([ + pa.field('latitude', pa.float32(), nullable=False), + pa.field('longitude', 
pa.float32(), nullable=False), + ]), + ), + nullable=False, + ), + pa.field( + 'person', + pa.struct([ + pa.field('name', pa.string(), nullable=True), + pa.field('age', pa.int32(), nullable=False), + ]), + nullable=True, + ), + ]) + + +@pytest.fixture(scope="session") +def iceberg_schema_simple() -> Schema: + return Schema( + NestedField(field_id=1, name="foo", field_type=StringType(), required=False), + NestedField(field_id=2, name="bar", field_type=IntegerType(), required=True), + NestedField(field_id=3, name="baz", field_type=BooleanType(), required=False), + ) + + +@pytest.fixture(scope="session") +def iceberg_schema_simple_no_ids() -> Schema: + return Schema( + NestedField(field_id=-1, name="foo", field_type=StringType(), required=False), + NestedField(field_id=-1, name="bar", field_type=IntegerType(), required=True), + NestedField(field_id=-1, name="baz", field_type=BooleanType(), required=False), + ) + + +@pytest.fixture(scope="session") +def iceberg_table_schema_simple() -> Schema: + return Schema( + NestedField(field_id=1, name="foo", field_type=StringType(), required=False), + NestedField(field_id=2, name="bar", field_type=IntegerType(), required=True), + NestedField(field_id=3, name="baz", field_type=BooleanType(), required=False), + schema_id=0, + identifier_field_ids=[], + ) + + +@pytest.fixture(scope="session") +def iceberg_schema_nested() -> Schema: + return Schema( + NestedField(field_id=1, name="foo", field_type=StringType(), required=True), + NestedField(field_id=2, name="bar", field_type=IntegerType(), required=True), + NestedField(field_id=3, name="baz", field_type=BooleanType(), required=False), + NestedField( + field_id=4, + name="qux", + field_type=ListType(element_id=5, element_type=StringType(), element_required=False), + required=True, + ), + NestedField( + field_id=6, + name="quux", + field_type=MapType( + key_id=7, + key_type=StringType(), + value_id=8, + value_type=MapType(key_id=9, key_type=StringType(), value_id=10, value_type=IntegerType(), value_required=False), + value_required=False, + ), + required=True, + ), + NestedField( + field_id=11, + name="location", + field_type=ListType( + element_id=12, + element_type=StructType( + NestedField(field_id=13, name="latitude", field_type=FloatType(), required=True), + NestedField(field_id=14, name="longitude", field_type=FloatType(), required=True), + ), + element_required=False, + ), + required=True, + ), + NestedField( + field_id=15, + name="person", + field_type=StructType( + NestedField(field_id=16, name="name", field_type=StringType(), required=False), + NestedField(field_id=17, name="age", field_type=IntegerType(), required=True), + ), + required=False, + ), + ) + + +@pytest.fixture(scope="session") +def iceberg_schema_nested_no_ids() -> Schema: + return Schema( + NestedField(field_id=-1, name="foo", field_type=StringType(), required=True), + NestedField(field_id=-1, name="bar", field_type=IntegerType(), required=True), + NestedField(field_id=-1, name="baz", field_type=BooleanType(), required=False), + NestedField( + field_id=-1, + name="qux", + field_type=ListType(element_id=-1, element_type=StringType(), element_required=False), + required=True, + ), + NestedField( + field_id=-1, + name="quux", + field_type=MapType( + key_id=-1, + key_type=StringType(), + value_id=-1, + value_type=MapType(key_id=-1, key_type=StringType(), value_id=-1, value_type=IntegerType(), value_required=False), + value_required=False, + ), + required=True, + ), + NestedField( + field_id=-1, + name="location", + field_type=ListType( + 
element_id=-1, + element_type=StructType( + NestedField(field_id=-1, name="latitude", field_type=FloatType(), required=True), + NestedField(field_id=-1, name="longitude", field_type=FloatType(), required=True), + ), + element_required=False, + ), + required=True, + ), + NestedField( + field_id=-1, + name="person", + field_type=StructType( + NestedField(field_id=-1, name="name", field_type=StringType(), required=False), + NestedField(field_id=-1, name="age", field_type=IntegerType(), required=True), + ), + required=False, + ), + ) + + @pytest.fixture(scope="session") def all_avro_types() -> Dict[str, Any]: return { diff --git a/tests/io/test_pyarrow_visitor.py b/tests/io/test_pyarrow_visitor.py index 0986eac409..c7f364b920 100644 --- a/tests/io/test_pyarrow_visitor.py +++ b/tests/io/test_pyarrow_visitor.py @@ -23,6 +23,7 @@ from pyiceberg.io.pyarrow import ( _ConvertToArrowSchema, _ConvertToIceberg, + _ConvertToIcebergWithoutIDs, _HasIds, pyarrow_to_schema, schema_to_pyarrow, @@ -51,104 +52,6 @@ ) -@pytest.fixture(scope="module") -def pyarrow_schema_simple_without_ids() -> pa.Schema: - return pa.schema([pa.field('some_int', pa.int32(), nullable=True), pa.field('some_string', pa.string(), nullable=False)]) - - -@pytest.fixture(scope="module") -def pyarrow_schema_nested_without_ids() -> pa.Schema: - return pa.schema([ - pa.field('foo', pa.string(), nullable=False), - pa.field('bar', pa.int32(), nullable=False), - pa.field('baz', pa.bool_(), nullable=True), - pa.field('qux', pa.list_(pa.string()), nullable=False), - pa.field( - 'quux', - pa.map_( - pa.string(), - pa.map_(pa.string(), pa.int32()), - ), - nullable=False, - ), - pa.field( - 'location', - pa.list_( - pa.struct([ - pa.field('latitude', pa.float32(), nullable=False), - pa.field('longitude', pa.float32(), nullable=False), - ]), - ), - nullable=False, - ), - pa.field( - 'person', - pa.struct([ - pa.field('name', pa.string(), nullable=True), - pa.field('age', pa.int32(), nullable=False), - ]), - nullable=True, - ), - ]) - - -@pytest.fixture(scope="module") -def iceberg_schema_simple() -> Schema: - return Schema( - NestedField(field_id=1, name="some_int", field_type=IntegerType(), required=False), - NestedField(field_id=2, name="some_string", field_type=StringType(), required=True), - ) - - -@pytest.fixture(scope="module") -def iceberg_schema_nested() -> Schema: - return Schema( - NestedField(field_id=1, name="foo", field_type=StringType(), required=True), - NestedField(field_id=2, name="bar", field_type=IntegerType(), required=True), - NestedField(field_id=3, name="baz", field_type=BooleanType(), required=False), - NestedField( - field_id=4, - name="qux", - field_type=ListType(element_id=5, element_type=StringType(), element_required=False), - required=True, - ), - NestedField( - field_id=6, - name="quux", - field_type=MapType( - key_id=7, - key_type=StringType(), - value_id=8, - value_type=MapType(key_id=9, key_type=StringType(), value_id=10, value_type=IntegerType(), value_required=False), - value_required=False, - ), - required=True, - ), - NestedField( - field_id=11, - name="location", - field_type=ListType( - element_id=12, - element_type=StructType( - NestedField(field_id=13, name="latitude", field_type=FloatType(), required=True), - NestedField(field_id=14, name="longitude", field_type=FloatType(), required=True), - ), - element_required=False, - ), - required=True, - ), - NestedField( - field_id=15, - name="person", - field_type=StructType( - NestedField(field_id=16, name="name", field_type=StringType(), required=False), - 
NestedField(field_id=17, name="age", field_type=IntegerType(), required=True), - ), - required=False, - ), - ) - - def test_pyarrow_binary_to_iceberg() -> None: length = 23 pyarrow_type = pa.binary(length) @@ -468,8 +371,9 @@ def test_simple_pyarrow_schema_to_schema_missing_ids_using_name_mapping( ) -> None: schema = pyarrow_schema_simple_without_ids name_mapping = NameMapping([ - MappedField(field_id=1, names=['some_int']), - MappedField(field_id=2, names=['some_string']), + MappedField(field_id=1, names=['foo']), + MappedField(field_id=2, names=['bar']), + MappedField(field_id=3, names=['baz']), ]) assert pyarrow_to_schema(schema, name_mapping) == iceberg_schema_simple @@ -480,11 +384,11 @@ def test_simple_pyarrow_schema_to_schema_missing_ids_using_name_mapping_partial_ ) -> None: schema = pyarrow_schema_simple_without_ids name_mapping = NameMapping([ - MappedField(field_id=1, names=['some_string']), + MappedField(field_id=1, names=['foo']), ]) with pytest.raises(ValueError) as exc_info: _ = pyarrow_to_schema(schema, name_mapping) - assert "Could not find field with name: some_int" in str(exc_info.value) + assert "Could not find field with name: bar" in str(exc_info.value) def test_nested_pyarrow_schema_to_schema_missing_ids_using_name_mapping( @@ -572,3 +476,15 @@ def test_pyarrow_schema_to_schema_missing_ids_using_name_mapping_nested_missing_ with pytest.raises(ValueError) as exc_info: _ = pyarrow_to_schema(schema, name_mapping) assert "Could not find field with name: quux.value.key" in str(exc_info.value) + + +def test_pyarrow_schema_to_schema_fresh_ids_simple_schema( + pyarrow_schema_simple_without_ids: pa.Schema, iceberg_schema_simple_no_ids: Schema +) -> None: + assert visit_pyarrow(pyarrow_schema_simple_without_ids, _ConvertToIcebergWithoutIDs()) == iceberg_schema_simple_no_ids + + +def test_pyarrow_schema_to_schema_fresh_ids_nested_schema( + pyarrow_schema_nested_without_ids: pa.Schema, iceberg_schema_nested_no_ids: Schema +) -> None: + assert visit_pyarrow(pyarrow_schema_nested_without_ids, _ConvertToIcebergWithoutIDs()) == iceberg_schema_nested_no_ids From 2801e930b67be87e500ceb0468ac72d1d17169d5 Mon Sep 17 00:00:00 2001 From: Fokko Driesprong Date: Tue, 30 Jan 2024 06:55:32 +0100 Subject: [PATCH 052/169] docs: Add community page (#315) * docs: Add community page * Thanks Sung! Co-authored-by: Sung Yun <107272191+syun64@users.noreply.github.com> --------- Co-authored-by: Sung Yun <107272191+syun64@users.noreply.github.com> --- mkdocs/docs/SUMMARY.md | 1 + mkdocs/docs/community.md | 64 ++++++++++++++++++++++++++++++++++++++++ 2 files changed, 65 insertions(+) create mode 100644 mkdocs/docs/community.md diff --git a/mkdocs/docs/SUMMARY.md b/mkdocs/docs/SUMMARY.md index 77dbcbbf89..5504d2ac7b 100644 --- a/mkdocs/docs/SUMMARY.md +++ b/mkdocs/docs/SUMMARY.md @@ -22,6 +22,7 @@ - [CLI](cli.md) - [API](api.md) - [Contributing](contributing.md) +- [Community](community.md) - Releases - [Verify a release](verify-release.md) - [How to release](how-to-release.md) diff --git a/mkdocs/docs/community.md b/mkdocs/docs/community.md new file mode 100644 index 0000000000..4c542bff8e --- /dev/null +++ b/mkdocs/docs/community.md @@ -0,0 +1,64 @@ +--- +hide: + - navigation +--- + + + +# Join the community + +Apache Iceberg tracks issues in GitHub and prefers to receive contributions as pull requests. 
+ +Community discussions happen primarily on the [dev mailing list](https://lists.apache.org/list.html?dev@iceberg.apache.org), on [Apache Iceberg Slack workspace](https://join.slack.com/t/apache-iceberg/shared_invite/zt-287g3akar-K9Oe_En5j1UL7Y_Ikpai3A) in the #python channel, and on specific [GitHub issues](https://github.com/apache/iceberg-python/issues). + +## Iceberg Community Events + +The PyIceberg community sync is on the last Tuesday of every month. To join, make sure to subscribe to the [iceberg-python-sync Google group](https://groups.google.com/g/iceberg-python-sync). + +## Community Guidelines + +### Apache Iceberg Community Guidelines + +The Apache Iceberg community is built on the principles described in the [Apache Way](https://www.apache.org/theapacheway/index.html) +and all who engage with the community are expected to be respectful, open, come with the best interests of the community in mind, +and abide by the Apache Foundation [Code of Conduct](https://www.apache.org/foundation/policies/conduct.html). + +### Participants with Corporate Interests + +A wide range of corporate entities have interests that overlap in both features and frameworks related to Iceberg and while we +encourage engagement and contributions, the community is not a venue for marketing, solicitation, or recruitment. + +Any vendor who wants to participate in the Apache Iceberg community Slack workspace should create a dedicated vendor channel +for their organization prefixed by `vendor-`. + +This space can be used to discuss features and integration with Iceberg related to the vendor offering. This space should not +be used to promote competing vendor products/services or disparage other vendor offerings. Discussion should be focused on +questions asked by the community and not to expand/introduce/redirect users to alternate offerings. + +### Marketing / Solicitation / Recruiting + +The Apache Iceberg community is a space for everyone to operate free of influence. The development lists, Slack workspace, +and GitHub should not be used to market products or services. Solicitation or overt promotion should not be performed in common +channels or through direct messages. + +Recruitment of community members should not be conducted through direct messages or community channels, but opportunities +related to contributing to or using Iceberg can be posted to the `#jobs` channel. + +For questions regarding any of the guidelines above, please contact a PMC member From 32299c10d397105c49eae969094ded2395129a0c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 30 Jan 2024 07:03:47 +0100 Subject: [PATCH 053/169] Build: Bump aiohttp from 3.9.1 to 3.9.2 (#323) Bumps [aiohttp](https://github.com/aio-libs/aiohttp) from 3.9.1 to 3.9.2. - [Release notes](https://github.com/aio-libs/aiohttp/releases) - [Changelog](https://github.com/aio-libs/aiohttp/blob/master/CHANGES.rst) - [Commits](https://github.com/aio-libs/aiohttp/compare/v3.9.1...v3.9.2) --- updated-dependencies: - dependency-name: aiohttp dependency-type: indirect ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 166 ++++++++++++++++++++++++++-------------------------- 1 file changed, 83 insertions(+), 83 deletions(-) diff --git a/poetry.lock b/poetry.lock index 41f13a7c23..a6d30ae24e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -46,87 +46,87 @@ boto3 = ["boto3 (>=1.33.2,<1.34.28)"] [[package]] name = "aiohttp" -version = "3.9.1" +version = "3.9.2" description = "Async http client/server framework (asyncio)" -optional = true +optional = false python-versions = ">=3.8" files = [ - {file = "aiohttp-3.9.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e1f80197f8b0b846a8d5cf7b7ec6084493950d0882cc5537fb7b96a69e3c8590"}, - {file = "aiohttp-3.9.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c72444d17777865734aa1a4d167794c34b63e5883abb90356a0364a28904e6c0"}, - {file = "aiohttp-3.9.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9b05d5cbe9dafcdc733262c3a99ccf63d2f7ce02543620d2bd8db4d4f7a22f83"}, - {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c4fa235d534b3547184831c624c0b7c1e262cd1de847d95085ec94c16fddcd5"}, - {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:289ba9ae8e88d0ba16062ecf02dd730b34186ea3b1e7489046fc338bdc3361c4"}, - {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bff7e2811814fa2271be95ab6e84c9436d027a0e59665de60edf44e529a42c1f"}, - {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81b77f868814346662c96ab36b875d7814ebf82340d3284a31681085c051320f"}, - {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b9c7426923bb7bd66d409da46c41e3fb40f5caf679da624439b9eba92043fa6"}, - {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8d44e7bf06b0c0a70a20f9100af9fcfd7f6d9d3913e37754c12d424179b4e48f"}, - {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:22698f01ff5653fe66d16ffb7658f582a0ac084d7da1323e39fd9eab326a1f26"}, - {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ca7ca5abfbfe8d39e653870fbe8d7710be7a857f8a8386fc9de1aae2e02ce7e4"}, - {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:8d7f98fde213f74561be1d6d3fa353656197f75d4edfbb3d94c9eb9b0fc47f5d"}, - {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5216b6082c624b55cfe79af5d538e499cd5f5b976820eac31951fb4325974501"}, - {file = "aiohttp-3.9.1-cp310-cp310-win32.whl", hash = "sha256:0e7ba7ff228c0d9a2cd66194e90f2bca6e0abca810b786901a569c0de082f489"}, - {file = "aiohttp-3.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:c7e939f1ae428a86e4abbb9a7c4732bf4706048818dfd979e5e2839ce0159f23"}, - {file = "aiohttp-3.9.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:df9cf74b9bc03d586fc53ba470828d7b77ce51b0582d1d0b5b2fb673c0baa32d"}, - {file = "aiohttp-3.9.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ecca113f19d5e74048c001934045a2b9368d77b0b17691d905af18bd1c21275e"}, - {file = "aiohttp-3.9.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8cef8710fb849d97c533f259103f09bac167a008d7131d7b2b0e3a33269185c0"}, - {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bea94403a21eb94c93386d559bce297381609153e418a3ffc7d6bf772f59cc35"}, - {file = 
"aiohttp-3.9.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91c742ca59045dce7ba76cab6e223e41d2c70d79e82c284a96411f8645e2afff"}, - {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6c93b7c2e52061f0925c3382d5cb8980e40f91c989563d3d32ca280069fd6a87"}, - {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee2527134f95e106cc1653e9ac78846f3a2ec1004cf20ef4e02038035a74544d"}, - {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11ff168d752cb41e8492817e10fb4f85828f6a0142b9726a30c27c35a1835f01"}, - {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b8c3a67eb87394386847d188996920f33b01b32155f0a94f36ca0e0c635bf3e3"}, - {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c7b5d5d64e2a14e35a9240b33b89389e0035e6de8dbb7ffa50d10d8b65c57449"}, - {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:69985d50a2b6f709412d944ffb2e97d0be154ea90600b7a921f95a87d6f108a2"}, - {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:c9110c06eaaac7e1f5562caf481f18ccf8f6fdf4c3323feab28a93d34cc646bd"}, - {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d737e69d193dac7296365a6dcb73bbbf53bb760ab25a3727716bbd42022e8d7a"}, - {file = "aiohttp-3.9.1-cp311-cp311-win32.whl", hash = "sha256:4ee8caa925aebc1e64e98432d78ea8de67b2272252b0a931d2ac3bd876ad5544"}, - {file = "aiohttp-3.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:a34086c5cc285be878622e0a6ab897a986a6e8bf5b67ecb377015f06ed316587"}, - {file = "aiohttp-3.9.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f800164276eec54e0af5c99feb9494c295118fc10a11b997bbb1348ba1a52065"}, - {file = "aiohttp-3.9.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:500f1c59906cd142d452074f3811614be04819a38ae2b3239a48b82649c08821"}, - {file = "aiohttp-3.9.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0b0a6a36ed7e164c6df1e18ee47afbd1990ce47cb428739d6c99aaabfaf1b3af"}, - {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69da0f3ed3496808e8cbc5123a866c41c12c15baaaead96d256477edf168eb57"}, - {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:176df045597e674fa950bf5ae536be85699e04cea68fa3a616cf75e413737eb5"}, - {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b796b44111f0cab6bbf66214186e44734b5baab949cb5fb56154142a92989aeb"}, - {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f27fdaadce22f2ef950fc10dcdf8048407c3b42b73779e48a4e76b3c35bca26c"}, - {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bcb6532b9814ea7c5a6a3299747c49de30e84472fa72821b07f5a9818bce0f66"}, - {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:54631fb69a6e44b2ba522f7c22a6fb2667a02fd97d636048478db2fd8c4e98fe"}, - {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:4b4c452d0190c5a820d3f5c0f3cd8a28ace48c54053e24da9d6041bf81113183"}, - {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:cae4c0c2ca800c793cae07ef3d40794625471040a87e1ba392039639ad61ab5b"}, - {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = 
"sha256:565760d6812b8d78d416c3c7cfdf5362fbe0d0d25b82fed75d0d29e18d7fc30f"}, - {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:54311eb54f3a0c45efb9ed0d0a8f43d1bc6060d773f6973efd90037a51cd0a3f"}, - {file = "aiohttp-3.9.1-cp312-cp312-win32.whl", hash = "sha256:85c3e3c9cb1d480e0b9a64c658cd66b3cfb8e721636ab8b0e746e2d79a7a9eed"}, - {file = "aiohttp-3.9.1-cp312-cp312-win_amd64.whl", hash = "sha256:11cb254e397a82efb1805d12561e80124928e04e9c4483587ce7390b3866d213"}, - {file = "aiohttp-3.9.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8a22a34bc594d9d24621091d1b91511001a7eea91d6652ea495ce06e27381f70"}, - {file = "aiohttp-3.9.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:598db66eaf2e04aa0c8900a63b0101fdc5e6b8a7ddd805c56d86efb54eb66672"}, - {file = "aiohttp-3.9.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2c9376e2b09895c8ca8b95362283365eb5c03bdc8428ade80a864160605715f1"}, - {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41473de252e1797c2d2293804e389a6d6986ef37cbb4a25208de537ae32141dd"}, - {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9c5857612c9813796960c00767645cb5da815af16dafb32d70c72a8390bbf690"}, - {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ffcd828e37dc219a72c9012ec44ad2e7e3066bec6ff3aaa19e7d435dbf4032ca"}, - {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:219a16763dc0294842188ac8a12262b5671817042b35d45e44fd0a697d8c8361"}, - {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f694dc8a6a3112059258a725a4ebe9acac5fe62f11c77ac4dcf896edfa78ca28"}, - {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:bcc0ea8d5b74a41b621ad4a13d96c36079c81628ccc0b30cfb1603e3dfa3a014"}, - {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:90ec72d231169b4b8d6085be13023ece8fa9b1bb495e4398d847e25218e0f431"}, - {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:cf2a0ac0615842b849f40c4d7f304986a242f1e68286dbf3bd7a835e4f83acfd"}, - {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:0e49b08eafa4f5707ecfb321ab9592717a319e37938e301d462f79b4e860c32a"}, - {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2c59e0076ea31c08553e868cec02d22191c086f00b44610f8ab7363a11a5d9d8"}, - {file = "aiohttp-3.9.1-cp38-cp38-win32.whl", hash = "sha256:4831df72b053b1eed31eb00a2e1aff6896fb4485301d4ccb208cac264b648db4"}, - {file = "aiohttp-3.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:3135713c5562731ee18f58d3ad1bf41e1d8883eb68b363f2ffde5b2ea4b84cc7"}, - {file = "aiohttp-3.9.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:cfeadf42840c1e870dc2042a232a8748e75a36b52d78968cda6736de55582766"}, - {file = "aiohttp-3.9.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:70907533db712f7aa791effb38efa96f044ce3d4e850e2d7691abd759f4f0ae0"}, - {file = "aiohttp-3.9.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cdefe289681507187e375a5064c7599f52c40343a8701761c802c1853a504558"}, - {file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7481f581251bb5558ba9f635db70908819caa221fc79ee52a7f58392778c636"}, - {file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:49f0c1b3c2842556e5de35f122fc0f0b721334ceb6e78c3719693364d4af8499"}, - 
{file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0d406b01a9f5a7e232d1b0d161b40c05275ffbcbd772dc18c1d5a570961a1ca4"}, - {file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d8e4450e7fe24d86e86b23cc209e0023177b6d59502e33807b732d2deb6975f"}, - {file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c0266cd6f005e99f3f51e583012de2778e65af6b73860038b968a0a8888487a"}, - {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab221850108a4a063c5b8a70f00dd7a1975e5a1713f87f4ab26a46e5feac5a0e"}, - {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c88a15f272a0ad3d7773cf3a37cc7b7d077cbfc8e331675cf1346e849d97a4e5"}, - {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:237533179d9747080bcaad4d02083ce295c0d2eab3e9e8ce103411a4312991a0"}, - {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:02ab6006ec3c3463b528374c4cdce86434e7b89ad355e7bf29e2f16b46c7dd6f"}, - {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04fa38875e53eb7e354ece1607b1d2fdee2d175ea4e4d745f6ec9f751fe20c7c"}, - {file = "aiohttp-3.9.1-cp39-cp39-win32.whl", hash = "sha256:82eefaf1a996060602f3cc1112d93ba8b201dbf5d8fd9611227de2003dddb3b7"}, - {file = "aiohttp-3.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:9b05d33ff8e6b269e30a7957bd3244ffbce2a7a35a81b81c382629b80af1a8bf"}, - {file = "aiohttp-3.9.1.tar.gz", hash = "sha256:8fc49a87ac269d4529da45871e2ffb6874e87779c3d0e2ccd813c0899221239d"}, + {file = "aiohttp-3.9.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:772fbe371788e61c58d6d3d904268e48a594ba866804d08c995ad71b144f94cb"}, + {file = "aiohttp-3.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:edd4f1af2253f227ae311ab3d403d0c506c9b4410c7fc8d9573dec6d9740369f"}, + {file = "aiohttp-3.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cfee9287778399fdef6f8a11c9e425e1cb13cc9920fd3a3df8f122500978292b"}, + {file = "aiohttp-3.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cc158466f6a980a6095ee55174d1de5730ad7dec251be655d9a6a9dd7ea1ff9"}, + {file = "aiohttp-3.9.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:54ec82f45d57c9a65a1ead3953b51c704f9587440e6682f689da97f3e8defa35"}, + {file = "aiohttp-3.9.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:abeb813a18eb387f0d835ef51f88568540ad0325807a77a6e501fed4610f864e"}, + {file = "aiohttp-3.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc91d07280d7d169f3a0f9179d8babd0ee05c79d4d891447629ff0d7d8089ec2"}, + {file = "aiohttp-3.9.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b65e861f4bebfb660f7f0f40fa3eb9f2ab9af10647d05dac824390e7af8f75b7"}, + {file = "aiohttp-3.9.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:04fd8ffd2be73d42bcf55fd78cde7958eeee6d4d8f73c3846b7cba491ecdb570"}, + {file = "aiohttp-3.9.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:3d8d962b439a859b3ded9a1e111a4615357b01620a546bc601f25b0211f2da81"}, + {file = "aiohttp-3.9.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:8ceb658afd12b27552597cf9a65d9807d58aef45adbb58616cdd5ad4c258c39e"}, + {file = "aiohttp-3.9.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:0e4ee4df741670560b1bc393672035418bf9063718fee05e1796bf867e995fad"}, + {file = 
"aiohttp-3.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2dec87a556f300d3211decf018bfd263424f0690fcca00de94a837949fbcea02"}, + {file = "aiohttp-3.9.2-cp310-cp310-win32.whl", hash = "sha256:3e1a800f988ce7c4917f34096f81585a73dbf65b5c39618b37926b1238cf9bc4"}, + {file = "aiohttp-3.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:ea510718a41b95c236c992b89fdfc3d04cc7ca60281f93aaada497c2b4e05c46"}, + {file = "aiohttp-3.9.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6aaa6f99256dd1b5756a50891a20f0d252bd7bdb0854c5d440edab4495c9f973"}, + {file = "aiohttp-3.9.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a27d8c70ad87bcfce2e97488652075a9bdd5b70093f50b10ae051dfe5e6baf37"}, + {file = "aiohttp-3.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:54287bcb74d21715ac8382e9de146d9442b5f133d9babb7e5d9e453faadd005e"}, + {file = "aiohttp-3.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bb3d05569aa83011fcb346b5266e00b04180105fcacc63743fc2e4a1862a891"}, + {file = "aiohttp-3.9.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c8534e7d69bb8e8d134fe2be9890d1b863518582f30c9874ed7ed12e48abe3c4"}, + {file = "aiohttp-3.9.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4bd9d5b989d57b41e4ff56ab250c5ddf259f32db17159cce630fd543376bd96b"}, + {file = "aiohttp-3.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa6904088e6642609981f919ba775838ebf7df7fe64998b1a954fb411ffb4663"}, + {file = "aiohttp-3.9.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bda42eb410be91b349fb4ee3a23a30ee301c391e503996a638d05659d76ea4c2"}, + {file = "aiohttp-3.9.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:193cc1ccd69d819562cc7f345c815a6fc51d223b2ef22f23c1a0f67a88de9a72"}, + {file = "aiohttp-3.9.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b9f1cb839b621f84a5b006848e336cf1496688059d2408e617af33e3470ba204"}, + {file = "aiohttp-3.9.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:d22a0931848b8c7a023c695fa2057c6aaac19085f257d48baa24455e67df97ec"}, + {file = "aiohttp-3.9.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4112d8ba61fbd0abd5d43a9cb312214565b446d926e282a6d7da3f5a5aa71d36"}, + {file = "aiohttp-3.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c4ad4241b52bb2eb7a4d2bde060d31c2b255b8c6597dd8deac2f039168d14fd7"}, + {file = "aiohttp-3.9.2-cp311-cp311-win32.whl", hash = "sha256:ee2661a3f5b529f4fc8a8ffee9f736ae054adfb353a0d2f78218be90617194b3"}, + {file = "aiohttp-3.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:4deae2c165a5db1ed97df2868ef31ca3cc999988812e82386d22937d9d6fed52"}, + {file = "aiohttp-3.9.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:6f4cdba12539215aaecf3c310ce9d067b0081a0795dd8a8805fdb67a65c0572a"}, + {file = "aiohttp-3.9.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:84e843b33d5460a5c501c05539809ff3aee07436296ff9fbc4d327e32aa3a326"}, + {file = "aiohttp-3.9.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8008d0f451d66140a5aa1c17e3eedc9d56e14207568cd42072c9d6b92bf19b52"}, + {file = "aiohttp-3.9.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:61c47ab8ef629793c086378b1df93d18438612d3ed60dca76c3422f4fbafa792"}, + {file = "aiohttp-3.9.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bc71f748e12284312f140eaa6599a520389273174b42c345d13c7e07792f4f57"}, + {file = 
"aiohttp-3.9.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a1c3a4d0ab2f75f22ec80bca62385db2e8810ee12efa8c9e92efea45c1849133"}, + {file = "aiohttp-3.9.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a87aa0b13bbee025faa59fa58861303c2b064b9855d4c0e45ec70182bbeba1b"}, + {file = "aiohttp-3.9.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2cc0d04688b9f4a7854c56c18aa7af9e5b0a87a28f934e2e596ba7e14783192"}, + {file = "aiohttp-3.9.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1956e3ac376b1711c1533266dec4efd485f821d84c13ce1217d53e42c9e65f08"}, + {file = "aiohttp-3.9.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:114da29f39eccd71b93a0fcacff178749a5c3559009b4a4498c2c173a6d74dff"}, + {file = "aiohttp-3.9.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:3f17999ae3927d8a9a823a1283b201344a0627272f92d4f3e3a4efe276972fe8"}, + {file = "aiohttp-3.9.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:f31df6a32217a34ae2f813b152a6f348154f948c83213b690e59d9e84020925c"}, + {file = "aiohttp-3.9.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:7a75307ffe31329928a8d47eae0692192327c599113d41b278d4c12b54e1bd11"}, + {file = "aiohttp-3.9.2-cp312-cp312-win32.whl", hash = "sha256:972b63d589ff8f305463593050a31b5ce91638918da38139b9d8deaba9e0fed7"}, + {file = "aiohttp-3.9.2-cp312-cp312-win_amd64.whl", hash = "sha256:200dc0246f0cb5405c80d18ac905c8350179c063ea1587580e3335bfc243ba6a"}, + {file = "aiohttp-3.9.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:158564d0d1020e0d3fe919a81d97aadad35171e13e7b425b244ad4337fc6793a"}, + {file = "aiohttp-3.9.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:da1346cd0ccb395f0ed16b113ebb626fa43b7b07fd7344fce33e7a4f04a8897a"}, + {file = "aiohttp-3.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:eaa9256de26ea0334ffa25f1913ae15a51e35c529a1ed9af8e6286dd44312554"}, + {file = "aiohttp-3.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1543e7fb00214fb4ccead42e6a7d86f3bb7c34751ec7c605cca7388e525fd0b4"}, + {file = "aiohttp-3.9.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:186e94570433a004e05f31f632726ae0f2c9dee4762a9ce915769ce9c0a23d89"}, + {file = "aiohttp-3.9.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d52d20832ac1560f4510d68e7ba8befbc801a2b77df12bd0cd2bcf3b049e52a4"}, + {file = "aiohttp-3.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c45e4e815ac6af3b72ca2bde9b608d2571737bb1e2d42299fc1ffdf60f6f9a1"}, + {file = "aiohttp-3.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa906b9bdfd4a7972dd0628dbbd6413d2062df5b431194486a78f0d2ae87bd55"}, + {file = "aiohttp-3.9.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:68bbee9e17d66f17bb0010aa15a22c6eb28583edcc8b3212e2b8e3f77f3ebe2a"}, + {file = "aiohttp-3.9.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4c189b64bd6d9a403a1a3f86a3ab3acbc3dc41a68f73a268a4f683f89a4dec1f"}, + {file = "aiohttp-3.9.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:8a7876f794523123bca6d44bfecd89c9fec9ec897a25f3dd202ee7fc5c6525b7"}, + {file = "aiohttp-3.9.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:d23fba734e3dd7b1d679b9473129cd52e4ec0e65a4512b488981a56420e708db"}, + {file = "aiohttp-3.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b141753be581fab842a25cb319f79536d19c2a51995d7d8b29ee290169868eab"}, + 
{file = "aiohttp-3.9.2-cp38-cp38-win32.whl", hash = "sha256:103daf41ff3b53ba6fa09ad410793e2e76c9d0269151812e5aba4b9dd674a7e8"}, + {file = "aiohttp-3.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:328918a6c2835861ff7afa8c6d2c70c35fdaf996205d5932351bdd952f33fa2f"}, + {file = "aiohttp-3.9.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5264d7327c9464786f74e4ec9342afbbb6ee70dfbb2ec9e3dfce7a54c8043aa3"}, + {file = "aiohttp-3.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:07205ae0015e05c78b3288c1517afa000823a678a41594b3fdc870878d645305"}, + {file = "aiohttp-3.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ae0a1e638cffc3ec4d4784b8b4fd1cf28968febc4bd2718ffa25b99b96a741bd"}, + {file = "aiohttp-3.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d43302a30ba1166325974858e6ef31727a23bdd12db40e725bec0f759abce505"}, + {file = "aiohttp-3.9.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:16a967685907003765855999af11a79b24e70b34dc710f77a38d21cd9fc4f5fe"}, + {file = "aiohttp-3.9.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6fa3ee92cd441d5c2d07ca88d7a9cef50f7ec975f0117cd0c62018022a184308"}, + {file = "aiohttp-3.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b500c5ad9c07639d48615a770f49618130e61be36608fc9bc2d9bae31732b8f"}, + {file = "aiohttp-3.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c07327b368745b1ce2393ae9e1aafed7073d9199e1dcba14e035cc646c7941bf"}, + {file = "aiohttp-3.9.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:cc7d6502c23a0ec109687bf31909b3fb7b196faf198f8cff68c81b49eb316ea9"}, + {file = "aiohttp-3.9.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:07be2be7071723c3509ab5c08108d3a74f2181d4964e869f2504aaab68f8d3e8"}, + {file = "aiohttp-3.9.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:122468f6fee5fcbe67cb07014a08c195b3d4c41ff71e7b5160a7bcc41d585a5f"}, + {file = "aiohttp-3.9.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:00a9abcea793c81e7f8778ca195a1714a64f6d7436c4c0bb168ad2a212627000"}, + {file = "aiohttp-3.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7a9825fdd64ecac5c670234d80bb52bdcaa4139d1f839165f548208b3779c6c6"}, + {file = "aiohttp-3.9.2-cp39-cp39-win32.whl", hash = "sha256:5422cd9a4a00f24c7244e1b15aa9b87935c85fb6a00c8ac9b2527b38627a9211"}, + {file = "aiohttp-3.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:7d579dcd5d82a86a46f725458418458fa43686f6a7b252f2966d359033ffc8ab"}, + {file = "aiohttp-3.9.2.tar.gz", hash = "sha256:b0ad0a5e86ce73f5368a164c10ada10504bf91869c05ab75d982c6048217fbf7"}, ] [package.dependencies] @@ -158,7 +158,7 @@ typing_extensions = {version = ">=4.0", markers = "python_version < \"3.10\""} name = "aiosignal" version = "1.3.1" description = "aiosignal: a list of registered asynchronous callbacks" -optional = true +optional = false python-versions = ">=3.7" files = [ {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, @@ -186,7 +186,7 @@ typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.9\""} name = "async-timeout" version = "4.0.3" description = "Timeout context manager for asyncio programs" -optional = true +optional = false python-versions = ">=3.7" files = [ {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, @@ -1064,7 +1064,7 @@ Flask = ">=0.9" name = "frozenlist" 
version = "1.4.1" description = "A list-like structure which implements collections.abc.MutableSequence" -optional = true +optional = false python-versions = ">=3.8" files = [ {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac"}, @@ -2149,7 +2149,7 @@ files = [ name = "multidict" version = "6.0.4" description = "multidict implementation" -optional = true +optional = false python-versions = ">=3.7" files = [ {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8"}, @@ -4125,7 +4125,7 @@ files = [ name = "yarl" version = "1.9.4" description = "Yet another URL library" -optional = true +optional = false python-versions = ">=3.7" files = [ {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e"}, From f762a3b122899ff187150f0dcd12d75ca7f5c54b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 30 Jan 2024 07:03:57 +0100 Subject: [PATCH 054/169] Build: Bump adlfs from 2023.12.0 to 2024.1.0 (#320) Bumps [adlfs](https://github.com/fsspec/adlfs) from 2023.12.0 to 2024.1.0. - [Release notes](https://github.com/fsspec/adlfs/releases) - [Changelog](https://github.com/fsspec/adlfs/blob/main/CHANGELOG.md) - [Commits](https://github.com/fsspec/adlfs/compare/2023.12.0...2024.1.0) --- updated-dependencies: - dependency-name: adlfs dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 8 ++++---- pyproject.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index a6d30ae24e..622215f5b9 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2,13 +2,13 @@ [[package]] name = "adlfs" -version = "2023.12.0" +version = "2024.1.0" description = "Access Azure Datalake Gen1 with fsspec and dask" optional = true python-versions = ">=3.8" files = [ - {file = "adlfs-2023.12.0-py3-none-any.whl", hash = "sha256:da6e391afd002c7bb1b75dcc286b78fdf5dbf29aca0f984462033df6311b4e7e"}, - {file = "adlfs-2023.12.0.tar.gz", hash = "sha256:a590694ed9f5a45741e82bff8bcf88c30a790da949310817330b5e7992b8a9e9"}, + {file = "adlfs-2024.1.0-py3-none-any.whl", hash = "sha256:7f5982785c7af34da879463ab2a046b0875116c83f97ed3b810e956244276764"}, + {file = "adlfs-2024.1.0.tar.gz", hash = "sha256:bece62abb8aa29e3a3dd2a5453de61133f481d743b6f65aab01ca1c1a0157b4c"}, ] [package.dependencies] @@ -4319,4 +4319,4 @@ zstandard = ["zstandard"] [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "6d7f598c9ddb0e7565ef8af1b3270c9e5eb32718abfd0a3a7d5c192543150ba8" +content-hash = "5becf154a072fa31d9cefaee2adaacca1e2315579fd821a4567f41b075ed3b1d" diff --git a/pyproject.toml b/pyproject.toml index d1bc82dc62..f823155382 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -66,7 +66,7 @@ thrift = { version = ">=0.13.0,<1.0.0", optional = true } mypy-boto3-glue = { version = ">=1.28.18", optional = true } boto3 = { version = ">=1.24.59", optional = true } s3fs = { version = ">=2023.1.0,<2024.1.0", optional = true } -adlfs = { version = ">=2023.1.0,<2024.1.0", optional = true } +adlfs = { version = ">=2023.1.0,<2024.2.0", optional = true } gcsfs = { version = ">=2023.1.0,<2024.1.0", optional = true } psycopg2-binary = { 
version = ">=2.9.6", optional = true } sqlalchemy = { version = "^2.0.18", optional = true } From c6a2fabb4716e21c4a883d43d44c89464acb6c15 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 30 Jan 2024 07:04:05 +0100 Subject: [PATCH 055/169] Build: Bump pydantic from 2.5.3 to 2.6.0 (#317) Bumps [pydantic](https://github.com/pydantic/pydantic) from 2.5.3 to 2.6.0. - [Release notes](https://github.com/pydantic/pydantic/releases) - [Changelog](https://github.com/pydantic/pydantic/blob/main/HISTORY.md) - [Commits](https://github.com/pydantic/pydantic/compare/v2.5.3...v2.6.0) --- updated-dependencies: - dependency-name: pydantic dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 198 +++++++++++++++++++++++----------------------------- 1 file changed, 86 insertions(+), 112 deletions(-) diff --git a/poetry.lock b/poetry.lock index 622215f5b9..f8d7a138e3 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2798,18 +2798,18 @@ files = [ [[package]] name = "pydantic" -version = "2.5.3" +version = "2.6.0" description = "Data validation using Python type hints" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pydantic-2.5.3-py3-none-any.whl", hash = "sha256:d0caf5954bee831b6bfe7e338c32b9e30c85dfe080c843680783ac2b631673b4"}, - {file = "pydantic-2.5.3.tar.gz", hash = "sha256:b3ef57c62535b0941697cce638c08900d87fcb67e29cfa99e8a68f747f393f7a"}, + {file = "pydantic-2.6.0-py3-none-any.whl", hash = "sha256:1440966574e1b5b99cf75a13bec7b20e3512e8a61b894ae252f56275e2c465ae"}, + {file = "pydantic-2.6.0.tar.gz", hash = "sha256:ae887bd94eb404b09d86e4d12f93893bdca79d766e738528c6fa1c849f3c6bcf"}, ] [package.dependencies] annotated-types = ">=0.4.0" -pydantic-core = "2.14.6" +pydantic-core = "2.16.1" typing-extensions = ">=4.6.1" [package.extras] @@ -2817,116 +2817,90 @@ email = ["email-validator (>=2.0.0)"] [[package]] name = "pydantic-core" -version = "2.14.6" +version = "2.16.1" description = "" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.14.6-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:72f9a942d739f09cd42fffe5dc759928217649f070056f03c70df14f5770acf9"}, - {file = "pydantic_core-2.14.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6a31d98c0d69776c2576dda4b77b8e0c69ad08e8b539c25c7d0ca0dc19a50d6c"}, - {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5aa90562bc079c6c290f0512b21768967f9968e4cfea84ea4ff5af5d917016e4"}, - {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:370ffecb5316ed23b667d99ce4debe53ea664b99cc37bfa2af47bc769056d534"}, - {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f85f3843bdb1fe80e8c206fe6eed7a1caeae897e496542cee499c374a85c6e08"}, - {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9862bf828112e19685b76ca499b379338fd4c5c269d897e218b2ae8fcb80139d"}, - {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:036137b5ad0cb0004c75b579445a1efccd072387a36c7f217bb8efd1afbe5245"}, - {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:92879bce89f91f4b2416eba4429c7b5ca22c45ef4a499c39f0c5c69257522c7c"}, - {file = "pydantic_core-2.14.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0c08de15d50fa190d577e8591f0329a643eeaed696d7771760295998aca6bc66"}, - {file = "pydantic_core-2.14.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:36099c69f6b14fc2c49d7996cbf4f87ec4f0e66d1c74aa05228583225a07b590"}, - {file = "pydantic_core-2.14.6-cp310-none-win32.whl", hash = "sha256:7be719e4d2ae6c314f72844ba9d69e38dff342bc360379f7c8537c48e23034b7"}, - {file = "pydantic_core-2.14.6-cp310-none-win_amd64.whl", hash = "sha256:36fa402dcdc8ea7f1b0ddcf0df4254cc6b2e08f8cd80e7010d4c4ae6e86b2a87"}, - {file = "pydantic_core-2.14.6-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:dea7fcd62915fb150cdc373212141a30037e11b761fbced340e9db3379b892d4"}, - {file = "pydantic_core-2.14.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ffff855100bc066ff2cd3aa4a60bc9534661816b110f0243e59503ec2df38421"}, - {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b027c86c66b8627eb90e57aee1f526df77dc6d8b354ec498be9a757d513b92b"}, - {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:00b1087dabcee0b0ffd104f9f53d7d3eaddfaa314cdd6726143af6bc713aa27e"}, - {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:75ec284328b60a4e91010c1acade0c30584f28a1f345bc8f72fe8b9e46ec6a96"}, - {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e1f4744eea1501404b20b0ac059ff7e3f96a97d3e3f48ce27a139e053bb370b"}, - {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2602177668f89b38b9f84b7b3435d0a72511ddef45dc14446811759b82235a1"}, - {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6c8edaea3089bf908dd27da8f5d9e395c5b4dc092dbcce9b65e7156099b4b937"}, - {file = "pydantic_core-2.14.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:478e9e7b360dfec451daafe286998d4a1eeaecf6d69c427b834ae771cad4b622"}, - {file = "pydantic_core-2.14.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b6ca36c12a5120bad343eef193cc0122928c5c7466121da7c20f41160ba00ba2"}, - {file = "pydantic_core-2.14.6-cp311-none-win32.whl", hash = "sha256:2b8719037e570639e6b665a4050add43134d80b687288ba3ade18b22bbb29dd2"}, - {file = "pydantic_core-2.14.6-cp311-none-win_amd64.whl", hash = "sha256:78ee52ecc088c61cce32b2d30a826f929e1708f7b9247dc3b921aec367dc1b23"}, - {file = "pydantic_core-2.14.6-cp311-none-win_arm64.whl", hash = "sha256:a19b794f8fe6569472ff77602437ec4430f9b2b9ec7a1105cfd2232f9ba355e6"}, - {file = "pydantic_core-2.14.6-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:667aa2eac9cd0700af1ddb38b7b1ef246d8cf94c85637cbb03d7757ca4c3fdec"}, - {file = "pydantic_core-2.14.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cdee837710ef6b56ebd20245b83799fce40b265b3b406e51e8ccc5b85b9099b7"}, - {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c5bcf3414367e29f83fd66f7de64509a8fd2368b1edf4351e862910727d3e51"}, - {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:26a92ae76f75d1915806b77cf459811e772d8f71fd1e4339c99750f0e7f6324f"}, - {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:a983cca5ed1dd9a35e9e42ebf9f278d344603bfcb174ff99a5815f953925140a"}, - {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cb92f9061657287eded380d7dc455bbf115430b3aa4741bdc662d02977e7d0af"}, - {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4ace1e220b078c8e48e82c081e35002038657e4b37d403ce940fa679e57113b"}, - {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ef633add81832f4b56d3b4c9408b43d530dfca29e68fb1b797dcb861a2c734cd"}, - {file = "pydantic_core-2.14.6-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7e90d6cc4aad2cc1f5e16ed56e46cebf4877c62403a311af20459c15da76fd91"}, - {file = "pydantic_core-2.14.6-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e8a5ac97ea521d7bde7621d86c30e86b798cdecd985723c4ed737a2aa9e77d0c"}, - {file = "pydantic_core-2.14.6-cp312-none-win32.whl", hash = "sha256:f27207e8ca3e5e021e2402ba942e5b4c629718e665c81b8b306f3c8b1ddbb786"}, - {file = "pydantic_core-2.14.6-cp312-none-win_amd64.whl", hash = "sha256:b3e5fe4538001bb82e2295b8d2a39356a84694c97cb73a566dc36328b9f83b40"}, - {file = "pydantic_core-2.14.6-cp312-none-win_arm64.whl", hash = "sha256:64634ccf9d671c6be242a664a33c4acf12882670b09b3f163cd00a24cffbd74e"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:24368e31be2c88bd69340fbfe741b405302993242ccb476c5c3ff48aeee1afe0"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:e33b0834f1cf779aa839975f9d8755a7c2420510c0fa1e9fa0497de77cd35d2c"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6af4b3f52cc65f8a0bc8b1cd9676f8c21ef3e9132f21fed250f6958bd7223bed"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d15687d7d7f40333bd8266f3814c591c2e2cd263fa2116e314f60d82086e353a"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:095b707bb287bfd534044166ab767bec70a9bba3175dcdc3371782175c14e43c"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94fc0e6621e07d1e91c44e016cc0b189b48db053061cc22d6298a611de8071bb"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ce830e480f6774608dedfd4a90c42aac4a7af0a711f1b52f807130c2e434c06"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a306cdd2ad3a7d795d8e617a58c3a2ed0f76c8496fb7621b6cd514eb1532cae8"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:2f5fa187bde8524b1e37ba894db13aadd64faa884657473b03a019f625cee9a8"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:438027a975cc213a47c5d70672e0d29776082155cfae540c4e225716586be75e"}, - {file = "pydantic_core-2.14.6-cp37-none-win32.whl", hash = "sha256:f96ae96a060a8072ceff4cfde89d261837b4294a4f28b84a28765470d502ccc6"}, - {file = "pydantic_core-2.14.6-cp37-none-win_amd64.whl", hash = "sha256:e646c0e282e960345314f42f2cea5e0b5f56938c093541ea6dbf11aec2862391"}, - {file = "pydantic_core-2.14.6-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:db453f2da3f59a348f514cfbfeb042393b68720787bbef2b4c6068ea362c8149"}, - {file = "pydantic_core-2.14.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3860c62057acd95cc84044e758e47b18dcd8871a328ebc8ccdefd18b0d26a21b"}, 
- {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36026d8f99c58d7044413e1b819a67ca0e0b8ebe0f25e775e6c3d1fabb3c38fb"}, - {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8ed1af8692bd8d2a29d702f1a2e6065416d76897d726e45a1775b1444f5928a7"}, - {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:314ccc4264ce7d854941231cf71b592e30d8d368a71e50197c905874feacc8a8"}, - {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:982487f8931067a32e72d40ab6b47b1628a9c5d344be7f1a4e668fb462d2da42"}, - {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dbe357bc4ddda078f79d2a36fc1dd0494a7f2fad83a0a684465b6f24b46fe80"}, - {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2f6ffc6701a0eb28648c845f4945a194dc7ab3c651f535b81793251e1185ac3d"}, - {file = "pydantic_core-2.14.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7f5025db12fc6de7bc1104d826d5aee1d172f9ba6ca936bf6474c2148ac336c1"}, - {file = "pydantic_core-2.14.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:dab03ed811ed1c71d700ed08bde8431cf429bbe59e423394f0f4055f1ca0ea60"}, - {file = "pydantic_core-2.14.6-cp38-none-win32.whl", hash = "sha256:dfcbebdb3c4b6f739a91769aea5ed615023f3c88cb70df812849aef634c25fbe"}, - {file = "pydantic_core-2.14.6-cp38-none-win_amd64.whl", hash = "sha256:99b14dbea2fdb563d8b5a57c9badfcd72083f6006caf8e126b491519c7d64ca8"}, - {file = "pydantic_core-2.14.6-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:4ce8299b481bcb68e5c82002b96e411796b844d72b3e92a3fbedfe8e19813eab"}, - {file = "pydantic_core-2.14.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b9a9d92f10772d2a181b5ca339dee066ab7d1c9a34ae2421b2a52556e719756f"}, - {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd9e98b408384989ea4ab60206b8e100d8687da18b5c813c11e92fd8212a98e0"}, - {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4f86f1f318e56f5cbb282fe61eb84767aee743ebe32c7c0834690ebea50c0a6b"}, - {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86ce5fcfc3accf3a07a729779d0b86c5d0309a4764c897d86c11089be61da160"}, - {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dcf1978be02153c6a31692d4fbcc2a3f1db9da36039ead23173bc256ee3b91b"}, - {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eedf97be7bc3dbc8addcef4142f4b4164066df0c6f36397ae4aaed3eb187d8ab"}, - {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d5f916acf8afbcab6bacbb376ba7dc61f845367901ecd5e328fc4d4aef2fcab0"}, - {file = "pydantic_core-2.14.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8a14c192c1d724c3acbfb3f10a958c55a2638391319ce8078cb36c02283959b9"}, - {file = "pydantic_core-2.14.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0348b1dc6b76041516e8a854ff95b21c55f5a411c3297d2ca52f5528e49d8411"}, - {file = "pydantic_core-2.14.6-cp39-none-win32.whl", hash = "sha256:de2a0645a923ba57c5527497daf8ec5df69c6eadf869e9cd46e86349146e5975"}, - {file = "pydantic_core-2.14.6-cp39-none-win_amd64.whl", hash = "sha256:aca48506a9c20f68ee61c87f2008f81f8ee99f8d7f0104bff3c47e2d148f89d9"}, - {file = 
"pydantic_core-2.14.6-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:d5c28525c19f5bb1e09511669bb57353d22b94cf8b65f3a8d141c389a55dec95"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:78d0768ee59baa3de0f4adac9e3748b4b1fffc52143caebddfd5ea2961595277"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b93785eadaef932e4fe9c6e12ba67beb1b3f1e5495631419c784ab87e975670"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a874f21f87c485310944b2b2734cd6d318765bcbb7515eead33af9641816506e"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b89f4477d915ea43b4ceea6756f63f0288941b6443a2b28c69004fe07fde0d0d"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:172de779e2a153d36ee690dbc49c6db568d7b33b18dc56b69a7514aecbcf380d"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:dfcebb950aa7e667ec226a442722134539e77c575f6cfaa423f24371bb8d2e94"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:55a23dcd98c858c0db44fc5c04fc7ed81c4b4d33c653a7c45ddaebf6563a2f66"}, - {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:4241204e4b36ab5ae466ecec5c4c16527a054c69f99bba20f6f75232a6a534e2"}, - {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e574de99d735b3fc8364cba9912c2bec2da78775eba95cbb225ef7dda6acea24"}, - {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1302a54f87b5cd8528e4d6d1bf2133b6aa7c6122ff8e9dc5220fbc1e07bffebd"}, - {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f8e81e4b55930e5ffab4a68db1af431629cf2e4066dbdbfef65348b8ab804ea8"}, - {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:c99462ffc538717b3e60151dfaf91125f637e801f5ab008f81c402f1dff0cd0f"}, - {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e4cf2d5829f6963a5483ec01578ee76d329eb5caf330ecd05b3edd697e7d768a"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:cf10b7d58ae4a1f07fccbf4a0a956d705356fea05fb4c70608bb6fa81d103cda"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:399ac0891c284fa8eb998bcfa323f2234858f5d2efca3950ae58c8f88830f145"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c6a5c79b28003543db3ba67d1df336f253a87d3112dac3a51b94f7d48e4c0e1"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:599c87d79cab2a6a2a9df4aefe0455e61e7d2aeede2f8577c1b7c0aec643ee8e"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:43e166ad47ba900f2542a80d83f9fc65fe99eb63ceec4debec160ae729824052"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3a0b5db001b98e1c649dd55afa928e75aa4087e587b9524a4992316fa23c9fba"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:747265448cb57a9f37572a488a57d873fd96bf51e5bb7edb52cfb37124516da4"}, - {file = 
"pydantic_core-2.14.6-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7ebe3416785f65c28f4f9441e916bfc8a54179c8dea73c23023f7086fa601c5d"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:86c963186ca5e50d5c8287b1d1c9d3f8f024cbe343d048c5bd282aec2d8641f2"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:e0641b506486f0b4cd1500a2a65740243e8670a2549bb02bc4556a83af84ae03"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71d72ca5eaaa8d38c8df16b7deb1a2da4f650c41b58bb142f3fb75d5ad4a611f"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27e524624eace5c59af499cd97dc18bb201dc6a7a2da24bfc66ef151c69a5f2a"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a3dde6cac75e0b0902778978d3b1646ca9f438654395a362cb21d9ad34b24acf"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:00646784f6cd993b1e1c0e7b0fdcbccc375d539db95555477771c27555e3c556"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:23598acb8ccaa3d1d875ef3b35cb6376535095e9405d91a3d57a8c7db5d29341"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7f41533d7e3cf9520065f610b41ac1c76bc2161415955fbcead4981b22c7611e"}, - {file = "pydantic_core-2.14.6.tar.gz", hash = "sha256:1fd0c1d395372843fba13a51c28e3bb9d59bd7aebfeb17358ffaaa1e4dbbe948"}, + {file = "pydantic_core-2.16.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:300616102fb71241ff477a2cbbc847321dbec49428434a2f17f37528721c4948"}, + {file = "pydantic_core-2.16.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5511f962dd1b9b553e9534c3b9c6a4b0c9ded3d8c2be96e61d56f933feef9e1f"}, + {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:98f0edee7ee9cc7f9221af2e1b95bd02810e1c7a6d115cfd82698803d385b28f"}, + {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9795f56aa6b2296f05ac79d8a424e94056730c0b860a62b0fdcfe6340b658cc8"}, + {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c45f62e4107ebd05166717ac58f6feb44471ed450d07fecd90e5f69d9bf03c48"}, + {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:462d599299c5971f03c676e2b63aa80fec5ebc572d89ce766cd11ca8bcb56f3f"}, + {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21ebaa4bf6386a3b22eec518da7d679c8363fb7fb70cf6972161e5542f470798"}, + {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:99f9a50b56713a598d33bc23a9912224fc5d7f9f292444e6664236ae471ddf17"}, + {file = "pydantic_core-2.16.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8ec364e280db4235389b5e1e6ee924723c693cbc98e9d28dc1767041ff9bc388"}, + {file = "pydantic_core-2.16.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:653a5dfd00f601a0ed6654a8b877b18d65ac32c9d9997456e0ab240807be6cf7"}, + {file = "pydantic_core-2.16.1-cp310-none-win32.whl", hash = "sha256:1661c668c1bb67b7cec96914329d9ab66755911d093bb9063c4c8914188af6d4"}, + {file = "pydantic_core-2.16.1-cp310-none-win_amd64.whl", hash = "sha256:561be4e3e952c2f9056fba5267b99be4ec2afadc27261505d4992c50b33c513c"}, + {file = 
"pydantic_core-2.16.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:102569d371fadc40d8f8598a59379c37ec60164315884467052830b28cc4e9da"}, + {file = "pydantic_core-2.16.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:735dceec50fa907a3c314b84ed609dec54b76a814aa14eb90da31d1d36873a5e"}, + {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e83ebbf020be727d6e0991c1b192a5c2e7113eb66e3def0cd0c62f9f266247e4"}, + {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:30a8259569fbeec49cfac7fda3ec8123486ef1b729225222f0d41d5f840b476f"}, + {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:920c4897e55e2881db6a6da151198e5001552c3777cd42b8a4c2f72eedc2ee91"}, + {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f5247a3d74355f8b1d780d0f3b32a23dd9f6d3ff43ef2037c6dcd249f35ecf4c"}, + {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d5bea8012df5bb6dda1e67d0563ac50b7f64a5d5858348b5c8cb5043811c19d"}, + {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ed3025a8a7e5a59817b7494686d449ebfbe301f3e757b852c8d0d1961d6be864"}, + {file = "pydantic_core-2.16.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:06f0d5a1d9e1b7932477c172cc720b3b23c18762ed7a8efa8398298a59d177c7"}, + {file = "pydantic_core-2.16.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:150ba5c86f502c040b822777e2e519b5625b47813bd05f9273a8ed169c97d9ae"}, + {file = "pydantic_core-2.16.1-cp311-none-win32.whl", hash = "sha256:d6cbdf12ef967a6aa401cf5cdf47850559e59eedad10e781471c960583f25aa1"}, + {file = "pydantic_core-2.16.1-cp311-none-win_amd64.whl", hash = "sha256:afa01d25769af33a8dac0d905d5c7bb2d73c7c3d5161b2dd6f8b5b5eea6a3c4c"}, + {file = "pydantic_core-2.16.1-cp311-none-win_arm64.whl", hash = "sha256:1a2fe7b00a49b51047334d84aafd7e39f80b7675cad0083678c58983662da89b"}, + {file = "pydantic_core-2.16.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:0f478ec204772a5c8218e30eb813ca43e34005dff2eafa03931b3d8caef87d51"}, + {file = "pydantic_core-2.16.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f1936ef138bed2165dd8573aa65e3095ef7c2b6247faccd0e15186aabdda7f66"}, + {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99d3a433ef5dc3021c9534a58a3686c88363c591974c16c54a01af7efd741f13"}, + {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bd88f40f2294440d3f3c6308e50d96a0d3d0973d6f1a5732875d10f569acef49"}, + {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3fac641bbfa43d5a1bed99d28aa1fded1984d31c670a95aac1bf1d36ac6ce137"}, + {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:72bf9308a82b75039b8c8edd2be2924c352eda5da14a920551a8b65d5ee89253"}, + {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb4363e6c9fc87365c2bc777a1f585a22f2f56642501885ffc7942138499bf54"}, + {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:20f724a023042588d0f4396bbbcf4cffd0ddd0ad3ed4f0d8e6d4ac4264bae81e"}, + {file = "pydantic_core-2.16.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = 
"sha256:fb4370b15111905bf8b5ba2129b926af9470f014cb0493a67d23e9d7a48348e8"}, + {file = "pydantic_core-2.16.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23632132f1fd608034f1a56cc3e484be00854db845b3a4a508834be5a6435a6f"}, + {file = "pydantic_core-2.16.1-cp312-none-win32.whl", hash = "sha256:b9f3e0bffad6e238f7acc20c393c1ed8fab4371e3b3bc311020dfa6020d99212"}, + {file = "pydantic_core-2.16.1-cp312-none-win_amd64.whl", hash = "sha256:a0b4cfe408cd84c53bab7d83e4209458de676a6ec5e9c623ae914ce1cb79b96f"}, + {file = "pydantic_core-2.16.1-cp312-none-win_arm64.whl", hash = "sha256:d195add190abccefc70ad0f9a0141ad7da53e16183048380e688b466702195dd"}, + {file = "pydantic_core-2.16.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:502c062a18d84452858f8aea1e520e12a4d5228fc3621ea5061409d666ea1706"}, + {file = "pydantic_core-2.16.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d8c032ccee90b37b44e05948b449a2d6baed7e614df3d3f47fe432c952c21b60"}, + {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:920f4633bee43d7a2818e1a1a788906df5a17b7ab6fe411220ed92b42940f818"}, + {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9f5d37ff01edcbace53a402e80793640c25798fb7208f105d87a25e6fcc9ea06"}, + {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:399166f24c33a0c5759ecc4801f040dbc87d412c1a6d6292b2349b4c505effc9"}, + {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ac89ccc39cd1d556cc72d6752f252dc869dde41c7c936e86beac5eb555041b66"}, + {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73802194f10c394c2bedce7a135ba1d8ba6cff23adf4217612bfc5cf060de34c"}, + {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8fa00fa24ffd8c31fac081bf7be7eb495be6d248db127f8776575a746fa55c95"}, + {file = "pydantic_core-2.16.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:601d3e42452cd4f2891c13fa8c70366d71851c1593ed42f57bf37f40f7dca3c8"}, + {file = "pydantic_core-2.16.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:07982b82d121ed3fc1c51faf6e8f57ff09b1325d2efccaa257dd8c0dd937acca"}, + {file = "pydantic_core-2.16.1-cp38-none-win32.whl", hash = "sha256:d0bf6f93a55d3fa7a079d811b29100b019784e2ee6bc06b0bb839538272a5610"}, + {file = "pydantic_core-2.16.1-cp38-none-win_amd64.whl", hash = "sha256:fbec2af0ebafa57eb82c18c304b37c86a8abddf7022955d1742b3d5471a6339e"}, + {file = "pydantic_core-2.16.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a497be217818c318d93f07e14502ef93d44e6a20c72b04c530611e45e54c2196"}, + {file = "pydantic_core-2.16.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:694a5e9f1f2c124a17ff2d0be613fd53ba0c26de588eb4bdab8bca855e550d95"}, + {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d4dfc66abea3ec6d9f83e837a8f8a7d9d3a76d25c9911735c76d6745950e62c"}, + {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8655f55fe68c4685673265a650ef71beb2d31871c049c8b80262026f23605ee3"}, + {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:21e3298486c4ea4e4d5cc6fb69e06fb02a4e22089304308817035ac006a7f506"}, + {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:71b4a48a7427f14679f0015b13c712863d28bb1ab700bd11776a5368135c7d60"}, + {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10dca874e35bb60ce4f9f6665bfbfad050dd7573596608aeb9e098621ac331dc"}, + {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fa496cd45cda0165d597e9d6f01e36c33c9508f75cf03c0a650018c5048f578e"}, + {file = "pydantic_core-2.16.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5317c04349472e683803da262c781c42c5628a9be73f4750ac7d13040efb5d2d"}, + {file = "pydantic_core-2.16.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:42c29d54ed4501a30cd71015bf982fa95e4a60117b44e1a200290ce687d3e640"}, + {file = "pydantic_core-2.16.1-cp39-none-win32.whl", hash = "sha256:ba07646f35e4e49376c9831130039d1b478fbfa1215ae62ad62d2ee63cf9c18f"}, + {file = "pydantic_core-2.16.1-cp39-none-win_amd64.whl", hash = "sha256:2133b0e412a47868a358713287ff9f9a328879da547dc88be67481cdac529118"}, + {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:d25ef0c33f22649b7a088035fd65ac1ce6464fa2876578df1adad9472f918a76"}, + {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:99c095457eea8550c9fa9a7a992e842aeae1429dab6b6b378710f62bfb70b394"}, + {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b49c604ace7a7aa8af31196abbf8f2193be605db6739ed905ecaf62af31ccae0"}, + {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c56da23034fe66221f2208c813d8aa509eea34d97328ce2add56e219c3a9f41c"}, + {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cebf8d56fee3b08ad40d332a807ecccd4153d3f1ba8231e111d9759f02edfd05"}, + {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:1ae8048cba95f382dba56766525abca438328455e35c283bb202964f41a780b0"}, + {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:780daad9e35b18d10d7219d24bfb30148ca2afc309928e1d4d53de86822593dc"}, + {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c94b5537bf6ce66e4d7830c6993152940a188600f6ae044435287753044a8fe2"}, + {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:adf28099d061a25fbcc6531febb7a091e027605385de9fe14dd6a97319d614cf"}, + {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:644904600c15816a1f9a1bafa6aab0d21db2788abcdf4e2a77951280473f33e1"}, + {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87bce04f09f0552b66fca0c4e10da78d17cb0e71c205864bab4e9595122cb9d9"}, + {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:877045a7969ace04d59516d5d6a7dee13106822f99a5d8df5e6822941f7bedc8"}, + {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9c46e556ee266ed3fb7b7a882b53df3c76b45e872fdab8d9cf49ae5e91147fd7"}, + {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4eebbd049008eb800f519578e944b8dc8e0f7d59a5abb5924cc2d4ed3a1834ff"}, + {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:c0be58529d43d38ae849a91932391eb93275a06b93b79a8ab828b012e916a206"}, + {file = 
"pydantic_core-2.16.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:b1fc07896fc1851558f532dffc8987e526b682ec73140886c831d773cef44b76"}, + {file = "pydantic_core-2.16.1.tar.gz", hash = "sha256:daff04257b49ab7f4b3f73f98283d3dbb1a65bf3500d55c7beac3c66c310fe34"}, ] [package.dependencies] From 7b4aff17a9ee186a9df7c5b23cb3f9e10428f819 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 30 Jan 2024 07:04:12 +0100 Subject: [PATCH 056/169] Build: Bump mkdocs-material from 9.5.5 to 9.5.6 (#318) Bumps [mkdocs-material](https://github.com/squidfunk/mkdocs-material) from 9.5.5 to 9.5.6. - [Release notes](https://github.com/squidfunk/mkdocs-material/releases) - [Changelog](https://github.com/squidfunk/mkdocs-material/blob/master/CHANGELOG) - [Commits](https://github.com/squidfunk/mkdocs-material/compare/9.5.5...9.5.6) --- updated-dependencies: - dependency-name: mkdocs-material dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- mkdocs/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mkdocs/requirements.txt b/mkdocs/requirements.txt index fd5c182572..dc0db9763a 100644 --- a/mkdocs/requirements.txt +++ b/mkdocs/requirements.txt @@ -23,6 +23,6 @@ mkdocstrings-python==1.8.0 mkdocs-literate-nav==0.6.1 mkdocs-autorefs==0.5.0 mkdocs-gen-files==0.5.0 -mkdocs-material==9.5.5 +mkdocs-material==9.5.6 mkdocs-material-extensions==1.3.1 mkdocs-section-index==0.3.8 From 4f62d43c8e20022679e8651c6f2f547535940fad Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 30 Jan 2024 07:04:29 +0100 Subject: [PATCH 057/169] Build: Bump pypa/cibuildwheel from 2.16.3 to 2.16.4 (#322) Bumps [pypa/cibuildwheel](https://github.com/pypa/cibuildwheel) from 2.16.3 to 2.16.4. - [Release notes](https://github.com/pypa/cibuildwheel/releases) - [Changelog](https://github.com/pypa/cibuildwheel/blob/main/docs/changelog.md) - [Commits](https://github.com/pypa/cibuildwheel/compare/v2.16.3...v2.16.4) --- updated-dependencies: - dependency-name: pypa/cibuildwheel dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/python-release.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/python-release.yml b/.github/workflows/python-release.yml index 219434664e..ad8f74a692 100644 --- a/.github/workflows/python-release.yml +++ b/.github/workflows/python-release.yml @@ -59,7 +59,7 @@ jobs: if: startsWith(matrix.os, 'ubuntu') - name: Build wheels - uses: pypa/cibuildwheel@v2.16.3 + uses: pypa/cibuildwheel@v2.16.4 with: output-dir: wheelhouse config-file: "pyproject.toml" From 2836c4a85708d4e00fe1fa35f880728dfe074b60 Mon Sep 17 00:00:00 2001 From: Fokko Driesprong Date: Tue, 30 Jan 2024 08:51:30 +0100 Subject: [PATCH 058/169] Small getting started guide on writes (#311) --- mkdocs/docs/SUMMARY.md | 2 +- mkdocs/docs/contributing.md | 16 ++++ mkdocs/docs/index.md | 144 +++++++++++++++++++++++++++++------- pyiceberg/table/__init__.py | 6 +- tests/test_schema.py | 21 ++++++ 5 files changed, 159 insertions(+), 30 deletions(-) diff --git a/mkdocs/docs/SUMMARY.md b/mkdocs/docs/SUMMARY.md index 5504d2ac7b..40ba0bffd7 100644 --- a/mkdocs/docs/SUMMARY.md +++ b/mkdocs/docs/SUMMARY.md @@ -17,7 +17,7 @@ -- [Home](index.md) +- [Getting started](index.md) - [Configuration](configuration.md) - [CLI](cli.md) - [API](api.md) diff --git a/mkdocs/docs/contributing.md b/mkdocs/docs/contributing.md index 8ec6dcb2d2..7411382d2c 100644 --- a/mkdocs/docs/contributing.md +++ b/mkdocs/docs/contributing.md @@ -58,6 +58,22 @@ For IDEA ≤2021 you need to install the [Poetry integration as a plugin](https: Now you're set using Poetry, and all the tests will run in Poetry, and you'll have syntax highlighting in the pyproject.toml to indicate stale dependencies. +## Installation from source + +Clone the repository for local development: + +```sh +git clone https://github.com/apache/iceberg-python.git +cd iceberg-python +pip3 install -e ".[s3fs,hive]" +``` + +Install it directly from GitHub (not recommended, but sometimes handy): + +``` +pip install "git+https://github.com/apache/iceberg-python.git#egg=pyiceberg[s3fs]" +``` + ## Linting `pre-commit` is used for autoformatting and linting: diff --git a/mkdocs/docs/index.md b/mkdocs/docs/index.md index 628f4f7dd4..d82aa658eb 100644 --- a/mkdocs/docs/index.md +++ b/mkdocs/docs/index.md @@ -20,11 +20,11 @@ hide: - limitations under the License. --> -# PyIceberg +# Getting started with PyIceberg PyIceberg is a Python implementation for accessing Iceberg tables, without the need of a JVM. 
-## Install +## Installation Before installing PyIceberg, make sure that you're on an up-to-date version of `pip`: @@ -38,36 +38,126 @@ You can install the latest release version from pypi: pip install "pyiceberg[s3fs,hive]" ``` -Install it directly for GitHub (not recommended), but sometimes handy: +You can mix and match optional dependencies depending on your needs: + +| Key | Description | +| ------------ | -------------------------------------------------------------------- | +| hive | Support for the Hive metastore | +| glue | Support for AWS Glue | +| dynamodb | Support for AWS DynamoDB | +| sql-postgres | Support for SQL Catalog backed by PostgreSQL | +| sql-sqlite | Support for SQL Catalog backed by SQLite | +| pyarrow | PyArrow as a FileIO implementation to interact with the object store | +| pandas | Installs both PyArrow and Pandas | +| duckdb | Installs both PyArrow and DuckDB | +| ray | Installs PyArrow, Pandas, and Ray | +| s3fs | S3FS as a FileIO implementation to interact with the object store | +| adlfs | ADLFS as a FileIO implementation to interact with the object store | +| snappy | Support for snappy Avro compression | +| gcs | GCS as the FileIO implementation to interact with the object store | + +You need to install at least one of `s3fs`, `adlfs`, `gcs`, or `pyarrow` to be able to fetch files from an object store. + +## Connecting to a catalog + +Iceberg leverages the [catalog to provide one centralized place to organize the tables](https://iceberg.apache.org/catalog/). This can be a traditional Hive catalog to store your Iceberg tables next to the rest, a vendor solution like the AWS Glue catalog, or an implementation of Iceberg's own [REST protocol](https://github.com/apache/iceberg/tree/main/open-api). Check out the [configuration](configuration.md) page to find all the configuration details. + +## Write a PyArrow dataframe + +Let's take the Taxi dataset and write it to an Iceberg table. + +First download one month of data: + +```shell +curl https://d37ci6vzurychx.cloudfront.net/trip-data/yellow_tripdata_2023-01.parquet -o /tmp/yellow_tripdata_2023-01.parquet +``` + +Load it into your PyArrow dataframe: + +```python +import pyarrow.parquet as pq +df = pq.read_table("/tmp/yellow_tripdata_2023-01.parquet") ``` -pip install "git+https://github.com/apache/iceberg-python.git#egg=pyiceberg[s3fs]" + +Create a new Iceberg table: + +```python +from pyiceberg.catalog import load_catalog + +catalog = load_catalog("default") + +table = catalog.create_table( + "default.taxi_dataset", + schema=df.schema, +) ``` -Or clone the repository for local development: +Append the dataframe to the table: -```sh -git clone https://github.com/apache/iceberg-python.git -cd iceberg-python -pip3 install -e ".[s3fs,hive]" +```python +table.append(df) +len(table.scan().to_arrow()) ``` -You can mix and match optional dependencies depending on your needs: +3066766 rows have been written to the table. 
+ +Now generate a tip-per-mile feature to train a model on: + +```python +import pyarrow.compute as pc + +df = df.append_column("tip_per_mile", pc.divide(df["tip_amount"], df["trip_distance"])) +``` + +Evolve the schema of the table with the new column: + +```python +with table.update_schema() as update_schema: + update_schema.union_by_name(df.schema) +``` + +And now we can write the new dataframe to the Iceberg table: + +```python +table.overwrite(df) +print(table.scan().to_arrow()) +``` + +And the new column is there: + +``` +taxi_dataset( + 1: VendorID: optional long, + 2: tpep_pickup_datetime: optional timestamp, + 3: tpep_dropoff_datetime: optional timestamp, + 4: passenger_count: optional double, + 5: trip_distance: optional double, + 6: RatecodeID: optional double, + 7: store_and_fwd_flag: optional string, + 8: PULocationID: optional long, + 9: DOLocationID: optional long, + 10: payment_type: optional long, + 11: fare_amount: optional double, + 12: extra: optional double, + 13: mta_tax: optional double, + 14: tip_amount: optional double, + 15: tolls_amount: optional double, + 16: improvement_surcharge: optional double, + 17: total_amount: optional double, + 18: congestion_surcharge: optional double, + 19: airport_fee: optional double, + 20: tip_per_mile: optional double +), +``` + +And we can see that 2371784 rows have a tip-per-mile value: + +```python +df = table.scan(row_filter="tip_per_mile > 0").to_arrow() +len(df) +``` + +## More details -| Key | Description: | -| -------- | -------------------------------------------------------------------- | -| hive | Support for the Hive metastore | -| glue | Support for AWS Glue | -| dynamodb | Support for AWS DynamoDB | -| pyarrow | PyArrow as a FileIO implementation to interact with the object store | -| pandas | Installs both PyArrow and Pandas | -| duckdb | Installs both PyArrow and DuckDB | -| ray | Installs PyArrow, Pandas, and Ray | -| s3fs | S3FS as a FileIO implementation to interact with the object store | -| adlfs | ADLFS as a FileIO implementation to interact with the object store | -| snappy | Support for snappy Avro compression | -| gcs | GCS as the FileIO implementation to interact with the object store | - -You either need to install `s3fs`, `adlfs`, `gcs`, or `pyarrow` for fetching files. - -There is both a [CLI](cli.md) and [Python API](api.md) available. +For more details, please check the [CLI](cli.md) or [Python API](api.md) pages. 
diff --git a/pyiceberg/table/__init__.py b/pyiceberg/table/__init__.py index 26eecefd0f..16ed9ed292 100644 --- a/pyiceberg/table/__init__.py +++ b/pyiceberg/table/__init__.py @@ -1477,9 +1477,11 @@ def case_sensitive(self, case_sensitive: bool) -> UpdateSchema: self._case_sensitive = case_sensitive return self - def union_by_name(self, new_schema: Schema) -> UpdateSchema: + def union_by_name(self, new_schema: Union[Schema, "pa.Schema"]) -> UpdateSchema: + from pyiceberg.catalog import Catalog + visit_with_partner( - new_schema, + Catalog._convert_schema_if_needed(new_schema), -1, UnionByNameVisitor(update_schema=self, existing_schema=self._schema, case_sensitive=self._case_sensitive), # type: ignore PartnerIdByNameAccessor(partner_schema=self._schema, case_sensitive=self._case_sensitive), diff --git a/tests/test_schema.py b/tests/test_schema.py index a5487b7fd9..cfee6e7f14 100644 --- a/tests/test_schema.py +++ b/tests/test_schema.py @@ -18,6 +18,7 @@ from textwrap import dedent from typing import Any, Dict, List +import pyarrow as pa import pytest from pyiceberg import schema @@ -1579,3 +1580,23 @@ def test_append_nested_lists() -> None: ) assert union.as_struct() == expected.as_struct() + + +def test_union_with_pa_schema(primitive_fields: NestedField) -> None: + base_schema = Schema(NestedField(field_id=1, name="foo", field_type=StringType(), required=True)) + + pa_schema = pa.schema([ + pa.field("foo", pa.string(), nullable=False), + pa.field("bar", pa.int32(), nullable=True), + pa.field("baz", pa.bool_(), nullable=True), + ]) + + new_schema = UpdateSchema(None, schema=base_schema).union_by_name(pa_schema)._apply() + + expected_schema = Schema( + NestedField(field_id=1, name="foo", field_type=StringType(), required=True), + NestedField(field_id=2, name="bar", field_type=IntegerType(), required=False), + NestedField(field_id=3, name="baz", field_type=BooleanType(), required=False), + ) + + assert new_schema == expected_schema From 43559128e9724eadf6b3523e1432e2801af03ae9 Mon Sep 17 00:00:00 2001 From: Fokko Driesprong Date: Tue, 30 Jan 2024 18:48:40 +0100 Subject: [PATCH 059/169] Use `.read_bytes()` instead (#325) --- pyiceberg/avro/file.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/pyiceberg/avro/file.py b/pyiceberg/avro/file.py index a8befd9f91..2f21e165b4 100644 --- a/pyiceberg/avro/file.py +++ b/pyiceberg/avro/file.py @@ -194,8 +194,7 @@ def _read_block(self) -> int: raise ValueError(f"Expected sync bytes {self.header.sync!r}, but got {sync_marker!r}") block_records = self.decoder.read_int() - block_bytes_len = self.decoder.read_int() - block_bytes = self.decoder.read(block_bytes_len) + block_bytes = self.decoder.read_bytes() if codec := self.header.compression_codec(): block_bytes = codec.decompress(block_bytes) From f69b2317bd28ae32e79d4487c7213f1b3b6ad469 Mon Sep 17 00:00:00 2001 From: Fokko Driesprong Date: Tue, 30 Jan 2024 20:21:21 +0100 Subject: [PATCH 060/169] REST: Set fresh IDs on create-table (#327) For the other implementation we do this when writing the metadata, but for the REST catalog we just post the schema itself. The REST spec does not mention anything about setting fresh IDs, so it is best to do it ourselves. 
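A minimal sketch of the effect, for context (not part of the patch; the field names and IDs below are invented for illustration):

```python
from pyiceberg.schema import Schema, assign_fresh_schema_ids
from pyiceberg.types import NestedField, StringType

# A caller-supplied schema may carry arbitrary (possibly stale or colliding) field IDs.
client_schema = Schema(
    NestedField(field_id=100, name="foo", field_type=StringType(), required=True),
    NestedField(field_id=7, name="bar", field_type=StringType(), required=False),
)

# Reassigning fresh IDs before posting the create-table request means the
# catalog receives consecutive field IDs instead of the caller's values.
fresh_schema = assign_fresh_schema_ids(client_schema)
print([field.field_id for field in fresh_schema.fields])  # [1, 2]
```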
--- pyiceberg/catalog/rest.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/pyiceberg/catalog/rest.py b/pyiceberg/catalog/rest.py index 34d75b5936..765f04b128 100644 --- a/pyiceberg/catalog/rest.py +++ b/pyiceberg/catalog/rest.py @@ -58,7 +58,7 @@ UnauthorizedError, ) from pyiceberg.partitioning import UNPARTITIONED_PARTITION_SPEC, PartitionSpec -from pyiceberg.schema import Schema +from pyiceberg.schema import Schema, assign_fresh_schema_ids from pyiceberg.table import ( CommitTableRequest, CommitTableResponse, @@ -447,13 +447,14 @@ def create_table( sort_order: SortOrder = UNSORTED_SORT_ORDER, properties: Properties = EMPTY_DICT, ) -> Table: - schema: Schema = self._convert_schema_if_needed(schema) # type: ignore + iceberg_schema = self._convert_schema_if_needed(schema) + iceberg_schema = assign_fresh_schema_ids(iceberg_schema) namespace_and_table = self._split_identifier_for_path(identifier) request = CreateTableRequest( name=namespace_and_table["table"], location=location, - table_schema=schema, + table_schema=iceberg_schema, partition_spec=partition_spec, write_order=sort_order, properties=properties, From 7f7bb03a98facb0869c1ede52320780184f3746c Mon Sep 17 00:00:00 2001 From: Fokko Driesprong Date: Tue, 30 Jan 2024 21:36:28 +0100 Subject: [PATCH 061/169] Add PyArrow for the Cython decoder tests (#329) We now include PyArrow in the `conftest.py`. --- .github/workflows/python-release.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/python-release.yml b/.github/workflows/python-release.yml index ad8f74a692..b591da2f9b 100644 --- a/.github/workflows/python-release.yml +++ b/.github/workflows/python-release.yml @@ -68,7 +68,7 @@ jobs: CIBW_ARCHS: "auto64" CIBW_PROJECT_REQUIRES_PYTHON: ">=3.8,<3.12" CIBW_TEST_REQUIRES: "pytest==7.4.2 moto==4.2.2" - CIBW_TEST_EXTRAS: "s3fs,glue" + CIBW_TEST_EXTRAS: "s3fs,glue,pyarrow" CIBW_TEST_COMMAND: "pytest {project}/tests/avro/test_decoder.py" # There is an upstream issue with installing on MacOSX # https://github.com/pypa/cibuildwheel/issues/1603 From a750b4bf75cb960c272a9e3b8530fb45ff5f9c09 Mon Sep 17 00:00:00 2001 From: Fokko Driesprong Date: Tue, 30 Jan 2024 22:42:32 +0100 Subject: [PATCH 062/169] Remove `MotoServer` from the `conftest.py` scope (#331) Otherwise we need to have this installed for the tests of the Avro Decoder --- tests/conftest.py | 33 +++++++++++---------------------- 1 file changed, 11 insertions(+), 22 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index d9a8dfdf07..86c7f1cd33 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -40,7 +40,6 @@ Generator, List, Optional, - Union, ) from urllib.parse import urlparse @@ -48,7 +47,6 @@ import pyarrow as pa import pytest from moto import mock_dynamodb, mock_glue -from moto.server import ThreadedMotoServer # type: ignore from pyiceberg import schema from pyiceberg.catalog import Catalog @@ -87,7 +85,7 @@ from pyiceberg.utils.datetime import datetime_to_millis if TYPE_CHECKING: - from pytest import ExitCode, Session + from moto.server import ThreadedMotoServer # type: ignore from pyiceberg.io.pyarrow import PyArrowFile, PyArrowFileIO @@ -1761,32 +1759,23 @@ def fixture_aws_credentials() -> Generator[None, None, None]: os.environ.pop("AWS_DEFAULT_REGION") -MOTO_SERVER = ThreadedMotoServer(ip_address="localhost", port=5001) - - -def pytest_sessionfinish( - session: "Session", - exitstatus: Union[int, "ExitCode"], -) -> None: - if MOTO_SERVER._server_ready: - MOTO_SERVER.stop() - - 
@pytest.fixture(scope="session") -def moto_server() -> ThreadedMotoServer: - # this will throw an exception if the port is already in use - is_port_in_use(MOTO_SERVER._ip_address, MOTO_SERVER._port) - MOTO_SERVER.start() - return MOTO_SERVER +def moto_server() -> "ThreadedMotoServer": + from moto.server import ThreadedMotoServer + server = ThreadedMotoServer(ip_address="localhost", port=5001) -def is_port_in_use(ip_address: str, port: int) -> None: + # this will throw an exception if the port is already in use with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: - s.bind((ip_address, port)) + s.bind((server._ip_address, server._port)) + + server.start() + yield server + server.stop() @pytest.fixture(scope="session") -def moto_endpoint_url(moto_server: ThreadedMotoServer) -> str: +def moto_endpoint_url(moto_server: "ThreadedMotoServer") -> str: _url = f"http://{moto_server._ip_address}:{moto_server._port}" return _url From 8a2a00e6e922177ddb0e1bdbd2a69aeaf3abc060 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 30 Jan 2024 23:38:48 +0100 Subject: [PATCH 063/169] Build: Bump griffe from 0.39.1 to 0.40.0 (#333) Bumps [griffe](https://github.com/mkdocstrings/griffe) from 0.39.1 to 0.40.0. - [Release notes](https://github.com/mkdocstrings/griffe/releases) - [Changelog](https://github.com/mkdocstrings/griffe/blob/main/CHANGELOG.md) - [Commits](https://github.com/mkdocstrings/griffe/compare/0.39.1...0.40.0) --- updated-dependencies: - dependency-name: griffe dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- mkdocs/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mkdocs/requirements.txt b/mkdocs/requirements.txt index dc0db9763a..c89ac82c74 100644 --- a/mkdocs/requirements.txt +++ b/mkdocs/requirements.txt @@ -16,7 +16,7 @@ # under the License. mkdocs==1.5.3 -griffe==0.39.1 +griffe==0.40.0 jinja2==3.1.3 mkdocstrings==0.24.0 mkdocstrings-python==1.8.0 From 102e043b182a0b7bf4c9f66c5d77b24eedbd3766 Mon Sep 17 00:00:00 2001 From: Fokko Driesprong Date: Wed, 31 Jan 2024 05:49:18 +0100 Subject: [PATCH 064/169] The name needs to be unique per run (#332) --- .github/workflows/python-release.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/python-release.yml b/.github/workflows/python-release.yml index b591da2f9b..52f523da93 100644 --- a/.github/workflows/python-release.yml +++ b/.github/workflows/python-release.yml @@ -82,5 +82,5 @@ jobs: - uses: actions/upload-artifact@v4 with: - name: "release-${{ github.event.inputs.version }}" + name: "release-${{ matrix.os }}-${{ github.event.inputs.version }}" path: ./wheelhouse/* From b7cf14e3b195064d8740b8f2ab306d8729e54dc2 Mon Sep 17 00:00:00 2001 From: Fokko Driesprong Date: Wed, 31 Jan 2024 08:55:52 +0100 Subject: [PATCH 065/169] Allow running the Avro decoder tests without Arrow (#335) * Allow running the Avro decoder tests without Arrow It looks like PyArrow binaries are not available for all architectures, and therefore it tries to compile it locally when it isn't available. We want to avoid this since Arrow requires a rather complicated buildchain. 
* Revert unnecessary move --- .github/workflows/python-release.yml | 2 +- tests/conftest.py | 10 +++++++--- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/.github/workflows/python-release.yml b/.github/workflows/python-release.yml index 52f523da93..2d51aa3d17 100644 --- a/.github/workflows/python-release.yml +++ b/.github/workflows/python-release.yml @@ -68,7 +68,7 @@ jobs: CIBW_ARCHS: "auto64" CIBW_PROJECT_REQUIRES_PYTHON: ">=3.8,<3.12" CIBW_TEST_REQUIRES: "pytest==7.4.2 moto==4.2.2" - CIBW_TEST_EXTRAS: "s3fs,glue,pyarrow" + CIBW_TEST_EXTRAS: "s3fs,glue" CIBW_TEST_COMMAND: "pytest {project}/tests/avro/test_decoder.py" # There is an upstream issue with installing on MacOSX # https://github.com/pypa/cibuildwheel/issues/1603 diff --git a/tests/conftest.py b/tests/conftest.py index 86c7f1cd33..063c0646fd 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -44,7 +44,6 @@ from urllib.parse import urlparse import boto3 -import pyarrow as pa import pytest from moto import mock_dynamodb, mock_glue @@ -85,6 +84,7 @@ from pyiceberg.utils.datetime import datetime_to_millis if TYPE_CHECKING: + import pyarrow as pa from moto.server import ThreadedMotoServer # type: ignore from pyiceberg.io.pyarrow import PyArrowFile, PyArrowFileIO @@ -267,7 +267,9 @@ def table_schema_nested_with_struct_key_map() -> Schema: @pytest.fixture(scope="session") -def pyarrow_schema_simple_without_ids() -> pa.Schema: +def pyarrow_schema_simple_without_ids() -> "pa.Schema": + import pyarrow as pa + return pa.schema([ pa.field('foo', pa.string(), nullable=True), pa.field('bar', pa.int32(), nullable=False), @@ -276,7 +278,9 @@ def pyarrow_schema_simple_without_ids() -> pa.Schema: @pytest.fixture(scope="session") -def pyarrow_schema_nested_without_ids() -> pa.Schema: +def pyarrow_schema_nested_without_ids() -> "pa.Schema": + import pyarrow as pa + return pa.schema([ pa.field('foo', pa.string(), nullable=False), pa.field('bar', pa.int32(), nullable=False), From a3cdba5a598b3088c4728bc841b4f59183e5da31 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 1 Feb 2024 08:28:20 +0100 Subject: [PATCH 066/169] Build: Bump pypa/cibuildwheel from 2.16.4 to 2.16.5 (#343) Bumps [pypa/cibuildwheel](https://github.com/pypa/cibuildwheel) from 2.16.4 to 2.16.5. - [Release notes](https://github.com/pypa/cibuildwheel/releases) - [Changelog](https://github.com/pypa/cibuildwheel/blob/main/docs/changelog.md) - [Commits](https://github.com/pypa/cibuildwheel/compare/v2.16.4...v2.16.5) --- updated-dependencies: - dependency-name: pypa/cibuildwheel dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/python-release.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/python-release.yml b/.github/workflows/python-release.yml index 2d51aa3d17..03e27f012e 100644 --- a/.github/workflows/python-release.yml +++ b/.github/workflows/python-release.yml @@ -59,7 +59,7 @@ jobs: if: startsWith(matrix.os, 'ubuntu') - name: Build wheels - uses: pypa/cibuildwheel@v2.16.4 + uses: pypa/cibuildwheel@v2.16.5 with: output-dir: wheelhouse config-file: "pyproject.toml" From b1e33d48bc2c06a812a59c14e37ef73da9968f50 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 1 Feb 2024 08:28:57 +0100 Subject: [PATCH 067/169] Build: Bump mypy-boto3-glue from 1.34.7 to 1.34.32 (#342) Bumps [mypy-boto3-glue](https://github.com/youtype/mypy_boto3_builder) from 1.34.7 to 1.34.32. - [Release notes](https://github.com/youtype/mypy_boto3_builder/releases) - [Commits](https://github.com/youtype/mypy_boto3_builder/commits) --- updated-dependencies: - dependency-name: mypy-boto3-glue dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/poetry.lock b/poetry.lock index f8d7a138e3..ff67dd28c6 100644 --- a/poetry.lock +++ b/poetry.lock @@ -48,7 +48,7 @@ boto3 = ["boto3 (>=1.33.2,<1.34.28)"] name = "aiohttp" version = "3.9.2" description = "Async http client/server framework (asyncio)" -optional = false +optional = true python-versions = ">=3.8" files = [ {file = "aiohttp-3.9.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:772fbe371788e61c58d6d3d904268e48a594ba866804d08c995ad71b144f94cb"}, @@ -158,7 +158,7 @@ typing_extensions = {version = ">=4.0", markers = "python_version < \"3.10\""} name = "aiosignal" version = "1.3.1" description = "aiosignal: a list of registered asynchronous callbacks" -optional = false +optional = true python-versions = ">=3.7" files = [ {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, @@ -186,7 +186,7 @@ typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.9\""} name = "async-timeout" version = "4.0.3" description = "Timeout context manager for asyncio programs" -optional = false +optional = true python-versions = ">=3.7" files = [ {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, @@ -1064,7 +1064,7 @@ Flask = ">=0.9" name = "frozenlist" version = "1.4.1" description = "A list-like structure which implements collections.abc.MutableSequence" -optional = false +optional = true python-versions = ">=3.8" files = [ {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac"}, @@ -2149,7 +2149,7 @@ files = [ name = "multidict" version = "6.0.4" description = "multidict implementation" -optional = false +optional = true python-versions = ">=3.7" files = [ {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8"}, @@ -2230,13 +2230,13 @@ files = [ [[package]] name = "mypy-boto3-glue" -version = 
"1.34.7" -description = "Type annotations for boto3.Glue 1.34.7 service generated with mypy-boto3-builder 7.23.0" +version = "1.34.32" +description = "Type annotations for boto3.Glue 1.34.32 service generated with mypy-boto3-builder 7.23.1" optional = true python-versions = ">=3.8" files = [ - {file = "mypy-boto3-glue-1.34.7.tar.gz", hash = "sha256:057a082527e3884ec72b499573094af9dbe0bd3639b26772ec883a17d2c22d12"}, - {file = "mypy_boto3_glue-1.34.7-py3-none-any.whl", hash = "sha256:59e3cced5c3367f0eb2136b62efeb7914f2779a17e574e3c965d5567c7c63de1"}, + {file = "mypy-boto3-glue-1.34.32.tar.gz", hash = "sha256:f23fddd06613d699ff7a22bb4fa79478c903e07cd5dd32e70278da49d6212e65"}, + {file = "mypy_boto3_glue-1.34.32-py3-none-any.whl", hash = "sha256:7a1a0e4dfab2870cff5b0984c39d3fa4fd1820ea6fb40f807b7150e4d624d9c9"}, ] [package.dependencies] @@ -4099,7 +4099,7 @@ files = [ name = "yarl" version = "1.9.4" description = "Yet another URL library" -optional = false +optional = true python-versions = ">=3.7" files = [ {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e"}, From 29db67fef819f2df0b30ea7decb10537f5a22eef Mon Sep 17 00:00:00 2001 From: Hendrik Makait Date: Thu, 1 Feb 2024 10:18:42 +0100 Subject: [PATCH 068/169] Create namespace from CLI (#336) --- pyiceberg/cli/console.py | 21 +++++++++++++++++++-- tests/cli/test_console.py | 16 ++++++++++++++++ 2 files changed, 35 insertions(+), 2 deletions(-) diff --git a/pyiceberg/cli/console.py b/pyiceberg/cli/console.py index 092910b5f6..095c1ef6dc 100644 --- a/pyiceberg/cli/console.py +++ b/pyiceberg/cli/console.py @@ -206,6 +206,23 @@ def version(ctx: Context) -> None: ctx.obj["output"].version(__version__) +@run.group() +def create() -> None: + """Operation to create a namespace.""" + + +@create.command() +@click.argument("identifier") +@click.pass_context +@catch_exception() +def namespace(ctx: Context, identifier: str) -> None: + """Create a namespace.""" + catalog, output = _catalog_and_output(ctx) + + catalog.create_namespace(identifier) + output.text(f"Created namespace: {identifier}") + + @run.group() def drop() -> None: """Operations to drop a namespace or table.""" @@ -223,11 +240,11 @@ def table(ctx: Context, identifier: str) -> None: # noqa: F811 output.text(f"Dropped table: {identifier}") -@drop.command() +@drop.command() # type: ignore @click.argument("identifier") @click.pass_context @catch_exception() -def namespace(ctx: Context, identifier: str) -> None: +def namespace(ctx: Context, identifier: str) -> None: # noqa: F811 """Drop a namespace.""" catalog, output = _catalog_and_output(ctx) diff --git a/tests/cli/test_console.py b/tests/cli/test_console.py index 5eec231cca..d77b290ec6 100644 --- a/tests/cli/test_console.py +++ b/tests/cli/test_console.py @@ -308,6 +308,22 @@ def test_drop_namespace_does_not_exists(catalog: InMemoryCatalog) -> None: assert result.output == "Namespace does not exist: ('doesnotexist',)\n" +def test_create_namespace(catalog: InMemoryCatalog) -> None: + runner = CliRunner() + result = runner.invoke(run, ["create", "namespace", TEST_TABLE_NAMESPACE]) + assert result.exit_code == 0 + assert result.output == """Created namespace: default\n""" + + +def test_create_namespace_already_exists(catalog: InMemoryCatalog) -> None: + catalog.create_namespace(TEST_TABLE_NAMESPACE) + + runner = CliRunner() + result = runner.invoke(run, ["create", "namespace", TEST_TABLE_NAMESPACE]) + assert result.exit_code == 1 + assert result.output == 
"Namespace already exists: ('default',)\n" + + def test_rename_table(catalog: InMemoryCatalog) -> None: catalog.create_table( identifier=TEST_TABLE_IDENTIFIER, From 510915b36325bb4788941d0a974bb8bc6177e627 Mon Sep 17 00:00:00 2001 From: Fokko Driesprong Date: Thu, 1 Feb 2024 20:39:04 +0100 Subject: [PATCH 069/169] Remove sort-order constraint (#349) --- pyiceberg/io/pyarrow.py | 5 ++++- pyiceberg/table/__init__.py | 9 --------- 2 files changed, 4 insertions(+), 10 deletions(-) diff --git a/pyiceberg/io/pyarrow.py b/pyiceberg/io/pyarrow.py index 7a94ce4c7d..9e05e2dd4d 100644 --- a/pyiceberg/io/pyarrow.py +++ b/pyiceberg/io/pyarrow.py @@ -1735,7 +1735,10 @@ def write_file(table: Table, tasks: Iterator[WriteTask]) -> Iterator[DataFile]: file_format=FileFormat.PARQUET, partition=Record(), file_size_in_bytes=len(fo), - sort_order_id=task.sort_order_id, + # After this has been fixed: + # https://github.com/apache/iceberg-python/issues/271 + # sort_order_id=task.sort_order_id, + sort_order_id=None, # Just copy these from the table for now spec_id=table.spec().spec_id, equality_ids=None, diff --git a/pyiceberg/table/__init__.py b/pyiceberg/table/__init__.py index 16ed9ed292..e3975dcac5 100644 --- a/pyiceberg/table/__init__.py +++ b/pyiceberg/table/__init__.py @@ -943,9 +943,6 @@ def append(self, df: pa.Table) -> None: if len(self.spec().fields) > 0: raise ValueError("Cannot write to partitioned tables") - if len(self.sort_order().fields) > 0: - raise ValueError("Cannot write to tables with a sort-order") - data_files = _dataframe_to_data_files(self, df=df) merge = _MergingSnapshotProducer(operation=Operation.APPEND, table=self) for data_file in data_files: @@ -976,9 +973,6 @@ def overwrite(self, df: pa.Table, overwrite_filter: BooleanExpression = ALWAYS_T if len(self.spec().fields) > 0: raise ValueError("Cannot write to partitioned tables") - if len(self.sort_order().fields) > 0: - raise ValueError("Cannot write to tables with a sort-order") - data_files = _dataframe_to_data_files(self, df=df) merge = _MergingSnapshotProducer( operation=Operation.OVERWRITE if self.current_snapshot() is not None else Operation.APPEND, @@ -2279,9 +2273,6 @@ def _dataframe_to_data_files(table: Table, df: pa.Table) -> Iterable[DataFile]: if len(table.spec().fields) > 0: raise ValueError("Cannot write to partitioned tables") - if len(table.sort_order().fields) > 0: - raise ValueError("Cannot write to tables with a sort-order") - write_uuid = uuid.uuid4() counter = itertools.count(0) From a983935645fdf6937badb49087b4b6de3b91baa0 Mon Sep 17 00:00:00 2001 From: Sung Yun <107272191+syun64@users.noreply.github.com> Date: Thu, 1 Feb 2024 17:23:02 -0500 Subject: [PATCH 070/169] Bug fix for writing empty df or null only columns (#350) * bug fix for writing empty df or null only cols * overwrite --- pyiceberg/io/pyarrow.py | 5 +- pyiceberg/table/__init__.py | 17 ++-- tests/integration/test_writes.py | 162 ++++++++++++++++++++++++++++++- 3 files changed, 174 insertions(+), 10 deletions(-) diff --git a/pyiceberg/io/pyarrow.py b/pyiceberg/io/pyarrow.py index 9e05e2dd4d..38d7d4af4c 100644 --- a/pyiceberg/io/pyarrow.py +++ b/pyiceberg/io/pyarrow.py @@ -1339,7 +1339,10 @@ def update_min(self, val: Any) -> None: def update_max(self, val: Any) -> None: self.current_max = val if self.current_max is None else max(val, self.current_max) - def min_as_bytes(self) -> bytes: + def min_as_bytes(self) -> Optional[bytes]: + if self.current_min is None: + return None + return self.serialize( self.current_min if self.trunc_length is None diff 
--git a/pyiceberg/table/__init__.py b/pyiceberg/table/__init__.py index e3975dcac5..7692bb0bee 100644 --- a/pyiceberg/table/__init__.py +++ b/pyiceberg/table/__init__.py @@ -943,10 +943,13 @@ def append(self, df: pa.Table) -> None: if len(self.spec().fields) > 0: raise ValueError("Cannot write to partitioned tables") - data_files = _dataframe_to_data_files(self, df=df) merge = _MergingSnapshotProducer(operation=Operation.APPEND, table=self) - for data_file in data_files: - merge.append_data_file(data_file) + + # skip writing data files if the dataframe is empty + if df.shape[0] > 0: + data_files = _dataframe_to_data_files(self, df=df) + for data_file in data_files: + merge.append_data_file(data_file) merge.commit() @@ -973,14 +976,16 @@ def overwrite(self, df: pa.Table, overwrite_filter: BooleanExpression = ALWAYS_T if len(self.spec().fields) > 0: raise ValueError("Cannot write to partitioned tables") - data_files = _dataframe_to_data_files(self, df=df) merge = _MergingSnapshotProducer( operation=Operation.OVERWRITE if self.current_snapshot() is not None else Operation.APPEND, table=self, ) - for data_file in data_files: - merge.append_data_file(data_file) + # skip writing data files if the dataframe is empty + if df.shape[0] > 0: + data_files = _dataframe_to_data_files(self, df=df) + for data_file in data_files: + merge.append_data_file(data_file) merge.commit() diff --git a/tests/integration/test_writes.py b/tests/integration/test_writes.py index 17dc997163..c8552a2e96 100644 --- a/tests/integration/test_writes.py +++ b/tests/integration/test_writes.py @@ -119,9 +119,8 @@ def session_catalog() -> Catalog: @pytest.fixture(scope="session") -def arrow_table_with_null() -> pa.Table: - """PyArrow table with all kinds of columns""" - pa_schema = pa.schema([ +def pa_schema() -> pa.Schema: + return pa.schema([ ("bool", pa.bool_()), ("string", pa.string()), ("string_long", pa.string()), @@ -139,9 +138,26 @@ def arrow_table_with_null() -> pa.Table: ("binary", pa.binary()), ("fixed", pa.binary(16)), ]) + + +@pytest.fixture(scope="session") +def arrow_table_with_null(pa_schema: pa.Schema) -> pa.Table: + """PyArrow table with all kinds of columns""" return pa.Table.from_pydict(TEST_DATA_WITH_NULL, schema=pa_schema) +@pytest.fixture(scope="session") +def arrow_table_without_data(pa_schema: pa.Schema) -> pa.Table: + """PyArrow table with all kinds of columns""" + return pa.Table.from_pylist([], schema=pa_schema) + + +@pytest.fixture(scope="session") +def arrow_table_with_only_nulls(pa_schema: pa.Schema) -> pa.Table: + """PyArrow table with all kinds of columns""" + return pa.Table.from_pylist([{}, {}], schema=pa_schema) + + @pytest.fixture(scope="session", autouse=True) def table_v1_with_null(session_catalog: Catalog, arrow_table_with_null: pa.Table) -> None: identifier = "default.arrow_table_v1_with_null" @@ -157,6 +173,36 @@ def table_v1_with_null(session_catalog: Catalog, arrow_table_with_null: pa.Table assert tbl.format_version == 1, f"Expected v1, got: v{tbl.format_version}" +@pytest.fixture(scope="session", autouse=True) +def table_v1_without_data(session_catalog: Catalog, arrow_table_without_data: pa.Table) -> None: + identifier = "default.arrow_table_v1_without_data" + + try: + session_catalog.drop_table(identifier=identifier) + except NoSuchTableError: + pass + + tbl = session_catalog.create_table(identifier=identifier, schema=TABLE_SCHEMA, properties={'format-version': '1'}) + tbl.append(arrow_table_without_data) + + assert tbl.format_version == 1, f"Expected v1, got: v{tbl.format_version}" + 
+ +@pytest.fixture(scope="session", autouse=True) +def table_v1_with_only_nulls(session_catalog: Catalog, arrow_table_with_only_nulls: pa.Table) -> None: + identifier = "default.arrow_table_v1_with_only_nulls" + + try: + session_catalog.drop_table(identifier=identifier) + except NoSuchTableError: + pass + + tbl = session_catalog.create_table(identifier=identifier, schema=TABLE_SCHEMA, properties={'format-version': '1'}) + tbl.append(arrow_table_with_only_nulls) + + assert tbl.format_version == 1, f"Expected v1, got: v{tbl.format_version}" + + @pytest.fixture(scope="session", autouse=True) def table_v1_appended_with_null(session_catalog: Catalog, arrow_table_with_null: pa.Table) -> None: identifier = "default.arrow_table_v1_appended_with_null" @@ -189,6 +235,36 @@ def table_v2_with_null(session_catalog: Catalog, arrow_table_with_null: pa.Table assert tbl.format_version == 2, f"Expected v2, got: v{tbl.format_version}" +@pytest.fixture(scope="session", autouse=True) +def table_v2_without_data(session_catalog: Catalog, arrow_table_without_data: pa.Table) -> None: + identifier = "default.arrow_table_v2_without_data" + + try: + session_catalog.drop_table(identifier=identifier) + except NoSuchTableError: + pass + + tbl = session_catalog.create_table(identifier=identifier, schema=TABLE_SCHEMA, properties={'format-version': '2'}) + tbl.append(arrow_table_without_data) + + assert tbl.format_version == 2, f"Expected v2, got: v{tbl.format_version}" + + +@pytest.fixture(scope="session", autouse=True) +def table_v2_with_only_nulls(session_catalog: Catalog, arrow_table_with_only_nulls: pa.Table) -> None: + identifier = "default.arrow_table_v2_with_only_nulls" + + try: + session_catalog.drop_table(identifier=identifier) + except NoSuchTableError: + pass + + tbl = session_catalog.create_table(identifier=identifier, schema=TABLE_SCHEMA, properties={'format-version': '2'}) + tbl.append(arrow_table_with_only_nulls) + + assert tbl.format_version == 2, f"Expected v2, got: v{tbl.format_version}" + + @pytest.fixture(scope="session", autouse=True) def table_v2_appended_with_null(session_catalog: Catalog, arrow_table_with_null: pa.Table) -> None: identifier = "default.arrow_table_v2_appended_with_null" @@ -279,6 +355,26 @@ def test_query_filter_null(spark: SparkSession, col: str, format_version: int) - assert df.where(f"{col} is not null").count() == 2, f"Expected 2 rows for {col}" +@pytest.mark.integration +@pytest.mark.parametrize("col", TEST_DATA_WITH_NULL.keys()) +@pytest.mark.parametrize("format_version", [1, 2]) +def test_query_filter_without_data(spark: SparkSession, col: str, format_version: int) -> None: + identifier = f"default.arrow_table_v{format_version}_without_data" + df = spark.table(identifier) + assert df.where(f"{col} is null").count() == 0, f"Expected 0 row for {col}" + assert df.where(f"{col} is not null").count() == 0, f"Expected 0 rows for {col}" + + +@pytest.mark.integration +@pytest.mark.parametrize("col", TEST_DATA_WITH_NULL.keys()) +@pytest.mark.parametrize("format_version", [1, 2]) +def test_query_filter_only_nulls(spark: SparkSession, col: str, format_version: int) -> None: + identifier = f"default.arrow_table_v{format_version}_with_only_nulls" + df = spark.table(identifier) + assert df.where(f"{col} is null").count() == 2, f"Expected 2 row for {col}" + assert df.where(f"{col} is not null").count() == 0, f"Expected 0 rows for {col}" + + @pytest.mark.integration @pytest.mark.parametrize("col", TEST_DATA_WITH_NULL.keys()) @pytest.mark.parametrize("format_version", [1, 2]) @@ -409,3 
+505,63 @@ def test_invalid_arguments(spark: SparkSession, session_catalog: Catalog, arrow_ with pytest.raises(ValueError, match="Expected PyArrow table, got: not a df"): tbl.append("not a df") + + +@pytest.mark.integration +def test_summaries_with_only_nulls( + spark: SparkSession, session_catalog: Catalog, arrow_table_without_data: pa.Table, arrow_table_with_only_nulls: pa.Table +) -> None: + identifier = "default.arrow_table_summaries_with_only_nulls" + + try: + session_catalog.drop_table(identifier=identifier) + except NoSuchTableError: + pass + tbl = session_catalog.create_table(identifier=identifier, schema=TABLE_SCHEMA, properties={'format-version': '1'}) + + tbl.append(arrow_table_without_data) + tbl.append(arrow_table_with_only_nulls) + tbl.overwrite(arrow_table_without_data) + + rows = spark.sql( + f""" + SELECT operation, summary + FROM {identifier}.snapshots + ORDER BY committed_at ASC + """ + ).collect() + + operations = [row.operation for row in rows] + assert operations == ['append', 'append', 'overwrite'] + + summaries = [row.summary for row in rows] + + assert summaries[0] == { + 'total-data-files': '0', + 'total-delete-files': '0', + 'total-equality-deletes': '0', + 'total-files-size': '0', + 'total-position-deletes': '0', + 'total-records': '0', + } + + assert summaries[1] == { + 'added-data-files': '1', + 'added-files-size': '4045', + 'added-records': '2', + 'total-data-files': '1', + 'total-delete-files': '0', + 'total-equality-deletes': '0', + 'total-files-size': '4045', + 'total-position-deletes': '0', + 'total-records': '2', + } + + assert summaries[2] == { + 'deleted-data-files': '1', + 'deleted-records': '2', + 'removed-files-size': '4045', + 'total-data-files': '0', + 'total-delete-files': '0', + 'total-equality-deletes': '0', + 'total-files-size': '0', + 'total-position-deletes': '0', + 'total-records': '0', + } From a4856bc2eadf90ac85dec96d4502ca3517bb1bb5 Mon Sep 17 00:00:00 2001 From: Fokko Driesprong Date: Fri, 2 Feb 2024 03:02:56 +0100 Subject: [PATCH 071/169] Small simplification in the name-mapping (#351) --- pyiceberg/io/pyarrow.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/pyiceberg/io/pyarrow.py b/pyiceberg/io/pyarrow.py index 38d7d4af4c..3e056b12b4 100644 --- a/pyiceberg/io/pyarrow.py +++ b/pyiceberg/io/pyarrow.py @@ -811,12 +811,9 @@ def __init__(self, name_mapping: Optional[NameMapping] = None) -> None: self._field_names = [] self._name_mapping = name_mapping - def _current_path(self) -> str: - return ".".join(self._field_names) - def _field_id(self, field: pa.Field) -> int: if self._name_mapping: - return self._name_mapping.find(self._current_path()).field_id + return self._name_mapping.find(*self._field_names).field_id elif (field_id := _get_field_id(field)) is not None: return field_id else: From fa1587788dbbb73b8a66a0a59ff7a8083b764707 Mon Sep 17 00:00:00 2001 From: Kevin Liu Date: Sun, 4 Feb 2024 13:29:36 -0800 Subject: [PATCH 072/169] docs: Add `sqlcatalog` and local fs warehouse (#361) * add sqlcatalog and local fs warehouse * make lint * Apply suggestions from code review Co-authored-by: Fokko Driesprong --------- Co-authored-by: Fokko Driesprong --- mkdocs/docs/index.md | 35 ++++++++++++++++++++++++++++++++--- 1 file changed, 32 insertions(+), 3 deletions(-) diff --git a/mkdocs/docs/index.md b/mkdocs/docs/index.md index d82aa658eb..f1c1fa9f51 100644 --- a/mkdocs/docs/index.md +++ b/mkdocs/docs/index.md @@ -62,6 +62,29 @@ You either need to install `s3fs`, `adlfs`, `gcs`, or `pyarrow` to be able to fetch files from an object store Iceberg leverages the [catalog to have one centralized place to organize the 
tables](https://iceberg.apache.org/catalog/). This can be a traditional Hive catalog to store your Iceberg tables next to the rest, a vendor solution like the AWS Glue catalog, or an implementation of Iceberg's own [REST protocol](https://github.com/apache/iceberg/tree/main/open-api). Check out the [configuration](configuration.md) page to find all the configuration details. +For the sake of demonstration, we'll configure the catalog to use the `SqlCatalog` implementation, which will store information in a local `sqlite` database. We'll also configure the catalog to store data files in the local filesystem instead of an object store. This should not be used in production due to the limited scalability. + +Create a temporary location for Iceberg: + +```shell +mkdir /tmp/warehouse +``` + +Open a Python 3 REPL to set up the catalog: + +```python +from pyiceberg.catalog.sql import SqlCatalog + +warehouse_path = "/tmp/warehouse" +catalog = SqlCatalog( + "default", + **{ + "uri": f"sqlite:///{warehouse_path}/pyiceberg_catalog.db", + "warehouse": f"file://{warehouse_path}", + }, +) +``` + ## Write a PyArrow dataframe Let's take the Taxi dataset, and write this to an Iceberg table. @@ -83,9 +106,7 @@ df = pq.read_table("/tmp/yellow_tripdata_2023-01.parquet") Create a new Iceberg table: ```python -from pyiceberg.catalog import load_catalog - -catalog = load_catalog("default") +catalog.create_namespace("default") table = catalog.create_table( "default.taxi_dataset", @@ -158,6 +179,14 @@ df = table.scan(row_filter="tip_per_mile > 0").to_arrow() len(df) ``` +### Explore Iceberg data and metadata files + +Since the catalog was configured to use the local filesystem, we can explore how Iceberg saved data and metadata files from the above operations. + +```shell +find /tmp/warehouse/ +``` + ## More details For the details, please check the [CLI](cli.md) or [Python API](api.md) page. 
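A side note on the quickstart above: rather than constructing `SqlCatalog` inline, the same catalog can be declared once in PyIceberg's `~/.pyiceberg.yaml` configuration file, after which `load_catalog("default")` resolves it by name. A sketch, assuming the `sql` catalog type and the `/tmp/warehouse` paths from the example:

```yaml
# ~/.pyiceberg.yaml -- equivalent of the SqlCatalog constructed in the quickstart
catalog:
  default:
    type: sql
    uri: sqlite:////tmp/warehouse/pyiceberg_catalog.db
    warehouse: file:///tmp/warehouse
```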
From a276112e12a481c5f461a46571b4554b74a67586 Mon Sep 17 00:00:00 2001 From: Kevin Liu Date: Sun, 4 Feb 2024 13:41:45 -0800 Subject: [PATCH 073/169] sql-catalog: Default `create_engine` echo to `False` (#360) * default create_engine echo to False * Update pyiceberg/catalog/sql.py Co-authored-by: Fokko Driesprong --------- Co-authored-by: Fokko Driesprong --- pyiceberg/catalog/sql.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pyiceberg/catalog/sql.py b/pyiceberg/catalog/sql.py index 8a02b20dfc..9436e208ca 100644 --- a/pyiceberg/catalog/sql.py +++ b/pyiceberg/catalog/sql.py @@ -101,7 +101,8 @@ def __init__(self, name: str, **properties: str): if not (uri_prop := self.properties.get("uri")): raise NoSuchPropertyException("SQL connection URI is required") - self.engine = create_engine(uri_prop, echo=True) + echo = bool(self.properties.get("echo", False)) + self.engine = create_engine(uri_prop, echo=echo) self._ensure_tables_exist() From a3d529b690c93483e413940f61d98e8265db4657 Mon Sep 17 00:00:00 2001 From: Rahij Ramsharan Date: Sun, 4 Feb 2024 21:42:50 +0000 Subject: [PATCH 074/169] sql-catalog: Fix Postgres initialization when tables do not exist (#356) * fix postgres catalog initialization when tables do not exist * Format --------- Co-authored-by: Fokko Driesprong --- pyiceberg/catalog/sql.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/pyiceberg/catalog/sql.py b/pyiceberg/catalog/sql.py index 9436e208ca..62a2dac54a 100644 --- a/pyiceberg/catalog/sql.py +++ b/pyiceberg/catalog/sql.py @@ -32,7 +32,7 @@ union, update, ) -from sqlalchemy.exc import IntegrityError, NoResultFound, OperationalError +from sqlalchemy.exc import IntegrityError, NoResultFound, OperationalError, ProgrammingError from sqlalchemy.orm import ( DeclarativeBase, Mapped, @@ -112,7 +112,10 @@ def _ensure_tables_exist(self) -> None: stmt = select(1).select_from(table) try: session.scalar(stmt) - except OperationalError: + except ( + OperationalError, + ProgrammingError, + ): # sqlalchemy returns OperationalError in case of sqlite and ProgrammingError with postgres. 
self.create_tables() return From 9e4ed296de7448e036dedc227b71dc51214fb679 Mon Sep 17 00:00:00 2001 From: Jonas Haag Date: Mon, 5 Feb 2024 10:40:43 +0100 Subject: [PATCH 075/169] Use `write.parquet.compression-{codec,level}` (#358) * Use `write.parquet.compression-{codec,level}` * Cleanup * Review feedback * Review feedback * Review feedback * Update pyiceberg/io/pyarrow.py Co-authored-by: Fokko Driesprong * Fixup * Fixup * Fixup --------- Co-authored-by: Fokko Driesprong --- pyiceberg/io/pyarrow.py | 44 +++++- tests/integration/test_reads.py | 6 +- tests/integration/test_writes.py | 254 ++++++++++++++++--------------- 3 files changed, 173 insertions(+), 131 deletions(-) diff --git a/pyiceberg/io/pyarrow.py b/pyiceberg/io/pyarrow.py index 3e056b12b4..f7c0aef68c 100644 --- a/pyiceberg/io/pyarrow.py +++ b/pyiceberg/io/pyarrow.py @@ -26,6 +26,7 @@ from __future__ import annotations import concurrent.futures +import fnmatch import itertools import logging import os @@ -1720,13 +1721,14 @@ def write_file(table: Table, tasks: Iterator[WriteTask]) -> Iterator[DataFile]: except StopIteration: pass + parquet_writer_kwargs = _get_parquet_writer_kwargs(table.properties) + file_path = f'{table.location()}/data/{task.generate_data_file_filename("parquet")}' file_schema = schema_to_pyarrow(table.schema()) - collected_metrics: List[pq.FileMetaData] = [] fo = table.io.new_output(file_path) with fo.create(overwrite=True) as fos: - with pq.ParquetWriter(fos, schema=file_schema, version="1.0", metadata_collector=collected_metrics) as writer: + with pq.ParquetWriter(fos, schema=file_schema, version="1.0", **parquet_writer_kwargs) as writer: writer.write_table(task.df) data_file = DataFile( @@ -1745,14 +1747,42 @@ def write_file(table: Table, tasks: Iterator[WriteTask]) -> Iterator[DataFile]: key_metadata=None, ) - if len(collected_metrics) != 1: - # One file has been written - raise ValueError(f"Expected 1 entry, got: {collected_metrics}") - fill_parquet_file_metadata( data_file=data_file, - parquet_metadata=collected_metrics[0], + parquet_metadata=writer.writer.metadata, stats_columns=compute_statistics_plan(table.schema(), table.properties), parquet_column_mapping=parquet_path_to_id_mapping(table.schema()), ) return iter([data_file]) + + +def _get_parquet_writer_kwargs(table_properties: Properties) -> Dict[str, Any]: + def _get_int(key: str) -> Optional[int]: + if value := table_properties.get(key): + try: + return int(value) + except ValueError as e: + raise ValueError(f"Could not parse table property {key} to an integer: {value}") from e + else: + return None + + for key_pattern in [ + "write.parquet.row-group-size-bytes", + "write.parquet.page-row-limit", + "write.parquet.bloom-filter-max-bytes", + "write.parquet.bloom-filter-enabled.column.*", + ]: + if unsupported_keys := fnmatch.filter(table_properties, key_pattern): + raise NotImplementedError(f"Parquet writer option(s) {unsupported_keys} not implemented") + + compression_codec = table_properties.get("write.parquet.compression-codec", "zstd") + compression_level = _get_int("write.parquet.compression-level") + if compression_codec == "uncompressed": + compression_codec = "none" + + return { + "compression": compression_codec, + "compression_level": compression_level, + "data_page_size": _get_int("write.parquet.page-size-bytes"), + "dictionary_pagesize_limit": _get_int("write.parquet.dict-size-bytes"), + } diff --git a/tests/integration/test_reads.py b/tests/integration/test_reads.py index 3fc06fbddc..b35b348638 100644 --- 
a/tests/integration/test_reads.py +++ b/tests/integration/test_reads.py @@ -224,7 +224,11 @@ def test_ray_all_types(catalog: Catalog) -> None: @pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) def test_pyarrow_to_iceberg_all_types(catalog: Catalog) -> None: table_test_all_types = catalog.load_table("default.test_all_types") - fs = S3FileSystem(endpoint_override="http://localhost:9000", access_key="admin", secret_key="password") + fs = S3FileSystem( + endpoint_override=catalog.properties["s3.endpoint"], + access_key=catalog.properties["s3.access-key-id"], + secret_key=catalog.properties["s3.secret-access-key"], + ) data_file_paths = [task.file.file_path for task in table_test_all_types.scan().plan_files()] for data_file_path in data_file_paths: uri = urlparse(data_file_path) diff --git a/tests/integration/test_writes.py b/tests/integration/test_writes.py index c8552a2e96..a65d98fc9e 100644 --- a/tests/integration/test_writes.py +++ b/tests/integration/test_writes.py @@ -17,12 +17,17 @@ # pylint:disable=redefined-outer-name import uuid from datetime import date, datetime +from typing import Any, Dict, List +from urllib.parse import urlparse import pyarrow as pa +import pyarrow.parquet as pq import pytest +from pyarrow.fs import S3FileSystem from pyspark.sql import SparkSession +from pytest_mock.plugin import MockerFixture -from pyiceberg.catalog import Catalog, load_catalog +from pyiceberg.catalog import Catalog, Properties, Table, load_catalog from pyiceberg.exceptions import NamespaceAlreadyExistsError, NoSuchTableError from pyiceberg.schema import Schema from pyiceberg.types import ( @@ -158,142 +163,79 @@ def arrow_table_with_only_nulls(pa_schema: pa.Schema) -> pa.Table: return pa.Table.from_pylist([{}, {}], schema=pa_schema) -@pytest.fixture(scope="session", autouse=True) -def table_v1_with_null(session_catalog: Catalog, arrow_table_with_null: pa.Table) -> None: - identifier = "default.arrow_table_v1_with_null" - +def _create_table(session_catalog: Catalog, identifier: str, properties: Properties, data: List[pa.Table]) -> Table: try: session_catalog.drop_table(identifier=identifier) except NoSuchTableError: pass - tbl = session_catalog.create_table(identifier=identifier, schema=TABLE_SCHEMA, properties={'format-version': '1'}) - tbl.append(arrow_table_with_null) + tbl = session_catalog.create_table(identifier=identifier, schema=TABLE_SCHEMA, properties=properties) + for d in data: + tbl.append(d) + + return tbl + +@pytest.fixture(scope="session", autouse=True) +def table_v1_with_null(session_catalog: Catalog, arrow_table_with_null: pa.Table) -> None: + identifier = "default.arrow_table_v1_with_null" + tbl = _create_table(session_catalog, identifier, {"format-version": "1"}, [arrow_table_with_null]) assert tbl.format_version == 1, f"Expected v1, got: v{tbl.format_version}" @pytest.fixture(scope="session", autouse=True) def table_v1_without_data(session_catalog: Catalog, arrow_table_without_data: pa.Table) -> None: identifier = "default.arrow_table_v1_without_data" - - try: - session_catalog.drop_table(identifier=identifier) - except NoSuchTableError: - pass - - tbl = session_catalog.create_table(identifier=identifier, schema=TABLE_SCHEMA, properties={'format-version': '1'}) - tbl.append(arrow_table_without_data) - + tbl = _create_table(session_catalog, identifier, {"format-version": "1"}, [arrow_table_without_data]) assert tbl.format_version == 1, f"Expected v1, got: v{tbl.format_version}" @pytest.fixture(scope="session", 
autouse=True) def table_v1_with_only_nulls(session_catalog: Catalog, arrow_table_with_only_nulls: pa.Table) -> None: identifier = "default.arrow_table_v1_with_only_nulls" - - try: - session_catalog.drop_table(identifier=identifier) - except NoSuchTableError: - pass - - tbl = session_catalog.create_table(identifier=identifier, schema=TABLE_SCHEMA, properties={'format-version': '1'}) - tbl.append(arrow_table_with_only_nulls) - + tbl = _create_table(session_catalog, identifier, {"format-version": "1"}, [arrow_table_with_only_nulls]) assert tbl.format_version == 1, f"Expected v1, got: v{tbl.format_version}" @pytest.fixture(scope="session", autouse=True) def table_v1_appended_with_null(session_catalog: Catalog, arrow_table_with_null: pa.Table) -> None: identifier = "default.arrow_table_v1_appended_with_null" - - try: - session_catalog.drop_table(identifier=identifier) - except NoSuchTableError: - pass - - tbl = session_catalog.create_table(identifier=identifier, schema=TABLE_SCHEMA, properties={'format-version': '1'}) - - for _ in range(2): - tbl.append(arrow_table_with_null) - + tbl = _create_table(session_catalog, identifier, {"format-version": "1"}, 2 * [arrow_table_with_null]) assert tbl.format_version == 1, f"Expected v1, got: v{tbl.format_version}" @pytest.fixture(scope="session", autouse=True) def table_v2_with_null(session_catalog: Catalog, arrow_table_with_null: pa.Table) -> None: identifier = "default.arrow_table_v2_with_null" - - try: - session_catalog.drop_table(identifier=identifier) - except NoSuchTableError: - pass - - tbl = session_catalog.create_table(identifier=identifier, schema=TABLE_SCHEMA, properties={'format-version': '2'}) - tbl.append(arrow_table_with_null) - + tbl = _create_table(session_catalog, identifier, {"format-version": "2"}, [arrow_table_with_null]) assert tbl.format_version == 2, f"Expected v2, got: v{tbl.format_version}" @pytest.fixture(scope="session", autouse=True) def table_v2_without_data(session_catalog: Catalog, arrow_table_without_data: pa.Table) -> None: identifier = "default.arrow_table_v2_without_data" - - try: - session_catalog.drop_table(identifier=identifier) - except NoSuchTableError: - pass - - tbl = session_catalog.create_table(identifier=identifier, schema=TABLE_SCHEMA, properties={'format-version': '2'}) - tbl.append(arrow_table_without_data) - + tbl = _create_table(session_catalog, identifier, {"format-version": "2"}, [arrow_table_without_data]) assert tbl.format_version == 2, f"Expected v2, got: v{tbl.format_version}" @pytest.fixture(scope="session", autouse=True) def table_v2_with_only_nulls(session_catalog: Catalog, arrow_table_with_only_nulls: pa.Table) -> None: identifier = "default.arrow_table_v2_with_only_nulls" - - try: - session_catalog.drop_table(identifier=identifier) - except NoSuchTableError: - pass - - tbl = session_catalog.create_table(identifier=identifier, schema=TABLE_SCHEMA, properties={'format-version': '2'}) - tbl.append(arrow_table_with_only_nulls) - + tbl = _create_table(session_catalog, identifier, {"format-version": "2"}, [arrow_table_with_only_nulls]) assert tbl.format_version == 2, f"Expected v2, got: v{tbl.format_version}" @pytest.fixture(scope="session", autouse=True) def table_v2_appended_with_null(session_catalog: Catalog, arrow_table_with_null: pa.Table) -> None: identifier = "default.arrow_table_v2_appended_with_null" - - try: - session_catalog.drop_table(identifier=identifier) - except NoSuchTableError: - pass - - tbl = session_catalog.create_table(identifier=identifier, schema=TABLE_SCHEMA, 
properties={'format-version': '2'}) - - for _ in range(2): - tbl.append(arrow_table_with_null) - + tbl = _create_table(session_catalog, identifier, {"format-version": "2"}, 2 * [arrow_table_with_null]) assert tbl.format_version == 2, f"Expected v2, got: v{tbl.format_version}" @pytest.fixture(scope="session", autouse=True) def table_v1_v2_appended_with_null(session_catalog: Catalog, arrow_table_with_null: pa.Table) -> None: identifier = "default.arrow_table_v1_v2_appended_with_null" - - try: - session_catalog.drop_table(identifier=identifier) - except NoSuchTableError: - pass - - tbl = session_catalog.create_table(identifier=identifier, schema=TABLE_SCHEMA, properties={'format-version': '1'}) - tbl.append(arrow_table_with_null) - + tbl = _create_table(session_catalog, identifier, {"format-version": "1"}, [arrow_table_with_null]) assert tbl.format_version == 1, f"Expected v1, got: v{tbl.format_version}" with tbl.transaction() as tx: @@ -397,15 +339,7 @@ def test_query_filter_v1_v2_append_null(spark: SparkSession, col: str) -> None: @pytest.mark.integration def test_summaries(spark: SparkSession, session_catalog: Catalog, arrow_table_with_null: pa.Table) -> None: identifier = "default.arrow_table_summaries" - - try: - session_catalog.drop_table(identifier=identifier) - except NoSuchTableError: - pass - tbl = session_catalog.create_table(identifier=identifier, schema=TABLE_SCHEMA, properties={'format-version': '1'}) - - tbl.append(arrow_table_with_null) - tbl.append(arrow_table_with_null) + tbl = _create_table(session_catalog, identifier, {"format-version": "1"}, 2 * [arrow_table_with_null]) tbl.overwrite(arrow_table_with_null) rows = spark.sql( @@ -423,39 +357,39 @@ def test_summaries(spark: SparkSession, session_catalog: Catalog, arrow_table_wi assert summaries[0] == { 'added-data-files': '1', - 'added-files-size': '5283', + 'added-files-size': '5437', 'added-records': '3', 'total-data-files': '1', 'total-delete-files': '0', 'total-equality-deletes': '0', - 'total-files-size': '5283', + 'total-files-size': '5437', 'total-position-deletes': '0', 'total-records': '3', } assert summaries[1] == { 'added-data-files': '1', - 'added-files-size': '5283', + 'added-files-size': '5437', 'added-records': '3', 'total-data-files': '2', 'total-delete-files': '0', 'total-equality-deletes': '0', - 'total-files-size': '10566', + 'total-files-size': '10874', 'total-position-deletes': '0', 'total-records': '6', } assert summaries[2] == { 'added-data-files': '1', - 'added-files-size': '5283', + 'added-files-size': '5437', 'added-records': '3', 'deleted-data-files': '2', 'deleted-records': '6', - 'removed-files-size': '10566', + 'removed-files-size': '10874', 'total-data-files': '1', 'total-delete-files': '0', 'total-equality-deletes': '0', - 'total-files-size': '5283', + 'total-files-size': '5437', 'total-position-deletes': '0', 'total-records': '3', } @@ -464,12 +398,7 @@ def test_summaries(spark: SparkSession, session_catalog: Catalog, arrow_table_wi @pytest.mark.integration def test_data_files(spark: SparkSession, session_catalog: Catalog, arrow_table_with_null: pa.Table) -> None: identifier = "default.arrow_data_files" - - try: - session_catalog.drop_table(identifier=identifier) - except NoSuchTableError: - pass - tbl = session_catalog.create_table(identifier=identifier, schema=TABLE_SCHEMA, properties={'format-version': '1'}) + tbl = _create_table(session_catalog, identifier, {"format-version": "1"}, []) tbl.overwrite(arrow_table_with_null) # should produce a DELETE entry @@ -490,15 +419,100 @@ def 
test_data_files(spark: SparkSession, session_catalog: Catalog, arrow_table_w @pytest.mark.integration -def test_invalid_arguments(spark: SparkSession, session_catalog: Catalog, arrow_table_with_null: pa.Table) -> None: - identifier = "default.arrow_data_files" +@pytest.mark.parametrize("format_version", ["1", "2"]) +@pytest.mark.parametrize( + "properties, expected_compression_name", + [ + # REST catalog uses Zstandard by default: https://github.com/apache/iceberg/pull/8593 + ({}, "ZSTD"), + ({"write.parquet.compression-codec": "uncompressed"}, "UNCOMPRESSED"), + ({"write.parquet.compression-codec": "gzip", "write.parquet.compression-level": "1"}, "GZIP"), + ({"write.parquet.compression-codec": "zstd", "write.parquet.compression-level": "1"}, "ZSTD"), + ({"write.parquet.compression-codec": "snappy"}, "SNAPPY"), + ], +) +def test_write_parquet_compression_properties( + spark: SparkSession, + session_catalog: Catalog, + arrow_table_with_null: pa.Table, + format_version: str, + properties: Dict[str, Any], + expected_compression_name: str, +) -> None: + identifier = "default.write_parquet_compression_properties" - try: - session_catalog.drop_table(identifier=identifier) - except NoSuchTableError: - pass + tbl = _create_table(session_catalog, identifier, {"format-version": format_version, **properties}, [arrow_table_with_null]) - tbl = session_catalog.create_table(identifier=identifier, schema=TABLE_SCHEMA, properties={'format-version': '1'}) + data_file_paths = [task.file.file_path for task in tbl.scan().plan_files()] + + fs = S3FileSystem( + endpoint_override=session_catalog.properties["s3.endpoint"], + access_key=session_catalog.properties["s3.access-key-id"], + secret_key=session_catalog.properties["s3.secret-access-key"], + ) + uri = urlparse(data_file_paths[0]) + with fs.open_input_file(f"{uri.netloc}{uri.path}") as f: + parquet_metadata = pq.read_metadata(f) + compression = parquet_metadata.row_group(0).column(0).compression + + assert compression == expected_compression_name + + +@pytest.mark.integration +@pytest.mark.parametrize( + "properties, expected_kwargs", + [ + ({"write.parquet.page-size-bytes": "42"}, {"data_page_size": 42}), + ({"write.parquet.dict-size-bytes": "42"}, {"dictionary_pagesize_limit": 42}), + ], +) +def test_write_parquet_other_properties( + mocker: MockerFixture, + spark: SparkSession, + session_catalog: Catalog, + arrow_table_with_null: pa.Table, + properties: Dict[str, Any], + expected_kwargs: Dict[str, Any], +) -> None: + identifier = "default.test_write_parquet_other_properties" + + # The properties we test cannot be checked on the resulting Parquet file, so we spy on the ParquetWriter call instead + ParquetWriter = mocker.spy(pq, "ParquetWriter") + _create_table(session_catalog, identifier, properties, [arrow_table_with_null]) + + call_kwargs = ParquetWriter.call_args[1] + for key, value in expected_kwargs.items(): + assert call_kwargs.get(key) == value + + +@pytest.mark.integration +@pytest.mark.parametrize( + "properties", + [ + {"write.parquet.row-group-size-bytes": "42"}, + {"write.parquet.page-row-limit": "42"}, + {"write.parquet.bloom-filter-enabled.column.bool": "42"}, + {"write.parquet.bloom-filter-max-bytes": "42"}, + ], +) +def test_write_parquet_unsupported_properties( + spark: SparkSession, + session_catalog: Catalog, + arrow_table_with_null: pa.Table, + properties: Dict[str, str], +) -> None: + identifier = "default.write_parquet_unsupported_properties" + + tbl = _create_table(session_catalog, identifier, properties, []) + 
with pytest.raises(NotImplementedError): + tbl.append(arrow_table_with_null) + + +@pytest.mark.integration +def test_invalid_arguments(spark: SparkSession, session_catalog: Catalog, arrow_table_with_null: pa.Table) -> None: + identifier = "default.arrow_data_files" + tbl = _create_table(session_catalog, identifier, {'format-version': '1'}, []) with pytest.raises(ValueError, match="Expected PyArrow table, got: not a df"): tbl.overwrite("not a df") @@ -512,15 +526,9 @@ def test_summaries_with_only_nulls( spark: SparkSession, session_catalog: Catalog, arrow_table_without_data: pa.Table, arrow_table_with_only_nulls: pa.Table ) -> None: identifier = "default.arrow_table_summaries_with_only_nulls" - - try: - session_catalog.drop_table(identifier=identifier) - except NoSuchTableError: - pass - tbl = session_catalog.create_table(identifier=identifier, schema=TABLE_SCHEMA, properties={'format-version': '1'}) - - tbl.append(arrow_table_without_data) - tbl.append(arrow_table_with_only_nulls) + tbl = _create_table( + session_catalog, identifier, {'format-version': '1'}, [arrow_table_without_data, arrow_table_with_only_nulls] + ) tbl.overwrite(arrow_table_without_data) rows = spark.sql( @@ -547,12 +555,12 @@ def test_summaries_with_only_nulls( assert summaries[1] == { 'added-data-files': '1', - 'added-files-size': '4045', + 'added-files-size': '4217', 'added-records': '2', 'total-data-files': '1', 'total-delete-files': '0', 'total-equality-deletes': '0', - 'total-files-size': '4045', + 'total-files-size': '4217', 'total-position-deletes': '0', 'total-records': '2', } From 40ab60a11947858fa36ca54ace494ddad7e315c2 Mon Sep 17 00:00:00 2001 From: Honah J Date: Mon, 5 Feb 2024 01:42:17 -0800 Subject: [PATCH 076/169] glue: Allow `TimestampzType` (#366) --- pyiceberg/catalog/glue.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/pyiceberg/catalog/glue.py b/pyiceberg/catalog/glue.py index 8f860fabba..06484cb0e4 100644 --- a/pyiceberg/catalog/glue.py +++ b/pyiceberg/catalog/glue.py @@ -85,6 +85,7 @@ StringType, StructType, TimestampType, + TimestamptzType, TimeType, UUIDType, ) @@ -125,6 +126,7 @@ def _construct_parameters( StringType: "string", UUIDType: "string", TimestampType: "timestamp", + TimestamptzType: "timestamp", FixedType: "binary", BinaryType: "binary", } @@ -150,7 +152,7 @@ def primitive(self, primitive: PrimitiveType) -> str: if isinstance(primitive, DecimalType): return f"decimal({primitive.precision},{primitive.scale})" if (primitive_type := type(primitive)) not in GLUE_PRIMITIVE_TYPES: - raise ValueError(f"Unknown primitive type: {primitive}") + return str(primitive_type.root) return GLUE_PRIMITIVE_TYPES[primitive_type] From 622adb799e1c5acb48be95ba14a670bafef54a61 Mon Sep 17 00:00:00 2001 From: Fokko Driesprong Date: Mon, 5 Feb 2024 16:25:59 +0100 Subject: [PATCH 077/169] docs: Document Parquet write options (#364) * docs: Document Parquet write options * Move to tables section * Default to 2MB * Revert some unrelated changes * Update configuration.md Co-authored-by: Honah J. --------- Co-authored-by: Honah J. 
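The write options documented in this patch are ordinary Iceberg table properties, so they can be passed wherever properties are accepted, for example at table creation. A minimal sketch (catalog name, identifier, and schema are illustrative):

```python
from pyiceberg.catalog import load_catalog
from pyiceberg.schema import Schema
from pyiceberg.types import NestedField, StringType

catalog = load_catalog("default")
table = catalog.create_table(
    "default.compressed_table",  # illustrative identifier
    schema=Schema(NestedField(field_id=1, name="data", field_type=StringType(), required=False)),
    properties={
        # Both keys come from the table documented below; gzip at level 9
        # trades write speed for smaller files.
        "write.parquet.compression-codec": "gzip",
        "write.parquet.compression-level": "9",
    },
)
```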
--- mkdocs/docs/configuration.md | 15 ++++++++++++++- pyiceberg/io/pyarrow.py | 6 +++--- 2 files changed, 17 insertions(+), 4 deletions(-) diff --git a/mkdocs/docs/configuration.md b/mkdocs/docs/configuration.md index bfe1e62fac..ce17931169 100644 --- a/mkdocs/docs/configuration.md +++ b/mkdocs/docs/configuration.md @@ -46,7 +46,20 @@ The environment variable picked up by Iceberg starts with `PYICEBERG_` and then For example, `PYICEBERG_CATALOG__DEFAULT__S3__ACCESS_KEY_ID`, sets `s3.access-key-id` on the `default` catalog. -## FileIO +# Tables + +Iceberg tables support table properties to configure table behavior. + +## Write options + +| Key | Options | Default | Description | +| --------------------------------- | --------------------------------- | ------- | ------------------------------------------------------------------------------------------- | +| `write.parquet.compression-codec` | `{uncompressed,zstd,gzip,snappy}` | zstd | Sets the Parquet compression codec. | +| `write.parquet.compression-level` | Integer | null | Parquet compression level for the codec. If not set, the codec's default level is used | +| `write.parquet.page-size-bytes` | Size in bytes | 1MB | Set a target threshold for the approximate encoded size of data pages within a column chunk | +| `write.parquet.dict-size-bytes` | Size in bytes | 2MB | Set the dictionary page size limit per row group | + +# FileIO Iceberg works with the concept of a FileIO which is a pluggable module for reading, writing, and deleting files. By default, PyIceberg will try to initialize the FileIO that's suitable for the scheme (`s3://`, `gs://`, etc.) and will use the first one that's installed. diff --git a/pyiceberg/io/pyarrow.py b/pyiceberg/io/pyarrow.py index f7c0aef68c..99c1af5ad6 100644 --- a/pyiceberg/io/pyarrow.py +++ b/pyiceberg/io/pyarrow.py @@ -1757,14 +1757,14 @@ def write_file(table: Table, tasks: Iterator[WriteTask]) -> Iterator[DataFile]: def _get_parquet_writer_kwargs(table_properties: Properties) -> Dict[str, Any]: - def _get_int(key: str) -> Optional[int]: + def _get_int(key: str, default: Optional[int] = None) -> Optional[int]: if value := table_properties.get(key): try: return int(value) except ValueError as e: raise ValueError(f"Could not parse table property {key} to an integer: {value}") from e else: - return None + return default for key_pattern in [ "write.parquet.row-group-size-bytes", @@ -1784,5 +1784,5 @@ def _get_int(key: str) -> Optional[int]: "compression": compression_codec, "compression_level": compression_level, "data_page_size": _get_int("write.parquet.page-size-bytes"), - "dictionary_pagesize_limit": _get_int("write.parquet.dict-size-bytes"), + "dictionary_pagesize_limit": _get_int("write.parquet.dict-size-bytes", default=2 * 1024 * 1024), } From 5ddf1edb50a84149384a80ed8f6ffac6e34fbe31 Mon Sep 17 00:00:00 2001 From: Hussein Awala Date: Tue, 6 Feb 2024 03:50:47 +0100 Subject: [PATCH 078/169] Build: Bump moto from 4.2.13 to 5.0.1 (#373) --- poetry.lock | 47 ++++++++++----------- pyproject.toml | 2 +- tests/catalog/test_dynamodb.py | 68 +++++++++++++++---------------- tests/catalog/test_glue.py | 74 +++++++++++++++++----------------- tests/conftest.py | 14 ++++--- 5 files changed, 101 insertions(+), 104 deletions(-) diff --git a/poetry.lock b/poetry.lock index ff67dd28c6..9d34a7a435 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1963,19 +1963,19 @@ files = [ [[package]] name = "moto" -version = "4.2.13" +version = "5.0.1" description = "" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files 
= [ - {file = "moto-4.2.13-py2.py3-none-any.whl", hash = "sha256:93e0fd13b624bd79115494f833308c3641b2be0fc9f4f18aa9264aa01f6168e0"}, - {file = "moto-4.2.13.tar.gz", hash = "sha256:01aef6a489a725c8d725bd3dc6f70ff1bedaee3e2641752e4b471ff0ede4b4d7"}, + {file = "moto-5.0.1-py2.py3-none-any.whl", hash = "sha256:94e3b07a403cc8078ffee94bf404ef677112d036a57ddb5e0f19c5fcf48987f5"}, + {file = "moto-5.0.1.tar.gz", hash = "sha256:62b9798aef9028432194cebb7a671f4064257bb3be662d9c1b83b94411b694bb"}, ] [package.dependencies] aws-xray-sdk = {version = ">=0.93,<0.96 || >0.96", optional = true, markers = "extra == \"server\""} boto3 = ">=1.9.201" -botocore = ">=1.12.201" +botocore = ">=1.14.0" cfn-lint = {version = ">=0.40.0", optional = true, markers = "extra == \"server\""} cryptography = ">=3.3.1" docker = {version = ">=3.0.0", optional = true, markers = "extra == \"server\""} @@ -1986,42 +1986,37 @@ graphql-core = {version = "*", optional = true, markers = "extra == \"server\""} Jinja2 = ">=2.10.1" jsondiff = {version = ">=1.1.2", optional = true, markers = "extra == \"server\""} openapi-spec-validator = {version = ">=0.5.0", optional = true, markers = "extra == \"server\""} -py-partiql-parser = {version = "0.5.0", optional = true, markers = "extra == \"server\""} +py-partiql-parser = {version = "0.5.1", optional = true, markers = "extra == \"server\""} pyparsing = {version = ">=3.0.7", optional = true, markers = "extra == \"server\""} python-dateutil = ">=2.1,<3.0.0" python-jose = {version = ">=3.1.0,<4.0.0", extras = ["cryptography"], optional = true, markers = "extra == \"server\""} PyYAML = {version = ">=5.1", optional = true, markers = "extra == \"server\""} requests = ">=2.5" -responses = ">=0.13.0" +responses = ">=0.15.0" setuptools = {version = "*", optional = true, markers = "extra == \"server\""} sshpubkeys = {version = ">=3.1.0", optional = true, markers = "extra == \"server\""} werkzeug = ">=0.5,<2.2.0 || >2.2.0,<2.2.1 || >2.2.1" xmltodict = "*" [package.extras] -all = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.0)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] +all = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.1)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] apigateway = ["PyYAML (>=5.1)", "ecdsa (!=0.15)", "openapi-spec-validator (>=0.5.0)", "python-jose[cryptography] (>=3.1.0,<4.0.0)"] -apigatewayv2 = ["PyYAML (>=5.1)"] +apigatewayv2 = ["PyYAML (>=5.1)", "openapi-spec-validator (>=0.5.0)"] appsync = ["graphql-core"] awslambda = ["docker (>=3.0.0)"] batch = ["docker (>=3.0.0)"] -cloudformation = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.0)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] +cloudformation = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", 
"py-partiql-parser (==0.5.1)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] cognitoidp = ["ecdsa (!=0.15)", "python-jose[cryptography] (>=3.1.0,<4.0.0)"] -ds = ["sshpubkeys (>=3.1.0)"] -dynamodb = ["docker (>=3.0.0)", "py-partiql-parser (==0.5.0)"] -dynamodbstreams = ["docker (>=3.0.0)", "py-partiql-parser (==0.5.0)"] -ebs = ["sshpubkeys (>=3.1.0)"] +dynamodb = ["docker (>=3.0.0)", "py-partiql-parser (==0.5.1)"] +dynamodbstreams = ["docker (>=3.0.0)", "py-partiql-parser (==0.5.1)"] ec2 = ["sshpubkeys (>=3.1.0)"] -efs = ["sshpubkeys (>=3.1.0)"] -eks = ["sshpubkeys (>=3.1.0)"] glue = ["pyparsing (>=3.0.7)"] iotdata = ["jsondiff (>=1.1.2)"] -proxy = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=2.5.1)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.0)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] -resourcegroupstaggingapi = ["PyYAML (>=5.1)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.0)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "sshpubkeys (>=3.1.0)"] -route53resolver = ["sshpubkeys (>=3.1.0)"] -s3 = ["PyYAML (>=5.1)", "py-partiql-parser (==0.5.0)"] -s3crc32c = ["PyYAML (>=5.1)", "crc32c", "py-partiql-parser (==0.5.0)"] -server = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "flask (!=2.2.0,!=2.2.1)", "flask-cors", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.0)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] +proxy = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=2.5.1)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.1)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] +resourcegroupstaggingapi = ["PyYAML (>=5.1)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.1)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)"] +s3 = ["PyYAML (>=5.1)", "py-partiql-parser (==0.5.1)"] +s3crc32c = ["PyYAML (>=5.1)", "crc32c", "py-partiql-parser (==0.5.1)"] +server = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "flask (!=2.2.0,!=2.2.1)", "flask-cors", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.1)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] ssm = ["PyYAML (>=5.1)"] xray = ["aws-xray-sdk (>=0.93,!=0.96)", "setuptools"] @@ -2689,13 +2684,13 @@ files = [ [[package]] name = "py-partiql-parser" -version = "0.5.0" +version = "0.5.1" description = "Pure Python PartiQL Parser" optional = false python-versions = "*" files = [ - {file = "py-partiql-parser-0.5.0.tar.gz", hash = "sha256:427a662e87d51a0a50150fc8b75c9ebb4a52d49129684856c40c88b8c8e027e4"}, - {file = "py_partiql_parser-0.5.0-py3-none-any.whl", hash = 
"sha256:dc454c27526adf62deca5177ea997bf41fac4fd109c5d4c8d81f984de738ba8f"}, + {file = "py-partiql-parser-0.5.1.tar.gz", hash = "sha256:aeac8f46529d8651bbae88a1a6c14dc3aa38ebc4bc6bd1eb975044c0564246c6"}, + {file = "py_partiql_parser-0.5.1-py3-none-any.whl", hash = "sha256:53053e70987dea2983e1990ad85f87a7d8cec13dd4a4b065a740bcfd661f5a6b"}, ] [package.extras] @@ -4293,4 +4288,4 @@ zstandard = ["zstandard"] [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "5becf154a072fa31d9cefaee2adaacca1e2315579fd821a4567f41b075ed3b1d" +content-hash = "6e7fbfc5ac9579e08a6deccb0dd3d80bf11bc3113302772b07c8bb3329a0ae72" diff --git a/pyproject.toml b/pyproject.toml index f823155382..9fcb91b0c3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -79,7 +79,7 @@ pre-commit = "3.5.0" fastavro = "1.9.3" coverage = { version = "^7.4.1", extras = ["toml"] } requests-mock = "1.11.0" -moto = { version = "^4.2.13", extras = ["server"] } +moto = { version = "^5.0.1", extras = ["server"] } typing-extensions = "4.9.0" pytest-mock = "3.12.0" pyspark = "3.5.0" diff --git a/tests/catalog/test_dynamodb.py b/tests/catalog/test_dynamodb.py index bc801463c5..2cb7c46a15 100644 --- a/tests/catalog/test_dynamodb.py +++ b/tests/catalog/test_dynamodb.py @@ -20,7 +20,7 @@ import boto3 import pyarrow as pa import pytest -from moto import mock_dynamodb +from moto import mock_aws from pyiceberg.catalog import METADATA_LOCATION, TABLE_TYPE from pyiceberg.catalog.dynamodb import ( @@ -45,7 +45,7 @@ from tests.conftest import BUCKET_NAME, TABLE_METADATA_LOCATION_REGEX -@mock_dynamodb +@mock_aws def test_create_dynamodb_catalog_with_table_name(_dynamodb, _bucket_initialize: None) -> None: # type: ignore DynamoDbCatalog("test_ddb_catalog") response = _dynamodb.describe_table(TableName=DYNAMODB_TABLE_NAME_DEFAULT) @@ -59,7 +59,7 @@ def test_create_dynamodb_catalog_with_table_name(_dynamodb, _bucket_initialize: assert response["Table"]["TableStatus"] == ACTIVE -@mock_dynamodb +@mock_aws def test_create_table_with_database_location( _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str ) -> None: @@ -72,7 +72,7 @@ def test_create_table_with_database_location( assert TABLE_METADATA_LOCATION_REGEX.match(table.metadata_location) -@mock_dynamodb +@mock_aws def test_create_table_with_pyarrow_schema( _bucket_initialize: None, moto_endpoint_url: str, @@ -89,7 +89,7 @@ def test_create_table_with_pyarrow_schema( assert TABLE_METADATA_LOCATION_REGEX.match(table.metadata_location) -@mock_dynamodb +@mock_aws def test_create_table_with_default_warehouse( _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str ) -> None: @@ -102,7 +102,7 @@ def test_create_table_with_default_warehouse( assert TABLE_METADATA_LOCATION_REGEX.match(table.metadata_location) -@mock_dynamodb +@mock_aws def test_create_table_with_given_location( _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str ) -> None: @@ -117,7 +117,7 @@ def test_create_table_with_given_location( assert TABLE_METADATA_LOCATION_REGEX.match(table.metadata_location) -@mock_dynamodb +@mock_aws def test_create_table_with_no_location( _bucket_initialize: None, table_schema_nested: Schema, database_name: str, table_name: str ) -> None: @@ -128,7 +128,7 @@ def test_create_table_with_no_location( test_catalog.create_table(identifier=identifier, schema=table_schema_nested) -@mock_dynamodb +@mock_aws def 
test_create_table_with_strips( _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str ) -> None: @@ -141,7 +141,7 @@ def test_create_table_with_strips( assert TABLE_METADATA_LOCATION_REGEX.match(table.metadata_location) -@mock_dynamodb +@mock_aws def test_create_table_with_strips_bucket_root( _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str ) -> None: @@ -154,7 +154,7 @@ def test_create_table_with_strips_bucket_root( assert TABLE_METADATA_LOCATION_REGEX.match(table_strip.metadata_location) -@mock_dynamodb +@mock_aws def test_create_table_with_no_database( _bucket_initialize: None, table_schema_nested: Schema, database_name: str, table_name: str ) -> None: @@ -164,7 +164,7 @@ def test_create_table_with_no_database( test_catalog.create_table(identifier=identifier, schema=table_schema_nested) -@mock_dynamodb +@mock_aws def test_create_duplicated_table( _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str ) -> None: @@ -176,7 +176,7 @@ def test_create_duplicated_table( test_catalog.create_table(identifier, table_schema_nested) -@mock_dynamodb +@mock_aws def test_load_table( _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str ) -> None: @@ -190,7 +190,7 @@ def test_load_table( assert TABLE_METADATA_LOCATION_REGEX.match(table.metadata_location) -@mock_dynamodb +@mock_aws def test_load_table_from_self_identifier( _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str ) -> None: @@ -205,7 +205,7 @@ def test_load_table_from_self_identifier( assert TABLE_METADATA_LOCATION_REGEX.match(table.metadata_location) -@mock_dynamodb +@mock_aws def test_load_non_exist_table(_bucket_initialize: None, database_name: str, table_name: str) -> None: identifier = (database_name, table_name) test_catalog = DynamoDbCatalog("test_ddb_catalog", warehouse=f"s3://{BUCKET_NAME}") @@ -214,7 +214,7 @@ def test_load_non_exist_table(_bucket_initialize: None, database_name: str, tabl test_catalog.load_table(identifier) -@mock_dynamodb +@mock_aws def test_drop_table( _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str ) -> None: @@ -231,7 +231,7 @@ def test_drop_table( test_catalog.load_table(identifier) -@mock_dynamodb +@mock_aws def test_drop_table_from_self_identifier( _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str ) -> None: @@ -250,7 +250,7 @@ def test_drop_table_from_self_identifier( test_catalog.load_table(table.identifier) -@mock_dynamodb +@mock_aws def test_drop_non_exist_table(_bucket_initialize: None, database_name: str, table_name: str) -> None: identifier = (database_name, table_name) test_catalog = DynamoDbCatalog("test_ddb_catalog", warehouse=f"s3://{BUCKET_NAME}") @@ -258,7 +258,7 @@ def test_drop_non_exist_table(_bucket_initialize: None, database_name: str, tabl test_catalog.drop_table(identifier) -@mock_dynamodb +@mock_aws def test_rename_table( _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str ) -> None: @@ -281,7 +281,7 @@ def test_rename_table( test_catalog.load_table(identifier) -@mock_dynamodb +@mock_aws def test_rename_table_from_self_identifier( _bucket_initialize: None, moto_endpoint_url: str, 
table_schema_nested: Schema, database_name: str, table_name: str ) -> None: @@ -306,7 +306,7 @@ def test_rename_table_from_self_identifier( test_catalog.load_table(table.identifier) -@mock_dynamodb +@mock_aws def test_fail_on_rename_table_with_missing_required_params(_bucket_initialize: None, database_name: str, table_name: str) -> None: new_database_name = f"{database_name}_new" new_table_name = f"{table_name}_new" @@ -330,7 +330,7 @@ def test_fail_on_rename_table_with_missing_required_params(_bucket_initialize: N test_catalog.rename_table(identifier, new_identifier) -@mock_dynamodb +@mock_aws def test_fail_on_rename_non_iceberg_table( _dynamodb: boto3.client, _bucket_initialize: None, database_name: str, table_name: str ) -> None: @@ -359,7 +359,7 @@ def test_fail_on_rename_non_iceberg_table( test_catalog.rename_table(identifier, new_identifier) -@mock_dynamodb +@mock_aws def test_list_tables( _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_list: List[str] ) -> None: @@ -372,7 +372,7 @@ def test_list_tables( assert (database_name, table_name) in loaded_table_list -@mock_dynamodb +@mock_aws def test_list_namespaces(_bucket_initialize: None, database_list: List[str]) -> None: test_catalog = DynamoDbCatalog("test_ddb_catalog") for database_name in database_list: @@ -382,7 +382,7 @@ def test_list_namespaces(_bucket_initialize: None, database_list: List[str]) -> assert (database_name,) in loaded_database_list -@mock_dynamodb +@mock_aws def test_create_namespace_no_properties(_bucket_initialize: None, database_name: str) -> None: test_catalog = DynamoDbCatalog("test_ddb_catalog") test_catalog.create_namespace(namespace=database_name) @@ -393,7 +393,7 @@ def test_create_namespace_no_properties(_bucket_initialize: None, database_name: assert properties == {} -@mock_dynamodb +@mock_aws def test_create_namespace_with_comment_and_location(_bucket_initialize: None, database_name: str) -> None: test_location = f"s3://{BUCKET_NAME}/{database_name}.db" test_properties = { @@ -410,7 +410,7 @@ def test_create_namespace_with_comment_and_location(_bucket_initialize: None, da assert properties["location"] == test_location -@mock_dynamodb +@mock_aws def test_create_duplicated_namespace(_bucket_initialize: None, database_name: str) -> None: test_catalog = DynamoDbCatalog("test_ddb_catalog") test_catalog.create_namespace(namespace=database_name) @@ -421,7 +421,7 @@ def test_create_duplicated_namespace(_bucket_initialize: None, database_name: st test_catalog.create_namespace(namespace=database_name, properties={"test": "test"}) -@mock_dynamodb +@mock_aws def test_drop_namespace(_bucket_initialize: None, database_name: str) -> None: test_catalog = DynamoDbCatalog("test_ddb_catalog") test_catalog.create_namespace(namespace=database_name) @@ -433,7 +433,7 @@ def test_drop_namespace(_bucket_initialize: None, database_name: str) -> None: assert len(loaded_database_list) == 0 -@mock_dynamodb +@mock_aws def test_drop_non_empty_namespace( _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str ) -> None: @@ -446,14 +446,14 @@ def test_drop_non_empty_namespace( test_catalog.drop_namespace(database_name) -@mock_dynamodb +@mock_aws def test_drop_non_exist_namespace(_bucket_initialize: None, database_name: str) -> None: test_catalog = DynamoDbCatalog("test_ddb_catalog") with pytest.raises(NoSuchNamespaceError): test_catalog.drop_namespace(database_name) -@mock_dynamodb +@mock_aws def 
test_load_namespace_properties(_bucket_initialize: None, database_name: str) -> None: test_location = f"s3://{BUCKET_NAME}/{database_name}.db" test_properties = { @@ -471,14 +471,14 @@ def test_load_namespace_properties(_bucket_initialize: None, database_name: str) assert v == test_properties[k] -@mock_dynamodb +@mock_aws def test_load_non_exist_namespace_properties(_bucket_initialize: None, database_name: str) -> None: test_catalog = DynamoDbCatalog("test_ddb_catalog") with pytest.raises(NoSuchNamespaceError): test_catalog.load_namespace_properties(database_name) -@mock_dynamodb +@mock_aws def test_update_namespace_properties(_bucket_initialize: None, database_name: str) -> None: test_properties = { "comment": "this is a test description", @@ -503,7 +503,7 @@ def test_update_namespace_properties(_bucket_initialize: None, database_name: st test_catalog.drop_namespace(database_name) -@mock_dynamodb +@mock_aws def test_load_empty_namespace_properties(_bucket_initialize: None, database_name: str) -> None: test_catalog = DynamoDbCatalog("test_ddb_catalog") test_catalog.create_namespace(database_name) @@ -511,7 +511,7 @@ def test_load_empty_namespace_properties(_bucket_initialize: None, database_name assert listed_properties == {} -@mock_dynamodb +@mock_aws def test_update_namespace_properties_overlap_update_removal(_bucket_initialize: None, database_name: str) -> None: test_properties = { "comment": "this is a test description", diff --git a/tests/catalog/test_glue.py b/tests/catalog/test_glue.py index 63a213f94f..270d2251ba 100644 --- a/tests/catalog/test_glue.py +++ b/tests/catalog/test_glue.py @@ -20,7 +20,7 @@ import boto3 import pyarrow as pa import pytest -from moto import mock_glue +from moto import mock_aws from pyiceberg.catalog.glue import GlueCatalog from pyiceberg.exceptions import ( @@ -37,7 +37,7 @@ from tests.conftest import BUCKET_NAME, TABLE_METADATA_LOCATION_REGEX -@mock_glue +@mock_aws def test_create_table_with_database_location( _glue: boto3.client, _bucket_initialize: None, @@ -72,7 +72,7 @@ def test_create_table_with_database_location( assert storage_descriptor["Location"] == f"s3://{BUCKET_NAME}/{database_name}.db/{table_name}" -@mock_glue +@mock_aws def test_create_table_with_default_warehouse( _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str ) -> None: @@ -86,7 +86,7 @@ def test_create_table_with_default_warehouse( assert test_catalog._parse_metadata_version(table.metadata_location) == 0 -@mock_glue +@mock_aws def test_create_table_with_given_location( _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str ) -> None: @@ -102,7 +102,7 @@ def test_create_table_with_given_location( assert test_catalog._parse_metadata_version(table.metadata_location) == 0 -@mock_glue +@mock_aws def test_create_table_with_pyarrow_schema( _bucket_initialize: None, moto_endpoint_url: str, @@ -124,7 +124,7 @@ def test_create_table_with_pyarrow_schema( assert test_catalog._parse_metadata_version(table.metadata_location) == 0 -@mock_glue +@mock_aws def test_create_table_with_no_location( _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str ) -> None: @@ -136,7 +136,7 @@ def test_create_table_with_no_location( test_catalog.create_table(identifier=identifier, schema=table_schema_nested) -@mock_glue +@mock_aws def test_create_table_with_strips( _bucket_initialize: None, moto_endpoint_url: str, 
table_schema_nested: Schema, database_name: str, table_name: str ) -> None: @@ -150,7 +150,7 @@ def test_create_table_with_strips( assert test_catalog._parse_metadata_version(table.metadata_location) == 0 -@mock_glue +@mock_aws def test_create_table_with_strips_bucket_root( _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str ) -> None: @@ -164,7 +164,7 @@ def test_create_table_with_strips_bucket_root( assert test_catalog._parse_metadata_version(table_strip.metadata_location) == 0 -@mock_glue +@mock_aws def test_create_table_with_no_database( _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str ) -> None: @@ -174,7 +174,7 @@ def test_create_table_with_no_database( test_catalog.create_table(identifier=identifier, schema=table_schema_nested) -@mock_glue +@mock_aws def test_create_duplicated_table( _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str ) -> None: @@ -186,7 +186,7 @@ def test_create_duplicated_table( test_catalog.create_table(identifier, table_schema_nested) -@mock_glue +@mock_aws def test_load_table( _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str ) -> None: @@ -201,7 +201,7 @@ def test_load_table( assert test_catalog._parse_metadata_version(table.metadata_location) == 0 -@mock_glue +@mock_aws def test_load_table_from_self_identifier( _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str ) -> None: @@ -215,7 +215,7 @@ def test_load_table_from_self_identifier( assert TABLE_METADATA_LOCATION_REGEX.match(table.metadata_location) -@mock_glue +@mock_aws def test_load_non_exist_table(_bucket_initialize: None, moto_endpoint_url: str, database_name: str, table_name: str) -> None: identifier = (database_name, table_name) test_catalog = GlueCatalog("glue", **{"s3.endpoint": moto_endpoint_url, "warehouse": f"s3://{BUCKET_NAME}/"}) @@ -224,7 +224,7 @@ def test_load_non_exist_table(_bucket_initialize: None, moto_endpoint_url: str, test_catalog.load_table(identifier) -@mock_glue +@mock_aws def test_drop_table( _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str ) -> None: @@ -241,7 +241,7 @@ def test_drop_table( test_catalog.load_table(identifier) -@mock_glue +@mock_aws def test_drop_table_from_self_identifier( _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str ) -> None: @@ -260,7 +260,7 @@ def test_drop_table_from_self_identifier( test_catalog.load_table(table.identifier) -@mock_glue +@mock_aws def test_drop_non_exist_table(_bucket_initialize: None, moto_endpoint_url: str, database_name: str, table_name: str) -> None: identifier = (database_name, table_name) test_catalog = GlueCatalog("glue", **{"s3.endpoint": moto_endpoint_url, "warehouse": f"s3://{BUCKET_NAME}/"}) @@ -268,7 +268,7 @@ def test_drop_non_exist_table(_bucket_initialize: None, moto_endpoint_url: str, test_catalog.drop_table(identifier) -@mock_glue +@mock_aws def test_rename_table( _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str ) -> None: @@ -292,7 +292,7 @@ def test_rename_table( test_catalog.load_table(identifier) -@mock_glue +@mock_aws def test_rename_table_from_self_identifier( _bucket_initialize: None, moto_endpoint_url: 
str, table_schema_nested: Schema, database_name: str, table_name: str ) -> None: @@ -317,7 +317,7 @@ def test_rename_table_from_self_identifier( test_catalog.load_table(table.identifier) -@mock_glue +@mock_aws def test_rename_table_no_params( _glue: boto3.client, _bucket_initialize: None, moto_endpoint_url: str, database_name: str, table_name: str ) -> None: @@ -336,7 +336,7 @@ def test_rename_table_no_params( test_catalog.rename_table(identifier, new_identifier) -@mock_glue +@mock_aws def test_rename_non_iceberg_table( _glue: boto3.client, _bucket_initialize: None, moto_endpoint_url: str, database_name: str, table_name: str ) -> None: @@ -359,7 +359,7 @@ def test_rename_non_iceberg_table( test_catalog.rename_table(identifier, new_identifier) -@mock_glue +@mock_aws def test_list_tables( _bucket_initialize: None, moto_endpoint_url: str, @@ -377,7 +377,7 @@ def test_list_tables( assert (database_name, table_name) in loaded_table_list -@mock_glue +@mock_aws def test_list_namespaces(_bucket_initialize: None, moto_endpoint_url: str, database_list: List[str]) -> None: test_catalog = GlueCatalog("glue", **{"s3.endpoint": moto_endpoint_url}) for database_name in database_list: @@ -387,7 +387,7 @@ def test_list_namespaces(_bucket_initialize: None, moto_endpoint_url: str, datab assert (database_name,) in loaded_database_list -@mock_glue +@mock_aws def test_create_namespace_no_properties(_bucket_initialize: None, moto_endpoint_url: str, database_name: str) -> None: test_catalog = GlueCatalog("glue", **{"s3.endpoint": moto_endpoint_url}) test_catalog.create_namespace(namespace=database_name) @@ -398,7 +398,7 @@ def test_create_namespace_no_properties(_bucket_initialize: None, moto_endpoint_ assert properties == {} -@mock_glue +@mock_aws def test_create_namespace_with_comment_and_location(_bucket_initialize: None, moto_endpoint_url: str, database_name: str) -> None: test_location = f"s3://{BUCKET_NAME}/{database_name}.db" test_properties = { @@ -415,7 +415,7 @@ def test_create_namespace_with_comment_and_location(_bucket_initialize: None, mo assert properties["location"] == test_location -@mock_glue +@mock_aws def test_create_duplicated_namespace(_bucket_initialize: None, moto_endpoint_url: str, database_name: str) -> None: test_catalog = GlueCatalog("glue", **{"s3.endpoint": moto_endpoint_url}) test_catalog.create_namespace(namespace=database_name) @@ -426,7 +426,7 @@ def test_create_duplicated_namespace(_bucket_initialize: None, moto_endpoint_url test_catalog.create_namespace(namespace=database_name, properties={"test": "test"}) -@mock_glue +@mock_aws def test_drop_namespace(_bucket_initialize: None, moto_endpoint_url: str, database_name: str) -> None: test_catalog = GlueCatalog("glue", **{"s3.endpoint": moto_endpoint_url}) test_catalog.create_namespace(namespace=database_name) @@ -438,7 +438,7 @@ def test_drop_namespace(_bucket_initialize: None, moto_endpoint_url: str, databa assert len(loaded_database_list) == 0 -@mock_glue +@mock_aws def test_drop_non_empty_namespace( _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str ) -> None: @@ -451,14 +451,14 @@ def test_drop_non_empty_namespace( test_catalog.drop_namespace(database_name) -@mock_glue +@mock_aws def test_drop_non_exist_namespace(_bucket_initialize: None, moto_endpoint_url: str, database_name: str) -> None: test_catalog = GlueCatalog("glue", **{"s3.endpoint": moto_endpoint_url}) with pytest.raises(NoSuchNamespaceError): test_catalog.drop_namespace(database_name) -@mock_glue 
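# Note on the decorator swaps throughout this patch: moto >= 5 consolidates the
# old per-service decorators (mock_glue, mock_dynamodb, ...) into the single
# mock_aws entry point, which is all this migration relies on. A minimal sketch
# of the new pattern, assuming moto >= 5 and boto3 are installed:
#
#     import boto3
#     from moto import mock_aws
#
#     @mock_aws
#     def test_sketch() -> None:
#         glue = boto3.client("glue", region_name="us-east-1")
#         glue.create_database(DatabaseInput={"Name": "db"})
#
# mock_aws also works as a context manager, which is how the conftest.py
# fixtures later in this patch use it.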
+@mock_aws def test_load_namespace_properties(_bucket_initialize: None, moto_endpoint_url: str, database_name: str) -> None: test_location = f"s3://{BUCKET_NAME}/{database_name}.db" test_properties = { @@ -476,14 +476,14 @@ def test_load_namespace_properties(_bucket_initialize: None, moto_endpoint_url: assert v == test_properties[k] -@mock_glue +@mock_aws def test_load_non_exist_namespace_properties(_bucket_initialize: None, moto_endpoint_url: str, database_name: str) -> None: test_catalog = GlueCatalog("glue", **{"s3.endpoint": moto_endpoint_url}) with pytest.raises(NoSuchNamespaceError): test_catalog.load_namespace_properties(database_name) -@mock_glue +@mock_aws def test_update_namespace_properties(_bucket_initialize: None, moto_endpoint_url: str, database_name: str) -> None: test_properties = { "comment": "this is a test description", @@ -508,7 +508,7 @@ def test_update_namespace_properties(_bucket_initialize: None, moto_endpoint_url test_catalog.drop_namespace(database_name) -@mock_glue +@mock_aws def test_load_empty_namespace_properties(_bucket_initialize: None, moto_endpoint_url: str, database_name: str) -> None: test_catalog = GlueCatalog("glue", **{"s3.endpoint": moto_endpoint_url}) test_catalog.create_namespace(database_name) @@ -516,7 +516,7 @@ def test_load_empty_namespace_properties(_bucket_initialize: None, moto_endpoint assert listed_properties == {} -@mock_glue +@mock_aws def test_load_default_namespace_properties(_glue, _bucket_initialize: None, moto_endpoint_url: str, database_name: str) -> None: # type: ignore # simulate creating database with default settings through AWS Glue Web Console _glue.create_database(DatabaseInput={"Name": database_name}) @@ -525,7 +525,7 @@ def test_load_default_namespace_properties(_glue, _bucket_initialize: None, moto assert listed_properties == {} -@mock_glue +@mock_aws def test_update_namespace_properties_overlap_update_removal( _bucket_initialize: None, moto_endpoint_url: str, database_name: str ) -> None: @@ -546,7 +546,7 @@ def test_update_namespace_properties_overlap_update_removal( assert test_catalog.load_namespace_properties(database_name) == test_properties -@mock_glue +@mock_aws def test_passing_profile_name() -> None: session_properties: Dict[str, Any] = { "aws_access_key_id": "abc", @@ -566,7 +566,7 @@ def test_passing_profile_name() -> None: assert test_catalog.glue is mock_session().client() -@mock_glue +@mock_aws def test_commit_table_update_schema( _glue: boto3.client, _bucket_initialize: None, @@ -619,7 +619,7 @@ def test_commit_table_update_schema( assert storage_descriptor["Location"] == f"s3://{BUCKET_NAME}/{database_name}.db/{table_name}" -@mock_glue +@mock_aws def test_commit_table_properties( _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str ) -> None: diff --git a/tests/conftest.py b/tests/conftest.py index 063c0646fd..c0c3d10273 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -45,7 +45,7 @@ import boto3 import pytest -from moto import mock_dynamodb, mock_glue +from moto import mock_aws from pyiceberg import schema from pyiceberg.catalog import Catalog @@ -1784,23 +1784,24 @@ def moto_endpoint_url(moto_server: "ThreadedMotoServer") -> str: return _url -@pytest.fixture(name="_s3") +@pytest.fixture(name="_s3", scope="function") def fixture_s3(_aws_credentials: None, moto_endpoint_url: str) -> Generator[boto3.client, None, None]: """Yield a mocked S3 client.""" - yield boto3.client("s3", region_name="us-east-1", endpoint_url=moto_endpoint_url) + 
with mock_aws(): + yield boto3.client("s3", region_name="us-east-1", endpoint_url=moto_endpoint_url) @pytest.fixture(name="_glue") def fixture_glue(_aws_credentials: None) -> Generator[boto3.client, None, None]: """Yield a mocked glue client.""" - with mock_glue(): + with mock_aws(): yield boto3.client("glue", region_name="us-east-1") @pytest.fixture(name="_dynamodb") def fixture_dynamodb(_aws_credentials: None) -> Generator[boto3.client, None, None]: """Yield a mocked DynamoDB client.""" - with mock_dynamodb(): + with mock_aws(): yield boto3.client("dynamodb", region_name="us-east-1") @@ -1892,7 +1893,8 @@ def get_s3_path(bucket_name: str, database_name: Optional[str] = None, table_nam @pytest.fixture(name="s3", scope="module") def fixture_s3_client() -> boto3.client: - yield boto3.client("s3") + with mock_aws(): + yield boto3.client("s3") def clean_up(test_catalog: Catalog) -> None: From 937eb2d91e83bf051fa89fd704b69cd4f5e1f2e1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 5 Feb 2024 20:34:09 -0800 Subject: [PATCH 079/169] Build: Bump mkdocs-material from 9.5.6 to 9.5.7 (#369) --- mkdocs/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mkdocs/requirements.txt b/mkdocs/requirements.txt index c89ac82c74..3638d65b20 100644 --- a/mkdocs/requirements.txt +++ b/mkdocs/requirements.txt @@ -23,6 +23,6 @@ mkdocstrings-python==1.8.0 mkdocs-literate-nav==0.6.1 mkdocs-autorefs==0.5.0 mkdocs-gen-files==0.5.0 -mkdocs-material==9.5.6 +mkdocs-material==9.5.7 mkdocs-material-extensions==1.3.1 mkdocs-section-index==0.3.8 From ad0fbaafd08b0a579aed863dc82135aa68c608bf Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 5 Feb 2024 20:34:54 -0800 Subject: [PATCH 080/169] Build: Bump mypy-boto3-glue from 1.34.32 to 1.34.35 (#371) --- poetry.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/poetry.lock b/poetry.lock index 9d34a7a435..d893480485 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2225,13 +2225,13 @@ files = [ [[package]] name = "mypy-boto3-glue" -version = "1.34.32" -description = "Type annotations for boto3.Glue 1.34.32 service generated with mypy-boto3-builder 7.23.1" +version = "1.34.35" +description = "Type annotations for boto3.Glue 1.34.35 service generated with mypy-boto3-builder 7.23.1" optional = true python-versions = ">=3.8" files = [ - {file = "mypy-boto3-glue-1.34.32.tar.gz", hash = "sha256:f23fddd06613d699ff7a22bb4fa79478c903e07cd5dd32e70278da49d6212e65"}, - {file = "mypy_boto3_glue-1.34.32-py3-none-any.whl", hash = "sha256:7a1a0e4dfab2870cff5b0984c39d3fa4fd1820ea6fb40f807b7150e4d624d9c9"}, + {file = "mypy-boto3-glue-1.34.35.tar.gz", hash = "sha256:f8abe4f2e07d299a7b9b0dec30033a9877414e790ee49f274950a5b6d9dd3036"}, + {file = "mypy_boto3_glue-1.34.35-py3-none-any.whl", hash = "sha256:361b21c108ffae7868a25256aaeeca6f28fd1072cbeebebb9c4c4dbc3cbdc721"}, ] [package.dependencies] From f830840947321b9cafb4c24746c5b3e729e8346e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 5 Feb 2024 20:47:23 -0800 Subject: [PATCH 081/169] Build: Bump adlfs from 2024.1.0 to 2024.2.0 (#372) --- poetry.lock | 8 ++++---- pyproject.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index d893480485..e6e679a4b2 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2,13 +2,13 @@ [[package]] name = "adlfs" -version = "2024.1.0" 
+version = "2024.2.0" description = "Access Azure Datalake Gen1 with fsspec and dask" optional = true python-versions = ">=3.8" files = [ - {file = "adlfs-2024.1.0-py3-none-any.whl", hash = "sha256:7f5982785c7af34da879463ab2a046b0875116c83f97ed3b810e956244276764"}, - {file = "adlfs-2024.1.0.tar.gz", hash = "sha256:bece62abb8aa29e3a3dd2a5453de61133f481d743b6f65aab01ca1c1a0157b4c"}, + {file = "adlfs-2024.2.0-py3-none-any.whl", hash = "sha256:6514fb147032eea843e4b773c557df90cf6a929185edc7c97f7a12b91bfc1fa1"}, + {file = "adlfs-2024.2.0.tar.gz", hash = "sha256:860f5ddbd7f3c2553d84a101717dc5736e823305e0d51e8c0058bc85a7fa304d"}, ] [package.dependencies] @@ -4288,4 +4288,4 @@ zstandard = ["zstandard"] [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "6e7fbfc5ac9579e08a6deccb0dd3d80bf11bc3113302772b07c8bb3329a0ae72" +content-hash = "2d2b08bb4e99f940d5a428593438ea5fe28eaec7962b3171ff340195463379d8" diff --git a/pyproject.toml b/pyproject.toml index 9fcb91b0c3..dcc91fbbe3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -66,7 +66,7 @@ thrift = { version = ">=0.13.0,<1.0.0", optional = true } mypy-boto3-glue = { version = ">=1.28.18", optional = true } boto3 = { version = ">=1.24.59", optional = true } s3fs = { version = ">=2023.1.0,<2024.1.0", optional = true } -adlfs = { version = ">=2023.1.0,<2024.2.0", optional = true } +adlfs = { version = ">=2023.1.0,<2024.3.0", optional = true } gcsfs = { version = ">=2023.1.0,<2024.1.0", optional = true } psycopg2-binary = { version = ">=2.9.6", optional = true } sqlalchemy = { version = "^2.0.18", optional = true } From cc0fd860a5d4a48edac83e539674735fb3e86015 Mon Sep 17 00:00:00 2001 From: Jay Chia <17691182+jaychia@users.noreply.github.com> Date: Tue, 6 Feb 2024 07:04:38 -0800 Subject: [PATCH 082/169] Add Daft examples and code into PyIceberg docs and Table (#355) * Add Daft integration docs * Add to_daft() implementation * nit: slight amendment to show call * Proper dependency handling for poetry * lints * lint --------- Co-authored-by: Jay Chia Co-authored-by: Fokko Driesprong --- Makefile | 2 +- mkdocs/docs/api.md | 53 +++++++++++++++++++++++ mkdocs/docs/index.md | 1 + poetry.lock | 86 ++++++++++++++++++++++++++++++++++++- pyiceberg/table/__init__.py | 11 +++++ pyproject.toml | 6 +++ 6 files changed, 157 insertions(+), 2 deletions(-) diff --git a/Makefile b/Makefile index 4226614bd0..c3e816ebd5 100644 --- a/Makefile +++ b/Makefile @@ -19,7 +19,7 @@ install-poetry: pip install poetry==1.7.1 install-dependencies: - poetry install -E pyarrow -E hive -E s3fs -E glue -E adlfs -E duckdb -E ray -E sql-postgres -E gcsfs -E sql-sqlite + poetry install -E pyarrow -E hive -E s3fs -E glue -E adlfs -E duckdb -E ray -E sql-postgres -E gcsfs -E sql-sqlite -E daft install: | install-poetry install-dependencies diff --git a/mkdocs/docs/api.md b/mkdocs/docs/api.md index 650d391807..53801922fc 100644 --- a/mkdocs/docs/api.md +++ b/mkdocs/docs/api.md @@ -636,3 +636,56 @@ print(ray_dataset.take(2)) }, ] ``` + +### Daft + +PyIceberg interfaces closely with Daft Dataframes (see also: [Daft integration with Iceberg](https://www.getdaft.io/projects/docs/en/latest/user_guide/integrations/iceberg.html)) which provides a full lazily optimized query engine interface on top of PyIceberg tables. + + + +!!! note "Requirements" + This requires [Daft to be installed](index.md). 
+ + + +A table can be read easily into a Daft Dataframe: + +```python +df = table.to_daft() # equivalent to `daft.read_iceberg(table)` +df = df.where(df["trip_distance"] >= 10.0) +df = df.select("VendorID", "tpep_pickup_datetime", "tpep_dropoff_datetime") +``` + +This returns a Daft Dataframe which is lazily materialized. Printing `df` will display the schema: + +``` +╭──────────┬───────────────────────────────┬───────────────────────────────╮ +│ VendorID ┆ tpep_pickup_datetime ┆ tpep_dropoff_datetime │ +│ --- ┆ --- ┆ --- │ +│ Int64 ┆ Timestamp(Microseconds, None) ┆ Timestamp(Microseconds, None) │ +╰──────────┴───────────────────────────────┴───────────────────────────────╯ + +(No data to display: Dataframe not materialized) +``` + +We can execute the Dataframe to preview the first few rows of the query with `df.show()`. + +This is correctly optimized to take advantage of Iceberg features such as hidden partitioning and file-level statistics for efficient reads. + +```python +df.show(2) +``` + +``` +╭──────────┬───────────────────────────────┬───────────────────────────────╮ +│ VendorID ┆ tpep_pickup_datetime ┆ tpep_dropoff_datetime │ +│ --- ┆ --- ┆ --- │ +│ Int64 ┆ Timestamp(Microseconds, None) ┆ Timestamp(Microseconds, None) │ +╞══════════╪═══════════════════════════════╪═══════════════════════════════╡ +│ 2 ┆ 2008-12-31T23:23:50.000000 ┆ 2009-01-01T00:34:31.000000 │ +├╌╌╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌┤ +│ 2 ┆ 2008-12-31T23:05:03.000000 ┆ 2009-01-01T16:10:18.000000 │ +╰──────────┴───────────────────────────────┴───────────────────────────────╯ + +(Showing first 2 rows) +``` diff --git a/mkdocs/docs/index.md b/mkdocs/docs/index.md index f1c1fa9f51..ffe3ab4681 100644 --- a/mkdocs/docs/index.md +++ b/mkdocs/docs/index.md @@ -51,6 +51,7 @@ You can mix and match optional dependencies depending on your needs: | pandas | Installs both PyArrow and Pandas | | duckdb | Installs both PyArrow and DuckDB | | ray | Installs PyArrow, Pandas, and Ray | +| daft | Installs Daft | | s3fs | S3FS as a FileIO implementation to interact with the object store | | adlfs | ADLFS as a FileIO implementation to interact with the object store | | snappy | Support for snappy Avro compression | diff --git a/poetry.lock b/poetry.lock index e6e679a4b2..8e2241a001 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1157,6 +1157,10 @@ files = [ {file = "fsspec-2023.12.2.tar.gz", hash = "sha256:8548d39e8810b59c38014934f6b31e57f40c1b20f911f4cc2b85389c7e9bf0cb"}, ] +[package.dependencies] +aiohttp = {version = "<4.0.0a0 || >4.0.0a0,<4.0.0a1 || >4.0.0a1", optional = true, markers = "extra == \"http\""} +requests = {version = "*", optional = true, markers = "extra == \"http\""} + [package.extras] abfs = ["adlfs"] adl = ["adlfs"] @@ -1205,6 +1209,38 @@ requests = "*" crc = ["crcmod"] gcsfuse = ["fusepy"] +[[package]] +name = "getdaft" +version = "0.2.12" +description = "A Distributed DataFrame library for large scale complex data processing." 
+optional = true +python-versions = ">=3.7" +files = [ + {file = "getdaft-0.2.12-cp37-abi3-macosx_10_7_x86_64.whl", hash = "sha256:ec3d9693e6d859a7604e7597695e51383bdd4a0100fa892a030510c81dc7a214"}, + {file = "getdaft-0.2.12-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:44f12f57b8dfd35c6a8ff9da749f4243c9558403e0c65720298f65079436e505"}, + {file = "getdaft-0.2.12-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:565713fa393970b3a891e9c02e8fb20ebce9b7b1278a00299e5159b71a76fac7"}, + {file = "getdaft-0.2.12-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed061e93c55cb757061eac48675f4d9bb8960ed3ee1b2df64ade848946875f2f"}, + {file = "getdaft-0.2.12-cp37-abi3-win_amd64.whl", hash = "sha256:7970903d4986acbf0a99e812d71fd12c9b63911cb7b601085354864e032dfa94"}, + {file = "getdaft-0.2.12.tar.gz", hash = "sha256:9a8698cf04b8e6ed11c1cc960498d438748e8d1a316b4da2e7faed62e90f00b8"}, +] + +[package.dependencies] +fsspec = {version = "*", extras = ["http"]} +psutil = "*" +pyarrow = ">=6.0.1" +tqdm = "*" +typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.10\""} + +[package.extras] +all = ["getdaft[aws,azure,gcp,iceberg,numpy,pandas,ray]"] +aws = ["s3fs"] +azure = ["adlfs"] +gcp = ["gcsfs"] +iceberg = ["packaging", "pyiceberg (>=0.4.0)"] +numpy = ["numpy"] +pandas = ["pandas"] +ray = ["packaging", "ray[client,data] (>=2.0.0)"] + [[package]] name = "google-api-core" version = "2.15.0" @@ -2601,6 +2637,34 @@ files = [ {file = "protobuf-4.25.2.tar.gz", hash = "sha256:fe599e175cb347efc8ee524bcd4b902d11f7262c0e569ececcb89995c15f0a5e"}, ] +[[package]] +name = "psutil" +version = "5.9.8" +description = "Cross-platform lib for process and system monitoring in Python." +optional = true +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "psutil-5.9.8-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:26bd09967ae00920df88e0352a91cff1a78f8d69b3ecabbfe733610c0af486c8"}, + {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:05806de88103b25903dff19bb6692bd2e714ccf9e668d050d144012055cbca73"}, + {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:611052c4bc70432ec770d5d54f64206aa7203a101ec273a0cd82418c86503bb7"}, + {file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:50187900d73c1381ba1454cf40308c2bf6f34268518b3f36a9b663ca87e65e36"}, + {file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:02615ed8c5ea222323408ceba16c60e99c3f91639b07da6373fb7e6539abc56d"}, + {file = "psutil-5.9.8-cp27-none-win32.whl", hash = "sha256:36f435891adb138ed3c9e58c6af3e2e6ca9ac2f365efe1f9cfef2794e6c93b4e"}, + {file = "psutil-5.9.8-cp27-none-win_amd64.whl", hash = "sha256:bd1184ceb3f87651a67b2708d4c3338e9b10c5df903f2e3776b62303b26cb631"}, + {file = "psutil-5.9.8-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:aee678c8720623dc456fa20659af736241f575d79429a0e5e9cf88ae0605cc81"}, + {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cb6403ce6d8e047495a701dc7c5bd788add903f8986d523e3e20b98b733e421"}, + {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d06016f7f8625a1825ba3732081d77c94589dca78b7a3fc072194851e88461a4"}, + {file = "psutil-5.9.8-cp36-cp36m-win32.whl", hash = "sha256:7d79560ad97af658a0f6adfef8b834b53f64746d45b403f225b85c5c2c140eee"}, + {file = 
"psutil-5.9.8-cp36-cp36m-win_amd64.whl", hash = "sha256:27cc40c3493bb10de1be4b3f07cae4c010ce715290a5be22b98493509c6299e2"}, + {file = "psutil-5.9.8-cp37-abi3-win32.whl", hash = "sha256:bc56c2a1b0d15aa3eaa5a60c9f3f8e3e565303b465dbf57a1b730e7a2b9844e0"}, + {file = "psutil-5.9.8-cp37-abi3-win_amd64.whl", hash = "sha256:8db4c1b57507eef143a15a6884ca10f7c73876cdf5d51e713151c1236a0e68cf"}, + {file = "psutil-5.9.8-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:d16bbddf0693323b8c6123dd804100241da461e41d6e332fb0ba6058f630f8c8"}, + {file = "psutil-5.9.8.tar.gz", hash = "sha256:6be126e3225486dff286a8fb9a06246a5253f4c7c53b475ea5f5ac934e64194c"}, +] + +[package.extras] +test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] + [[package]] name = "psycopg2-binary" version = "2.9.9" @@ -3908,6 +3972,26 @@ files = [ {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] +[[package]] +name = "tqdm" +version = "4.66.1" +description = "Fast, Extensible Progress Meter" +optional = true +python-versions = ">=3.7" +files = [ + {file = "tqdm-4.66.1-py3-none-any.whl", hash = "sha256:d302b3c5b53d47bce91fea46679d9c3c6508cf6332229aa1e7d8653723793386"}, + {file = "tqdm-4.66.1.tar.gz", hash = "sha256:d88e651f9db8d8551a62556d3cff9e3034274ca5d66e93197cf2490e2dcb69c7"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] +notebook = ["ipywidgets (>=6)"] +slack = ["slack-sdk"] +telegram = ["requests"] + [[package]] name = "typing-extensions" version = "4.9.0" @@ -4271,6 +4355,7 @@ cffi = ["cffi (>=1.11)"] [extras] adlfs = ["adlfs"] +daft = ["getdaft"] duckdb = ["duckdb", "pyarrow"] dynamodb = ["boto3"] gcsfs = ["gcsfs"] @@ -4288,4 +4373,3 @@ zstandard = ["zstandard"] [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "2d2b08bb4e99f940d5a428593438ea5fe28eaec7962b3171ff340195463379d8" diff --git a/pyiceberg/table/__init__.py b/pyiceberg/table/__init__.py index 7692bb0bee..b9d44b7c4d 100644 --- a/pyiceberg/table/__init__.py +++ b/pyiceberg/table/__init__.py @@ -120,6 +120,7 @@ from pyiceberg.utils.datetime import datetime_to_millis if TYPE_CHECKING: + import daft import pandas as pd import pyarrow as pa import ray @@ -1381,6 +1382,16 @@ def to_ray(self) -> ray.data.dataset.Dataset: return ray.data.from_arrow(self.to_arrow()) + def to_daft(self) -> daft.DataFrame: + """Read a Daft DataFrame lazily from this Iceberg table. 
+ + Returns: + daft.DataFrame: Unmaterialized Daft Dataframe created from the Iceberg table + """ + import daft + + return daft.read_iceberg(self) + class MoveOperation(Enum): First = 1 diff --git a/pyproject.toml b/pyproject.toml index dcc91fbbe3..a741ee44fc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -70,6 +70,7 @@ adlfs = { version = ">=2023.1.0,<2024.3.0", optional = true } gcsfs = { version = ">=2023.1.0,<2024.1.0", optional = true } psycopg2-binary = { version = ">=2.9.6", optional = true } sqlalchemy = { version = "^2.0.18", optional = true } +getdaft = { version = ">=0.2.12", optional = true } [tool.poetry.dev-dependencies] pytest = "7.4.4" @@ -105,6 +106,7 @@ pyarrow = ["pyarrow"] pandas = ["pandas", "pyarrow"] duckdb = ["duckdb", "pyarrow"] ray = ["ray", "pyarrow", "pandas"] +daft = ["getdaft"] snappy = ["python-snappy"] hive = ["thrift"] s3fs = ["s3fs"] @@ -263,6 +265,10 @@ ignore_missing_imports = true module = "ray.*" ignore_missing_imports = true +[[tool.mypy.overrides]] +module = "daft.*" +ignore_missing_imports = true + [[tool.mypy.overrides]] module = "pyparsing.*" ignore_missing_imports = true From 8789fc287755c8e617513f7d136b8cccc4800508 Mon Sep 17 00:00:00 2001 From: Sung Yun <107272191+syun64@users.noreply.github.com> Date: Tue, 6 Feb 2024 10:26:56 -0500 Subject: [PATCH 083/169] Bug Fix: Handle null vals from later row_groups in StatsAggregator (#379) * handle null vals correctly in StatsAggregator * return * Apply suggestions from code review Co-authored-by: Fokko Driesprong --------- Co-authored-by: Fokko Driesprong --- pyiceberg/io/pyarrow.py | 14 +++++++++---- tests/io/test_pyarrow.py | 44 ++++++++++++++++++++++++++++++++++++++++ 2 files changed, 54 insertions(+), 4 deletions(-) diff --git a/pyiceberg/io/pyarrow.py b/pyiceberg/io/pyarrow.py index 99c1af5ad6..9726451065 100644 --- a/pyiceberg/io/pyarrow.py +++ b/pyiceberg/io/pyarrow.py @@ -1331,11 +1331,17 @@ def __init__(self, iceberg_type: PrimitiveType, physical_type_string: str, trunc def serialize(self, value: Any) -> bytes: return to_bytes(self.primitive_type, value) - def update_min(self, val: Any) -> None: - self.current_min = val if self.current_min is None else min(val, self.current_min) + def update_min(self, val: Optional[Any]) -> None: + if self.current_min is None: + self.current_min = val + elif val is not None: + self.current_min = min(val, self.current_min) - def update_max(self, val: Any) -> None: - self.current_max = val if self.current_max is None else max(val, self.current_max) + def update_max(self, val: Optional[Any]) -> None: + if self.current_max is None: + self.current_max = val + elif val is not None: + self.current_max = max(val, self.current_max) def min_as_bytes(self) -> Optional[bytes]: if self.current_min is None: diff --git a/tests/io/test_pyarrow.py b/tests/io/test_pyarrow.py index 0628ed4893..745de1a3d3 100644 --- a/tests/io/test_pyarrow.py +++ b/tests/io/test_pyarrow.py @@ -18,6 +18,7 @@ import os import tempfile +from datetime import date from typing import Any, List, Optional from unittest.mock import MagicMock, patch from uuid import uuid4 @@ -59,7 +60,9 @@ ICEBERG_SCHEMA, PyArrowFile, PyArrowFileIO, + StatsAggregator, _ConvertToArrowSchema, + _primitive_to_physical, _read_deletes, expression_to_pyarrow, project_table, @@ -84,6 +87,7 @@ LongType, MapType, NestedField, + PrimitiveType, StringType, StructType, TimestampType, @@ -1666,3 +1670,43 @@ def check_results(location: str, expected_schema: str, expected_netloc: str, exp def test_make_compatible_name() -> None: 
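# Why the update_min/update_max guards above matter (a hedged reading of the
# fix): a later row group with a missing column statistic passes val=None,
# and once a real minimum exists the old one-liner evaluated
# min(None, self.current_min), which raises TypeError in Python 3, e.g.:
#
#     min(None, 3)  # raises TypeError: NoneType and int do not order
#
# The guarded version keeps the running min/max and simply skips None, which
# is exactly what the None-containing sequences in the tests below exercise.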
assert make_compatible_name("label/abc") == "label_x2Fabc" assert make_compatible_name("label?abc") == "label_x3Fabc" + + +@pytest.mark.parametrize( + "vals, primitive_type, expected_result", + [ + ([None, 2, 1], IntegerType(), 1), + ([1, None, 2], IntegerType(), 1), + ([None, None, None], IntegerType(), None), + ([None, date(2024, 2, 4), date(2024, 1, 2)], DateType(), date(2024, 1, 2)), + ([date(2024, 1, 2), None, date(2024, 2, 4)], DateType(), date(2024, 1, 2)), + ([None, None, None], DateType(), None), + ], +) +def test_stats_aggregator_update_min(vals: List[Any], primitive_type: PrimitiveType, expected_result: Any) -> None: + stats = StatsAggregator(primitive_type, _primitive_to_physical(primitive_type)) + + for val in vals: + stats.update_min(val) + + assert stats.current_min == expected_result + + +@pytest.mark.parametrize( + "vals, primitive_type, expected_result", + [ + ([None, 2, 1], IntegerType(), 2), + ([1, None, 2], IntegerType(), 2), + ([None, None, None], IntegerType(), None), + ([None, date(2024, 2, 4), date(2024, 1, 2)], DateType(), date(2024, 2, 4)), + ([date(2024, 1, 2), None, date(2024, 2, 4)], DateType(), date(2024, 2, 4)), + ([None, None, None], DateType(), None), + ], +) +def test_stats_aggregator_update_max(vals: List[Any], primitive_type: PrimitiveType, expected_result: Any) -> None: + stats = StatsAggregator(primitive_type, _primitive_to_physical(primitive_type)) + + for val in vals: + stats.update_max(val) + + assert stats.current_max == expected_result From 12e3353262d15bcccfb0742fd4667d3369c94d2b Mon Sep 17 00:00:00 2001 From: Jonas Haag Date: Tue, 6 Feb 2024 16:44:30 +0100 Subject: [PATCH 084/169] Do not set Parquet version (#377) * Do not set Parquet version * Fix filesizes --------- Co-authored-by: Fokko Driesprong --- pyiceberg/io/pyarrow.py | 2 +- tests/integration/test_writes.py | 18 +++++++++--------- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/pyiceberg/io/pyarrow.py b/pyiceberg/io/pyarrow.py index 9726451065..91d8452eab 100644 --- a/pyiceberg/io/pyarrow.py +++ b/pyiceberg/io/pyarrow.py @@ -1734,7 +1734,7 @@ def write_file(table: Table, tasks: Iterator[WriteTask]) -> Iterator[DataFile]: fo = table.io.new_output(file_path) with fo.create(overwrite=True) as fos: - with pq.ParquetWriter(fos, schema=file_schema, version="1.0", **parquet_writer_kwargs) as writer: + with pq.ParquetWriter(fos, schema=file_schema, **parquet_writer_kwargs) as writer: writer.write_table(task.df) data_file = DataFile( diff --git a/tests/integration/test_writes.py b/tests/integration/test_writes.py index a65d98fc9e..c08916bc68 100644 --- a/tests/integration/test_writes.py +++ b/tests/integration/test_writes.py @@ -357,39 +357,39 @@ def test_summaries(spark: SparkSession, session_catalog: Catalog, arrow_table_wi assert summaries[0] == { 'added-data-files': '1', - 'added-files-size': '5437', + 'added-files-size': '5459', 'added-records': '3', 'total-data-files': '1', 'total-delete-files': '0', 'total-equality-deletes': '0', - 'total-files-size': '5437', + 'total-files-size': '5459', 'total-position-deletes': '0', 'total-records': '3', } assert summaries[1] == { 'added-data-files': '1', - 'added-files-size': '5437', + 'added-files-size': '5459', 'added-records': '3', 'total-data-files': '2', 'total-delete-files': '0', 'total-equality-deletes': '0', - 'total-files-size': '10874', + 'total-files-size': '10918', 'total-position-deletes': '0', 'total-records': '6', } assert summaries[2] == { 'added-data-files': '1', - 'added-files-size': '5437', + 'added-files-size': 
'5459', 'added-records': '3', 'deleted-data-files': '2', 'deleted-records': '6', - 'removed-files-size': '10874', + 'removed-files-size': '10918', 'total-data-files': '1', 'total-delete-files': '0', 'total-equality-deletes': '0', - 'total-files-size': '5437', + 'total-files-size': '5459', 'total-position-deletes': '0', 'total-records': '3', } @@ -555,12 +555,12 @@ def test_summaries_with_only_nulls( assert summaries[1] == { 'added-data-files': '1', - 'added-files-size': '4217', + 'added-files-size': '4239', 'added-records': '2', 'total-data-files': '1', 'total-delete-files': '0', 'total-equality-deletes': '0', - 'total-files-size': '4217', + 'total-files-size': '4239', 'total-position-deletes': '0', 'total-records': '2', } From 66ba19054d60bc7093dfeae4b65441c9a357720a Mon Sep 17 00:00:00 2001 From: Fokko Driesprong Date: Tue, 6 Feb 2024 16:58:20 +0100 Subject: [PATCH 085/169] release: Bump the moto version (#380) Same as in https://github.com/apache/iceberg-python/pull/373 --- .github/workflows/python-release.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/python-release.yml b/.github/workflows/python-release.yml index 03e27f012e..5192d711ce 100644 --- a/.github/workflows/python-release.yml +++ b/.github/workflows/python-release.yml @@ -67,7 +67,7 @@ jobs: # Ignore 32 bit architectures CIBW_ARCHS: "auto64" CIBW_PROJECT_REQUIRES_PYTHON: ">=3.8,<3.12" - CIBW_TEST_REQUIRES: "pytest==7.4.2 moto==4.2.2" + CIBW_TEST_REQUIRES: "pytest==7.4.2 moto==5.0.1" CIBW_TEST_EXTRAS: "s3fs,glue" CIBW_TEST_COMMAND: "pytest {project}/tests/avro/test_decoder.py" # There is an upstream issue with installing on MacOSX From a1192a3846c975f7196d2fb4ce45e61160227b2a Mon Sep 17 00:00:00 2001 From: Fokko Driesprong Date: Wed, 7 Feb 2024 02:36:48 +0100 Subject: [PATCH 086/169] Add `syun64` as collaborator (#383) --- .asf.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.asf.yaml b/.asf.yaml index 2cc41779e3..b1f557e903 100644 --- a/.asf.yaml +++ b/.asf.yaml @@ -44,6 +44,7 @@ github: projects: true collaborators: # Note: the number of collaborators is limited to 10 - ajantha-bhat + - syun64 ghp_branch: gh-pages ghp_path: / From 44b2e218dde9e8ad26623aa3577aa8a80ba62e17 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 7 Feb 2024 06:22:02 +0100 Subject: [PATCH 087/169] Build: Bump ray from 2.7.2 to 2.9.2 (#384) Bumps [ray](https://github.com/ray-project/ray) from 2.7.2 to 2.9.2. - [Release notes](https://github.com/ray-project/ray/releases) - [Commits](https://github.com/ray-project/ray/commits/ray-2.9.2) --- updated-dependencies: - dependency-name: ray dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 110 +++++++++++++------------------------------------ pyproject.toml | 2 +- 2 files changed, 30 insertions(+), 82 deletions(-) diff --git a/poetry.lock b/poetry.lock index 8e2241a001..91804f31b5 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2342,51 +2342,6 @@ files = [ {file = "numpy-1.24.4.tar.gz", hash = "sha256:80f5e3a4e498641401868df4208b74581206afbee7cf7b8329daae82676d9463"}, ] -[[package]] -name = "numpy" -version = "1.26.3" -description = "Fundamental package for array computing in Python" -optional = true -python-versions = ">=3.9" -files = [ - {file = "numpy-1.26.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:806dd64230dbbfaca8a27faa64e2f414bf1c6622ab78cc4264f7f5f028fee3bf"}, - {file = "numpy-1.26.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02f98011ba4ab17f46f80f7f8f1c291ee7d855fcef0a5a98db80767a468c85cd"}, - {file = "numpy-1.26.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d45b3ec2faed4baca41c76617fcdcfa4f684ff7a151ce6fc78ad3b6e85af0a6"}, - {file = "numpy-1.26.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdd2b45bf079d9ad90377048e2747a0c82351989a2165821f0c96831b4a2a54b"}, - {file = "numpy-1.26.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:211ddd1e94817ed2d175b60b6374120244a4dd2287f4ece45d49228b4d529178"}, - {file = "numpy-1.26.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b1240f767f69d7c4c8a29adde2310b871153df9b26b5cb2b54a561ac85146485"}, - {file = "numpy-1.26.3-cp310-cp310-win32.whl", hash = "sha256:21a9484e75ad018974a2fdaa216524d64ed4212e418e0a551a2d83403b0531d3"}, - {file = "numpy-1.26.3-cp310-cp310-win_amd64.whl", hash = "sha256:9e1591f6ae98bcfac2a4bbf9221c0b92ab49762228f38287f6eeb5f3f55905ce"}, - {file = "numpy-1.26.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b831295e5472954104ecb46cd98c08b98b49c69fdb7040483aff799a755a7374"}, - {file = "numpy-1.26.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9e87562b91f68dd8b1c39149d0323b42e0082db7ddb8e934ab4c292094d575d6"}, - {file = "numpy-1.26.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c66d6fec467e8c0f975818c1796d25c53521124b7cfb760114be0abad53a0a2"}, - {file = "numpy-1.26.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f25e2811a9c932e43943a2615e65fc487a0b6b49218899e62e426e7f0a57eeda"}, - {file = "numpy-1.26.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:af36e0aa45e25c9f57bf684b1175e59ea05d9a7d3e8e87b7ae1a1da246f2767e"}, - {file = "numpy-1.26.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:51c7f1b344f302067b02e0f5b5d2daa9ed4a721cf49f070280ac202738ea7f00"}, - {file = "numpy-1.26.3-cp311-cp311-win32.whl", hash = "sha256:7ca4f24341df071877849eb2034948459ce3a07915c2734f1abb4018d9c49d7b"}, - {file = "numpy-1.26.3-cp311-cp311-win_amd64.whl", hash = "sha256:39763aee6dfdd4878032361b30b2b12593fb445ddb66bbac802e2113eb8a6ac4"}, - {file = "numpy-1.26.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a7081fd19a6d573e1a05e600c82a1c421011db7935ed0d5c483e9dd96b99cf13"}, - {file = "numpy-1.26.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12c70ac274b32bc00c7f61b515126c9205323703abb99cd41836e8125ea0043e"}, - {file = "numpy-1.26.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f784e13e598e9594750b2ef6729bcd5a47f6cfe4a12cca13def35e06d8163e3"}, - {file = 
"numpy-1.26.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f24750ef94d56ce6e33e4019a8a4d68cfdb1ef661a52cdaee628a56d2437419"}, - {file = "numpy-1.26.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:77810ef29e0fb1d289d225cabb9ee6cf4d11978a00bb99f7f8ec2132a84e0166"}, - {file = "numpy-1.26.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8ed07a90f5450d99dad60d3799f9c03c6566709bd53b497eb9ccad9a55867f36"}, - {file = "numpy-1.26.3-cp312-cp312-win32.whl", hash = "sha256:f73497e8c38295aaa4741bdfa4fda1a5aedda5473074369eca10626835445511"}, - {file = "numpy-1.26.3-cp312-cp312-win_amd64.whl", hash = "sha256:da4b0c6c699a0ad73c810736303f7fbae483bcb012e38d7eb06a5e3b432c981b"}, - {file = "numpy-1.26.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1666f634cb3c80ccbd77ec97bc17337718f56d6658acf5d3b906ca03e90ce87f"}, - {file = "numpy-1.26.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:18c3319a7d39b2c6a9e3bb75aab2304ab79a811ac0168a671a62e6346c29b03f"}, - {file = "numpy-1.26.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b7e807d6888da0db6e7e75838444d62495e2b588b99e90dd80c3459594e857b"}, - {file = "numpy-1.26.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4d362e17bcb0011738c2d83e0a65ea8ce627057b2fdda37678f4374a382a137"}, - {file = "numpy-1.26.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b8c275f0ae90069496068c714387b4a0eba5d531aace269559ff2b43655edd58"}, - {file = "numpy-1.26.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:cc0743f0302b94f397a4a65a660d4cd24267439eb16493fb3caad2e4389bccbb"}, - {file = "numpy-1.26.3-cp39-cp39-win32.whl", hash = "sha256:9bc6d1a7f8cedd519c4b7b1156d98e051b726bf160715b769106661d567b3f03"}, - {file = "numpy-1.26.3-cp39-cp39-win_amd64.whl", hash = "sha256:867e3644e208c8922a3be26fc6bbf112a035f50f0a86497f98f228c50c607bb2"}, - {file = "numpy-1.26.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3c67423b3703f8fbd90f5adaa37f85b5794d3366948efe9a5190a5f3a83fc34e"}, - {file = "numpy-1.26.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46f47ee566d98849323f01b349d58f2557f02167ee301e5e28809a8c0e27a2d0"}, - {file = "numpy-1.26.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a8474703bffc65ca15853d5fd4d06b18138ae90c17c8d12169968e998e448bb5"}, - {file = "numpy-1.26.3.tar.gz", hash = "sha256:697df43e2b6310ecc9d95f05d5ef20eacc09c7c4ecc9da3f235d39e71b7da1e4"}, -] - [[package]] name = "oauthlib" version = "3.2.2" @@ -3310,35 +3265,31 @@ files = [ [[package]] name = "ray" -version = "2.7.2" +version = "2.9.2" description = "Ray provides a simple, universal API for building distributed applications." 
optional = true -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "ray-2.7.2-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:4c415fc90df8a29cf0d594774a456caecf05103d772d487abb6fdda3397ee68e"}, - {file = "ray-2.7.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0434fe4eb416e5e4f39dbafdd1ead3407f72589cae19d1739b90a35d951e17bc"}, - {file = "ray-2.7.2-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:b8966bc9e8d005600bdfdb1bfe19241ffbf76b3c111d3931cad2200bc2190d58"}, - {file = "ray-2.7.2-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:162f7e637067a6408f3041755b8df649624fb7200dc7566b142cc5877580b6e3"}, - {file = "ray-2.7.2-cp310-cp310-win_amd64.whl", hash = "sha256:9411fc45e94608c78b0bf77e3ed1dfddbafa0c16e12bcec81cbce26c7ab0791c"}, - {file = "ray-2.7.2-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:b0db75f9c9116c80d6846498b099f2738ad8b9081a94c625365c6dc95baf50bc"}, - {file = "ray-2.7.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:20c782af16c9ec5a6ef9c933cea0b2be1a81a90162a4f14415b8692e5a3e1ceb"}, - {file = "ray-2.7.2-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:9a94ee86699fcc42c50ff12b482ad817e71027ceae5c7c26af251931b32ee6d3"}, - {file = "ray-2.7.2-cp311-cp311-manylinux2014_x86_64.whl", hash = "sha256:75cfbf61383b5e6940cced2033c579e59458fd07d20ba0cf6bcf75f62fb101a2"}, - {file = "ray-2.7.2-cp311-cp311-win_amd64.whl", hash = "sha256:33f99896e8eb19bcd4b029d3b22d9c50e56acd9a3774ff2b074e8c6926124cff"}, - {file = "ray-2.7.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:b8fbf0be759f0a575ec1f504327bffb5d2340bca33fc37beafed91cb78bfe5f5"}, - {file = "ray-2.7.2-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:d5bd397b19b4faf8df82c46dd705b17cc976efefda48145b595c97dd117fa79f"}, - {file = "ray-2.7.2-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:ee681a5707d9e91850490f42ee007ed8c1c54ea38f5c831aada347f3cc1d9b03"}, - {file = "ray-2.7.2-cp37-cp37m-win_amd64.whl", hash = "sha256:155ec98c73c55086e855b2e1f7d6154301556c14bb85d69b17cab087bfcf5268"}, - {file = "ray-2.7.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:94817f7ae24b78213cbcafdeb44e44ae7132e617693725e2a1bbac0461a14382"}, - {file = "ray-2.7.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3217cbabb31bbfa87a594aaf27371e722a6d527621ef3b01b3f4603b2997cc0d"}, - {file = "ray-2.7.2-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:19bc3eaabc2635d3a53f43db8198d1a56263a0697401fb7716f90ff60415f2be"}, - {file = "ray-2.7.2-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:158c424cad4480e93a90d9460a8dd3390bafab220220bb8678f54bc1afecda43"}, - {file = "ray-2.7.2-cp38-cp38-win_amd64.whl", hash = "sha256:f1a41c3511ea90fef47b95721e6589427955503a3fcde1615c6371eeb2c515ab"}, - {file = "ray-2.7.2-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:6820a6369536e070a72d8bf439cc7085b17f0c5102fb51860b330bf231382e7e"}, - {file = "ray-2.7.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e226f8739a4726c445fc41dbb41f7440fa202ad97099ee9d35cc8bc91f1d2c5f"}, - {file = "ray-2.7.2-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:29930d38bd563d57e3457f85dddd0347c79018d12ef42db35abc648c127dce81"}, - {file = "ray-2.7.2-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:90eb9415f4abc39ce054f2148dcb97fbb9ae725dd150ea7b030ec9d7cf6859f2"}, - {file = "ray-2.7.2-cp39-cp39-win_amd64.whl", hash = "sha256:8010e9f0ac04d901a96b3876def3899867f705148a7d820c08c02c7322d08217"}, + {file = "ray-2.9.2-cp310-cp310-macosx_10_15_x86_64.whl", hash = 
"sha256:f879522e7d9b809d3aa28fb627ab87344b31cf79e1829b9b67f0581305e2bb84"}, + {file = "ray-2.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ccbcf7f57bf10c52b3ebcec6e8d9114491ef12a20255e70ba0d5f12a81e9391c"}, + {file = "ray-2.9.2-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:a3059e1d4287db33811e7604b4ecc1aa79dc2d745b49e5ec3415060da61cb749"}, + {file = "ray-2.9.2-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:11a0fd3f75ca07f727b1e83c3b2c74e75dc1fe0aba99a416a865f83e7ff23620"}, + {file = "ray-2.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:1e5c4733314bab19d89b373836899e76b2ab839d45f59966b431c89076feaab7"}, + {file = "ray-2.9.2-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:1a734c1c586e666f5024b46405c3df52b634977c9565bca16a01d6fefc457578"}, + {file = "ray-2.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:47975fcb4b6e6cadd35da4b78f3e643f5ab6e99a688d79980ca2f0dca345d034"}, + {file = "ray-2.9.2-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:7d7ed76fa40abe81eefb158d505f046de0614d994fc8027f5b05889824503257"}, + {file = "ray-2.9.2-cp311-cp311-manylinux2014_x86_64.whl", hash = "sha256:83fd7961d39da5ae68731be430891a21a13d0257bc25ab6adf13712297e309fa"}, + {file = "ray-2.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:5929ac8221ba3b6446cd0885a0ade50cfaaecacba771dfeed57ead8b5c6fdd14"}, + {file = "ray-2.9.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:61b5a742f1f249e92893433720423f729018d40ee26a015b6a12b443d0e2e3eb"}, + {file = "ray-2.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:51552b1142944e13ba1da0c44395a627701c563fbe3f6c490001e6e4fd0ee011"}, + {file = "ray-2.9.2-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:8b715b0ad9aa027836ecb7dc33b3a2dfc91c5d9d22a0ddf72c0844df5d641fca"}, + {file = "ray-2.9.2-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:e1a35e2a3de4e3875bd1e76770fb89149adc773193a5e79488db4047ef14ffd0"}, + {file = "ray-2.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:8d97f674c675370550ec4347e7e8dee0f99e38dd8f220ff8acb8ca15c208d73a"}, + {file = "ray-2.9.2-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:efa2c60ab11f41e4d43a227cd6bf491f9f2f8ed820c482c7d8d86a2412b6fd05"}, + {file = "ray-2.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5ebd71ef2e4d76752a1ff048e9d4c22811c7e990e8d4e3b30974b3e4099411b6"}, + {file = "ray-2.9.2-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:d84064ab3aa2868991a98dc6a54cc2221abcaf9406eb95fa2ec0f66006585f92"}, + {file = "ray-2.9.2-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:bc95efd035dcdc2f2b549ce3e13c5abf2043f3136b8a5980d77f4f098a9a6796"}, + {file = "ray-2.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:aea2ad4dbad2d6bd21ba17f7a2fcf762f53d8bcbc30b9d6916245e447a971e48"}, ] [package.dependencies] @@ -3348,26 +3299,22 @@ filelock = "*" frozenlist = "*" jsonschema = "*" msgpack = ">=1.0.0,<2.0.0" -numpy = [ - {version = ">=1.16", markers = "python_version < \"3.9\""}, - {version = ">=1.19.3", markers = "python_version >= \"3.9\""}, -] packaging = "*" protobuf = ">=3.15.3,<3.19.5 || >3.19.5" pyyaml = "*" requests = "*" [package.extras] -air = ["aiohttp (>=3.7)", "aiohttp-cors", "aiorwlock", "colorful", "fastapi", "fsspec", "gpustat (>=1.0.0)", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "numpy (>=1.20)", "opencensus", "pandas", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2)", "requests", "smart-open", "starlette", "tensorboardX (>=1.9)", "uvicorn", "virtualenv (>=20.0.24,<20.21.1)", "watchfiles"] -all = ["aiohttp (>=3.7)", "aiohttp-cors", 
"aiorwlock", "colorful", "dm-tree", "fastapi", "fsspec", "gpustat (>=1.0.0)", "grpcio (!=1.56.0)", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "gymnasium (==0.28.1)", "lz4", "numpy (>=1.20)", "opencensus", "opentelemetry-api", "opentelemetry-exporter-otlp", "opentelemetry-sdk", "pandas", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pyyaml", "ray-cpp (==2.7.2)", "requests", "rich", "scikit-image", "scipy", "smart-open", "starlette", "tensorboardX (>=1.9)", "typer", "uvicorn", "virtualenv (>=20.0.24,<20.21.1)", "watchfiles"] +air = ["aiohttp (>=3.7)", "aiohttp-cors", "aiorwlock", "colorful", "fastapi (<=0.108.0)", "fsspec", "gpustat (>=1.0.0)", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "numpy (>=1.20)", "opencensus", "pandas", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart-open", "starlette", "tensorboardX (>=1.9)", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] +all = ["aiohttp (>=3.7)", "aiohttp-cors", "aiorwlock", "colorful", "dm-tree", "fastapi (<=0.108.0)", "fsspec", "gpustat (>=1.0.0)", "grpcio (!=1.56.0)", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "gymnasium (==0.28.1)", "lz4", "numpy (>=1.20)", "opencensus", "opentelemetry-api", "opentelemetry-exporter-otlp", "opentelemetry-sdk", "pandas", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "pyyaml", "ray-cpp (==2.9.2)", "requests", "rich", "scikit-image", "scipy", "smart-open", "starlette", "tensorboardX (>=1.9)", "typer", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] client = ["grpcio (!=1.56.0)"] -cpp = ["ray-cpp (==2.7.2)"] +cpp = ["ray-cpp (==2.9.2)"] data = ["fsspec", "numpy (>=1.20)", "pandas (>=1.3)", "pyarrow (>=6.0.1)"] -default = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "gpustat (>=1.0.0)", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "opencensus", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pydantic (<2)", "requests", "smart-open", "virtualenv (>=20.0.24,<20.21.1)"] +default = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "gpustat (>=1.0.0)", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "opencensus", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart-open", "virtualenv (>=20.0.24,!=20.21.1)"] observability = ["opentelemetry-api", "opentelemetry-exporter-otlp", "opentelemetry-sdk"] rllib = ["dm-tree", "fsspec", "gymnasium (==0.28.1)", "lz4", "pandas", "pyarrow (>=6.0.1)", "pyyaml", "requests", "rich", "scikit-image", "scipy", "tensorboardX (>=1.9)", "typer"] -serve = ["aiohttp (>=3.7)", "aiohttp-cors", "aiorwlock", "colorful", "fastapi", "gpustat (>=1.0.0)", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "opencensus", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pydantic (<2)", "requests", "smart-open", "starlette", "uvicorn", "virtualenv (>=20.0.24,<20.21.1)", "watchfiles"] -serve-grpc = ["aiohttp (>=3.7)", "aiohttp-cors", "aiorwlock", "colorful", "fastapi", "gpustat (>=1.0.0)", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "opencensus", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pydantic (<2)", "requests", "smart-open", "starlette", "uvicorn", "virtualenv (>=20.0.24,<20.21.1)", "watchfiles"] +serve = ["aiohttp (>=3.7)", "aiohttp-cors", "aiorwlock", "colorful", "fastapi (<=0.108.0)", "gpustat (>=1.0.0)", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "opencensus", 
"prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart-open", "starlette", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] +serve-grpc = ["aiohttp (>=3.7)", "aiohttp-cors", "aiorwlock", "colorful", "fastapi (<=0.108.0)", "gpustat (>=1.0.0)", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "opencensus", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart-open", "starlette", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] train = ["fsspec", "pandas", "pyarrow (>=6.0.1)", "requests", "tensorboardX (>=1.9)"] tune = ["fsspec", "pandas", "pyarrow (>=6.0.1)", "requests", "tensorboardX (>=1.9)"] @@ -4373,3 +4320,4 @@ zstandard = ["zstandard"] [metadata] lock-version = "2.0" python-versions = "^3.8" +content-hash = "d063b6db5f333e4cf881a9778b692e1a9d7926d629141cc8d103520c1fccc6f5" diff --git a/pyproject.toml b/pyproject.toml index a741ee44fc..2717994127 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -60,7 +60,7 @@ zstandard = ">=0.13.0,<1.0.0" pyarrow = { version = ">=9.0.0,<16.0.0", optional = true } pandas = { version = ">=1.0.0,<3.0.0", optional = true } duckdb = { version = ">=0.5.0,<1.0.0", optional = true } -ray = { version = ">=2.0.0,<2.8.0", optional = true } +ray = { version = ">=2.0.0,<2.10.0", optional = true } python-snappy = { version = ">=0.6.0,<1.0.0", optional = true } thrift = { version = ">=0.13.0,<1.0.0", optional = true } mypy-boto3-glue = { version = ">=1.28.18", optional = true } From da8532e070d5a4b0fdb7ac2eaf5a531c2b30ac6a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 7 Feb 2024 07:57:45 +0100 Subject: [PATCH 088/169] Build: Bump boto3 from 1.34.27 to 1.34.34 (#385) Bumps [boto3](https://github.com/boto/boto3) from 1.34.27 to 1.34.34. - [Release notes](https://github.com/boto/boto3/releases) - [Changelog](https://github.com/boto/boto3/blob/develop/CHANGELOG.rst) - [Commits](https://github.com/boto/boto3/compare/1.34.27...1.34.34) --- updated-dependencies: - dependency-name: boto3 dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/poetry.lock b/poetry.lock index 91804f31b5..84a62d6b80 100644 --- a/poetry.lock +++ b/poetry.lock @@ -25,24 +25,24 @@ tests = ["arrow", "dask[dataframe]", "docker", "pytest", "pytest-mock"] [[package]] name = "aiobotocore" -version = "2.11.1" +version = "2.11.2" description = "Async client for aws services using botocore and aiohttp" optional = true python-versions = ">=3.8" files = [ - {file = "aiobotocore-2.11.1-py3-none-any.whl", hash = "sha256:904a7ad7cc8671d662cfd596906dafe839118ea2a66332c37908e3dcfdee1e45"}, - {file = "aiobotocore-2.11.1.tar.gz", hash = "sha256:0b095af50da2d6f94e93ca959e2a4876f0f0d84d534b61b21d8e050832d04ab6"}, + {file = "aiobotocore-2.11.2-py3-none-any.whl", hash = "sha256:487fede588040bfa3a43df945275c28c1c73ca75bf705295adb9fbadd2e89be7"}, + {file = "aiobotocore-2.11.2.tar.gz", hash = "sha256:6dd7352248e3523019c5a54a395d2b1c31080697fc80a9ad2672de4eec8c7abd"}, ] [package.dependencies] aiohttp = ">=3.7.4.post0,<4.0.0" aioitertools = ">=0.5.1,<1.0.0" -botocore = ">=1.33.2,<1.34.28" +botocore = ">=1.33.2,<1.34.35" wrapt = ">=1.10.10,<2.0.0" [package.extras] -awscli = ["awscli (>=1.31.2,<1.32.28)"] -boto3 = ["boto3 (>=1.33.2,<1.34.28)"] +awscli = ["awscli (>=1.31.2,<1.32.35)"] +boto3 = ["boto3 (>=1.33.2,<1.34.35)"] [[package]] name = "aiohttp" @@ -332,17 +332,17 @@ files = [ [[package]] name = "boto3" -version = "1.34.27" +version = "1.34.34" description = "The AWS SDK for Python" optional = false python-versions = ">= 3.8" files = [ - {file = "boto3-1.34.27-py3-none-any.whl", hash = "sha256:3626db4ba9fbb1b58c8fe923da5ed670873b3d881a102956ea19d3b69cd097cc"}, - {file = "boto3-1.34.27.tar.gz", hash = "sha256:ebdd938019f3df2e7b50585353963d4553faf3fbb7b2085c440107fa6caa233b"}, + {file = "boto3-1.34.34-py3-none-any.whl", hash = "sha256:33a8b6d9136fa7427160edb92d2e50f2035f04e9d63a2d1027349053e12626aa"}, + {file = "boto3-1.34.34.tar.gz", hash = "sha256:b2f321e20966f021ec800b7f2c01287a3dd04fc5965acdfbaa9c505a24ca45d1"}, ] [package.dependencies] -botocore = ">=1.34.27,<1.35.0" +botocore = ">=1.34.34,<1.35.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.10.0,<0.11.0" @@ -351,13 +351,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.34.27" +version = "1.34.34" description = "Low-level, data-driven core of boto 3." optional = false python-versions = ">= 3.8" files = [ - {file = "botocore-1.34.27-py3-none-any.whl", hash = "sha256:1c10f247136ad17b6ef1588c1e043e294dbaebdebe9ce84dc56713029f515c53"}, - {file = "botocore-1.34.27.tar.gz", hash = "sha256:a0e68ba264275b358b8c1cca604161f4d9465cf7847d73e929543a9f30ff22d1"}, + {file = "botocore-1.34.34-py3-none-any.whl", hash = "sha256:cd060b0d88ebb2b893f1411c1db7f2ba66cc18e52dcc57ad029564ef5fec437b"}, + {file = "botocore-1.34.34.tar.gz", hash = "sha256:54093dc97372bb7683f5c61a279aa8240408abf3b2cc494ae82a9a90c1b784b5"}, ] [package.dependencies] From 853a77c5d24280a1048ebcef9a9bfe0cedb642ca Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 7 Feb 2024 07:58:19 +0100 Subject: [PATCH 089/169] Build: Bump pydantic from 2.6.0 to 2.6.1 (#386) Bumps [pydantic](https://github.com/pydantic/pydantic) from 2.6.0 to 2.6.1. 
- [Release notes](https://github.com/pydantic/pydantic/releases) - [Changelog](https://github.com/pydantic/pydantic/blob/main/HISTORY.md) - [Commits](https://github.com/pydantic/pydantic/compare/v2.6.0...v2.6.1) --- updated-dependencies: - dependency-name: pydantic dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 168 ++++++++++++++++++++++++++-------------------------- 1 file changed, 84 insertions(+), 84 deletions(-) diff --git a/poetry.lock b/poetry.lock index 84a62d6b80..b3a68cce73 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2812,18 +2812,18 @@ files = [ [[package]] name = "pydantic" -version = "2.6.0" +version = "2.6.1" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.6.0-py3-none-any.whl", hash = "sha256:1440966574e1b5b99cf75a13bec7b20e3512e8a61b894ae252f56275e2c465ae"}, - {file = "pydantic-2.6.0.tar.gz", hash = "sha256:ae887bd94eb404b09d86e4d12f93893bdca79d766e738528c6fa1c849f3c6bcf"}, + {file = "pydantic-2.6.1-py3-none-any.whl", hash = "sha256:0b6a909df3192245cb736509a92ff69e4fef76116feffec68e93a567347bae6f"}, + {file = "pydantic-2.6.1.tar.gz", hash = "sha256:4fd5c182a2488dc63e6d32737ff19937888001e2a6d86e94b3f233104a5d1fa9"}, ] [package.dependencies] annotated-types = ">=0.4.0" -pydantic-core = "2.16.1" +pydantic-core = "2.16.2" typing-extensions = ">=4.6.1" [package.extras] @@ -2831,90 +2831,90 @@ email = ["email-validator (>=2.0.0)"] [[package]] name = "pydantic-core" -version = "2.16.1" +version = "2.16.2" description = "" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.16.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:300616102fb71241ff477a2cbbc847321dbec49428434a2f17f37528721c4948"}, - {file = "pydantic_core-2.16.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5511f962dd1b9b553e9534c3b9c6a4b0c9ded3d8c2be96e61d56f933feef9e1f"}, - {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:98f0edee7ee9cc7f9221af2e1b95bd02810e1c7a6d115cfd82698803d385b28f"}, - {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9795f56aa6b2296f05ac79d8a424e94056730c0b860a62b0fdcfe6340b658cc8"}, - {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c45f62e4107ebd05166717ac58f6feb44471ed450d07fecd90e5f69d9bf03c48"}, - {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:462d599299c5971f03c676e2b63aa80fec5ebc572d89ce766cd11ca8bcb56f3f"}, - {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21ebaa4bf6386a3b22eec518da7d679c8363fb7fb70cf6972161e5542f470798"}, - {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:99f9a50b56713a598d33bc23a9912224fc5d7f9f292444e6664236ae471ddf17"}, - {file = "pydantic_core-2.16.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8ec364e280db4235389b5e1e6ee924723c693cbc98e9d28dc1767041ff9bc388"}, - {file = "pydantic_core-2.16.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:653a5dfd00f601a0ed6654a8b877b18d65ac32c9d9997456e0ab240807be6cf7"}, - {file = "pydantic_core-2.16.1-cp310-none-win32.whl", hash = 
"sha256:1661c668c1bb67b7cec96914329d9ab66755911d093bb9063c4c8914188af6d4"}, - {file = "pydantic_core-2.16.1-cp310-none-win_amd64.whl", hash = "sha256:561be4e3e952c2f9056fba5267b99be4ec2afadc27261505d4992c50b33c513c"}, - {file = "pydantic_core-2.16.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:102569d371fadc40d8f8598a59379c37ec60164315884467052830b28cc4e9da"}, - {file = "pydantic_core-2.16.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:735dceec50fa907a3c314b84ed609dec54b76a814aa14eb90da31d1d36873a5e"}, - {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e83ebbf020be727d6e0991c1b192a5c2e7113eb66e3def0cd0c62f9f266247e4"}, - {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:30a8259569fbeec49cfac7fda3ec8123486ef1b729225222f0d41d5f840b476f"}, - {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:920c4897e55e2881db6a6da151198e5001552c3777cd42b8a4c2f72eedc2ee91"}, - {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f5247a3d74355f8b1d780d0f3b32a23dd9f6d3ff43ef2037c6dcd249f35ecf4c"}, - {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d5bea8012df5bb6dda1e67d0563ac50b7f64a5d5858348b5c8cb5043811c19d"}, - {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ed3025a8a7e5a59817b7494686d449ebfbe301f3e757b852c8d0d1961d6be864"}, - {file = "pydantic_core-2.16.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:06f0d5a1d9e1b7932477c172cc720b3b23c18762ed7a8efa8398298a59d177c7"}, - {file = "pydantic_core-2.16.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:150ba5c86f502c040b822777e2e519b5625b47813bd05f9273a8ed169c97d9ae"}, - {file = "pydantic_core-2.16.1-cp311-none-win32.whl", hash = "sha256:d6cbdf12ef967a6aa401cf5cdf47850559e59eedad10e781471c960583f25aa1"}, - {file = "pydantic_core-2.16.1-cp311-none-win_amd64.whl", hash = "sha256:afa01d25769af33a8dac0d905d5c7bb2d73c7c3d5161b2dd6f8b5b5eea6a3c4c"}, - {file = "pydantic_core-2.16.1-cp311-none-win_arm64.whl", hash = "sha256:1a2fe7b00a49b51047334d84aafd7e39f80b7675cad0083678c58983662da89b"}, - {file = "pydantic_core-2.16.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:0f478ec204772a5c8218e30eb813ca43e34005dff2eafa03931b3d8caef87d51"}, - {file = "pydantic_core-2.16.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f1936ef138bed2165dd8573aa65e3095ef7c2b6247faccd0e15186aabdda7f66"}, - {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99d3a433ef5dc3021c9534a58a3686c88363c591974c16c54a01af7efd741f13"}, - {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bd88f40f2294440d3f3c6308e50d96a0d3d0973d6f1a5732875d10f569acef49"}, - {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3fac641bbfa43d5a1bed99d28aa1fded1984d31c670a95aac1bf1d36ac6ce137"}, - {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:72bf9308a82b75039b8c8edd2be2924c352eda5da14a920551a8b65d5ee89253"}, - {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb4363e6c9fc87365c2bc777a1f585a22f2f56642501885ffc7942138499bf54"}, - {file = 
"pydantic_core-2.16.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:20f724a023042588d0f4396bbbcf4cffd0ddd0ad3ed4f0d8e6d4ac4264bae81e"}, - {file = "pydantic_core-2.16.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:fb4370b15111905bf8b5ba2129b926af9470f014cb0493a67d23e9d7a48348e8"}, - {file = "pydantic_core-2.16.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23632132f1fd608034f1a56cc3e484be00854db845b3a4a508834be5a6435a6f"}, - {file = "pydantic_core-2.16.1-cp312-none-win32.whl", hash = "sha256:b9f3e0bffad6e238f7acc20c393c1ed8fab4371e3b3bc311020dfa6020d99212"}, - {file = "pydantic_core-2.16.1-cp312-none-win_amd64.whl", hash = "sha256:a0b4cfe408cd84c53bab7d83e4209458de676a6ec5e9c623ae914ce1cb79b96f"}, - {file = "pydantic_core-2.16.1-cp312-none-win_arm64.whl", hash = "sha256:d195add190abccefc70ad0f9a0141ad7da53e16183048380e688b466702195dd"}, - {file = "pydantic_core-2.16.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:502c062a18d84452858f8aea1e520e12a4d5228fc3621ea5061409d666ea1706"}, - {file = "pydantic_core-2.16.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d8c032ccee90b37b44e05948b449a2d6baed7e614df3d3f47fe432c952c21b60"}, - {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:920f4633bee43d7a2818e1a1a788906df5a17b7ab6fe411220ed92b42940f818"}, - {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9f5d37ff01edcbace53a402e80793640c25798fb7208f105d87a25e6fcc9ea06"}, - {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:399166f24c33a0c5759ecc4801f040dbc87d412c1a6d6292b2349b4c505effc9"}, - {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ac89ccc39cd1d556cc72d6752f252dc869dde41c7c936e86beac5eb555041b66"}, - {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73802194f10c394c2bedce7a135ba1d8ba6cff23adf4217612bfc5cf060de34c"}, - {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8fa00fa24ffd8c31fac081bf7be7eb495be6d248db127f8776575a746fa55c95"}, - {file = "pydantic_core-2.16.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:601d3e42452cd4f2891c13fa8c70366d71851c1593ed42f57bf37f40f7dca3c8"}, - {file = "pydantic_core-2.16.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:07982b82d121ed3fc1c51faf6e8f57ff09b1325d2efccaa257dd8c0dd937acca"}, - {file = "pydantic_core-2.16.1-cp38-none-win32.whl", hash = "sha256:d0bf6f93a55d3fa7a079d811b29100b019784e2ee6bc06b0bb839538272a5610"}, - {file = "pydantic_core-2.16.1-cp38-none-win_amd64.whl", hash = "sha256:fbec2af0ebafa57eb82c18c304b37c86a8abddf7022955d1742b3d5471a6339e"}, - {file = "pydantic_core-2.16.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a497be217818c318d93f07e14502ef93d44e6a20c72b04c530611e45e54c2196"}, - {file = "pydantic_core-2.16.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:694a5e9f1f2c124a17ff2d0be613fd53ba0c26de588eb4bdab8bca855e550d95"}, - {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d4dfc66abea3ec6d9f83e837a8f8a7d9d3a76d25c9911735c76d6745950e62c"}, - {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8655f55fe68c4685673265a650ef71beb2d31871c049c8b80262026f23605ee3"}, - {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", 
hash = "sha256:21e3298486c4ea4e4d5cc6fb69e06fb02a4e22089304308817035ac006a7f506"}, - {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:71b4a48a7427f14679f0015b13c712863d28bb1ab700bd11776a5368135c7d60"}, - {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10dca874e35bb60ce4f9f6665bfbfad050dd7573596608aeb9e098621ac331dc"}, - {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fa496cd45cda0165d597e9d6f01e36c33c9508f75cf03c0a650018c5048f578e"}, - {file = "pydantic_core-2.16.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5317c04349472e683803da262c781c42c5628a9be73f4750ac7d13040efb5d2d"}, - {file = "pydantic_core-2.16.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:42c29d54ed4501a30cd71015bf982fa95e4a60117b44e1a200290ce687d3e640"}, - {file = "pydantic_core-2.16.1-cp39-none-win32.whl", hash = "sha256:ba07646f35e4e49376c9831130039d1b478fbfa1215ae62ad62d2ee63cf9c18f"}, - {file = "pydantic_core-2.16.1-cp39-none-win_amd64.whl", hash = "sha256:2133b0e412a47868a358713287ff9f9a328879da547dc88be67481cdac529118"}, - {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:d25ef0c33f22649b7a088035fd65ac1ce6464fa2876578df1adad9472f918a76"}, - {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:99c095457eea8550c9fa9a7a992e842aeae1429dab6b6b378710f62bfb70b394"}, - {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b49c604ace7a7aa8af31196abbf8f2193be605db6739ed905ecaf62af31ccae0"}, - {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c56da23034fe66221f2208c813d8aa509eea34d97328ce2add56e219c3a9f41c"}, - {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cebf8d56fee3b08ad40d332a807ecccd4153d3f1ba8231e111d9759f02edfd05"}, - {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:1ae8048cba95f382dba56766525abca438328455e35c283bb202964f41a780b0"}, - {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:780daad9e35b18d10d7219d24bfb30148ca2afc309928e1d4d53de86822593dc"}, - {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c94b5537bf6ce66e4d7830c6993152940a188600f6ae044435287753044a8fe2"}, - {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:adf28099d061a25fbcc6531febb7a091e027605385de9fe14dd6a97319d614cf"}, - {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:644904600c15816a1f9a1bafa6aab0d21db2788abcdf4e2a77951280473f33e1"}, - {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87bce04f09f0552b66fca0c4e10da78d17cb0e71c205864bab4e9595122cb9d9"}, - {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:877045a7969ace04d59516d5d6a7dee13106822f99a5d8df5e6822941f7bedc8"}, - {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9c46e556ee266ed3fb7b7a882b53df3c76b45e872fdab8d9cf49ae5e91147fd7"}, - {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4eebbd049008eb800f519578e944b8dc8e0f7d59a5abb5924cc2d4ed3a1834ff"}, - {file = 
"pydantic_core-2.16.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:c0be58529d43d38ae849a91932391eb93275a06b93b79a8ab828b012e916a206"}, - {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:b1fc07896fc1851558f532dffc8987e526b682ec73140886c831d773cef44b76"}, - {file = "pydantic_core-2.16.1.tar.gz", hash = "sha256:daff04257b49ab7f4b3f73f98283d3dbb1a65bf3500d55c7beac3c66c310fe34"}, + {file = "pydantic_core-2.16.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3fab4e75b8c525a4776e7630b9ee48aea50107fea6ca9f593c98da3f4d11bf7c"}, + {file = "pydantic_core-2.16.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8bde5b48c65b8e807409e6f20baee5d2cd880e0fad00b1a811ebc43e39a00ab2"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2924b89b16420712e9bb8192396026a8fbd6d8726224f918353ac19c4c043d2a"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:16aa02e7a0f539098e215fc193c8926c897175d64c7926d00a36188917717a05"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:936a787f83db1f2115ee829dd615c4f684ee48ac4de5779ab4300994d8af325b"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:459d6be6134ce3b38e0ef76f8a672924460c455d45f1ad8fdade36796df1ddc8"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9ee4febb249c591d07b2d4dd36ebcad0ccd128962aaa1801508320896575ef"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40a0bd0bed96dae5712dab2aba7d334a6c67cbcac2ddfca7dbcc4a8176445990"}, + {file = "pydantic_core-2.16.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:870dbfa94de9b8866b37b867a2cb37a60c401d9deb4a9ea392abf11a1f98037b"}, + {file = "pydantic_core-2.16.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:308974fdf98046db28440eb3377abba274808bf66262e042c412eb2adf852731"}, + {file = "pydantic_core-2.16.2-cp310-none-win32.whl", hash = "sha256:a477932664d9611d7a0816cc3c0eb1f8856f8a42435488280dfbf4395e141485"}, + {file = "pydantic_core-2.16.2-cp310-none-win_amd64.whl", hash = "sha256:8f9142a6ed83d90c94a3efd7af8873bf7cefed2d3d44387bf848888482e2d25f"}, + {file = "pydantic_core-2.16.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:406fac1d09edc613020ce9cf3f2ccf1a1b2f57ab00552b4c18e3d5276c67eb11"}, + {file = "pydantic_core-2.16.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ce232a6170dd6532096cadbf6185271e4e8c70fc9217ebe105923ac105da9978"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a90fec23b4b05a09ad988e7a4f4e081711a90eb2a55b9c984d8b74597599180f"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8aafeedb6597a163a9c9727d8a8bd363a93277701b7bfd2749fbefee2396469e"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9957433c3a1b67bdd4c63717eaf174ebb749510d5ea612cd4e83f2d9142f3fc8"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0d7a9165167269758145756db43a133608a531b1e5bb6a626b9ee24bc38a8f7"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dffaf740fe2e147fedcb6b561353a16243e654f7fe8e701b1b9db148242e1272"}, + 
{file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f8ed79883b4328b7f0bd142733d99c8e6b22703e908ec63d930b06be3a0e7113"}, + {file = "pydantic_core-2.16.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:cf903310a34e14651c9de056fcc12ce090560864d5a2bb0174b971685684e1d8"}, + {file = "pydantic_core-2.16.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:46b0d5520dbcafea9a8645a8164658777686c5c524d381d983317d29687cce97"}, + {file = "pydantic_core-2.16.2-cp311-none-win32.whl", hash = "sha256:70651ff6e663428cea902dac297066d5c6e5423fda345a4ca62430575364d62b"}, + {file = "pydantic_core-2.16.2-cp311-none-win_amd64.whl", hash = "sha256:98dc6f4f2095fc7ad277782a7c2c88296badcad92316b5a6e530930b1d475ebc"}, + {file = "pydantic_core-2.16.2-cp311-none-win_arm64.whl", hash = "sha256:ef6113cd31411eaf9b39fc5a8848e71c72656fd418882488598758b2c8c6dfa0"}, + {file = "pydantic_core-2.16.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:88646cae28eb1dd5cd1e09605680c2b043b64d7481cdad7f5003ebef401a3039"}, + {file = "pydantic_core-2.16.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7b883af50eaa6bb3299780651e5be921e88050ccf00e3e583b1e92020333304b"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bf26c2e2ea59d32807081ad51968133af3025c4ba5753e6a794683d2c91bf6e"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:99af961d72ac731aae2a1b55ccbdae0733d816f8bfb97b41909e143de735f522"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:02906e7306cb8c5901a1feb61f9ab5e5c690dbbeaa04d84c1b9ae2a01ebe9379"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5362d099c244a2d2f9659fb3c9db7c735f0004765bbe06b99be69fbd87c3f15"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ac426704840877a285d03a445e162eb258924f014e2f074e209d9b4ff7bf380"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b94cbda27267423411c928208e89adddf2ea5dd5f74b9528513f0358bba019cb"}, + {file = "pydantic_core-2.16.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:6db58c22ac6c81aeac33912fb1af0e930bc9774166cdd56eade913d5f2fff35e"}, + {file = "pydantic_core-2.16.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:396fdf88b1b503c9c59c84a08b6833ec0c3b5ad1a83230252a9e17b7dfb4cffc"}, + {file = "pydantic_core-2.16.2-cp312-none-win32.whl", hash = "sha256:7c31669e0c8cc68400ef0c730c3a1e11317ba76b892deeefaf52dcb41d56ed5d"}, + {file = "pydantic_core-2.16.2-cp312-none-win_amd64.whl", hash = "sha256:a3b7352b48fbc8b446b75f3069124e87f599d25afb8baa96a550256c031bb890"}, + {file = "pydantic_core-2.16.2-cp312-none-win_arm64.whl", hash = "sha256:a9e523474998fb33f7c1a4d55f5504c908d57add624599e095c20fa575b8d943"}, + {file = "pydantic_core-2.16.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:ae34418b6b389d601b31153b84dce480351a352e0bb763684a1b993d6be30f17"}, + {file = "pydantic_core-2.16.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:732bd062c9e5d9582a30e8751461c1917dd1ccbdd6cafb032f02c86b20d2e7ec"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b52776a2e3230f4854907a1e0946eec04d41b1fc64069ee774876bbe0eab55"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash 
= "sha256:ef551c053692b1e39e3f7950ce2296536728871110e7d75c4e7753fb30ca87f4"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ebb892ed8599b23fa8f1799e13a12c87a97a6c9d0f497525ce9858564c4575a4"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa6c8c582036275997a733427b88031a32ffa5dfc3124dc25a730658c47a572f"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4ba0884a91f1aecce75202473ab138724aa4fb26d7707f2e1fa6c3e68c84fbf"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7924e54f7ce5d253d6160090ddc6df25ed2feea25bfb3339b424a9dd591688bc"}, + {file = "pydantic_core-2.16.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69a7b96b59322a81c2203be537957313b07dd333105b73db0b69212c7d867b4b"}, + {file = "pydantic_core-2.16.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7e6231aa5bdacda78e96ad7b07d0c312f34ba35d717115f4b4bff6cb87224f0f"}, + {file = "pydantic_core-2.16.2-cp38-none-win32.whl", hash = "sha256:41dac3b9fce187a25c6253ec79a3f9e2a7e761eb08690e90415069ea4a68ff7a"}, + {file = "pydantic_core-2.16.2-cp38-none-win_amd64.whl", hash = "sha256:f685dbc1fdadb1dcd5b5e51e0a378d4685a891b2ddaf8e2bba89bd3a7144e44a"}, + {file = "pydantic_core-2.16.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:55749f745ebf154c0d63d46c8c58594d8894b161928aa41adbb0709c1fe78b77"}, + {file = "pydantic_core-2.16.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b30b0dd58a4509c3bd7eefddf6338565c4905406aee0c6e4a5293841411a1286"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18de31781cdc7e7b28678df7c2d7882f9692ad060bc6ee3c94eb15a5d733f8f7"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5864b0242f74b9dd0b78fd39db1768bc3f00d1ffc14e596fd3e3f2ce43436a33"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8f9186ca45aee030dc8234118b9c0784ad91a0bb27fc4e7d9d6608a5e3d386c"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cc6f6c9be0ab6da37bc77c2dda5f14b1d532d5dbef00311ee6e13357a418e646"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa057095f621dad24a1e906747179a69780ef45cc8f69e97463692adbcdae878"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6ad84731a26bcfb299f9eab56c7932d46f9cad51c52768cace09e92a19e4cf55"}, + {file = "pydantic_core-2.16.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3b052c753c4babf2d1edc034c97851f867c87d6f3ea63a12e2700f159f5c41c3"}, + {file = "pydantic_core-2.16.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e0f686549e32ccdb02ae6f25eee40cc33900910085de6aa3790effd391ae10c2"}, + {file = "pydantic_core-2.16.2-cp39-none-win32.whl", hash = "sha256:7afb844041e707ac9ad9acad2188a90bffce2c770e6dc2318be0c9916aef1469"}, + {file = "pydantic_core-2.16.2-cp39-none-win_amd64.whl", hash = "sha256:9da90d393a8227d717c19f5397688a38635afec89f2e2d7af0df037f3249c39a"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5f60f920691a620b03082692c378661947d09415743e437a7478c309eb0e4f82"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:47924039e785a04d4a4fa49455e51b4eb3422d6eaacfde9fc9abf8fdef164e8a"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6294e76b0380bb7a61eb8a39273c40b20beb35e8c87ee101062834ced19c545"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe56851c3f1d6f5384b3051c536cc81b3a93a73faf931f404fef95217cf1e10d"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9d776d30cde7e541b8180103c3f294ef7c1862fd45d81738d156d00551005784"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:72f7919af5de5ecfaf1eba47bf9a5d8aa089a3340277276e5636d16ee97614d7"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:4bfcbde6e06c56b30668a0c872d75a7ef3025dc3c1823a13cf29a0e9b33f67e8"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ff7c97eb7a29aba230389a2661edf2e9e06ce616c7e35aa764879b6894a44b25"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9b5f13857da99325dcabe1cc4e9e6a3d7b2e2c726248ba5dd4be3e8e4a0b6d0e"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a7e41e3ada4cca5f22b478c08e973c930e5e6c7ba3588fb8e35f2398cdcc1545"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60eb8ceaa40a41540b9acae6ae7c1f0a67d233c40dc4359c256ad2ad85bdf5e5"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7beec26729d496a12fd23cf8da9944ee338c8b8a17035a560b585c36fe81af20"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:22c5f022799f3cd6741e24f0443ead92ef42be93ffda0d29b2597208c94c3753"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:eca58e319f4fd6df004762419612122b2c7e7d95ffafc37e890252f869f3fb2a"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ed957db4c33bc99895f3a1672eca7e80e8cda8bd1e29a80536b4ec2153fa9804"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:459c0d338cc55d099798618f714b21b7ece17eb1a87879f2da20a3ff4c7628e2"}, + {file = "pydantic_core-2.16.2.tar.gz", hash = "sha256:0ba503850d8b8dcc18391f10de896ae51d37fe5fe43dbfb6a35c5c5cad271a06"}, ] [package.dependencies] From a7794cad8e09c43f26e7be627c4e999d330c6cec Mon Sep 17 00:00:00 2001 From: Fokko Driesprong Date: Wed, 7 Feb 2024 14:30:15 +0100 Subject: [PATCH 090/169] Arrow: Support Arrow large-string (#382) --- pyiceberg/io/pyarrow.py | 2 +- tests/io/test_pyarrow_visitor.py | 9 +++++++++ 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/pyiceberg/io/pyarrow.py b/pyiceberg/io/pyarrow.py index 91d8452eab..904fab2e28 100644 --- a/pyiceberg/io/pyarrow.py +++ b/pyiceberg/io/pyarrow.py @@ -864,7 +864,7 @@ def primitive(self, primitive: pa.DataType) -> PrimitiveType: elif isinstance(primitive, pa.Decimal128Type): primitive = cast(pa.Decimal128Type, primitive) return DecimalType(primitive.precision, primitive.scale) - elif pa.types.is_string(primitive): + elif pa.types.is_string(primitive) or pa.types.is_large_string(primitive): return StringType() elif pa.types.is_date32(primitive): return DateType() diff --git a/tests/io/test_pyarrow_visitor.py b/tests/io/test_pyarrow_visitor.py index 
c7f364b920..c30a53a4d4 100644 --- a/tests/io/test_pyarrow_visitor.py +++ b/tests/io/test_pyarrow_visitor.py @@ -272,6 +272,15 @@ def test_round_schema_conversion_nested(table_schema_nested: Schema) -> None: assert actual == expected +def test_round_schema_large_string() -> None: + schema = pa.schema([pa.field("animals", pa.large_string())]) + actual = str(pyarrow_to_schema(schema, name_mapping=NameMapping([MappedField(field_id=1, names=["animals"])]))) + expected = """table { + 1: animals: optional string +}""" + assert actual == expected + + def test_simple_schema_has_missing_ids() -> None: schema = pa.schema([ pa.field('foo', pa.string(), nullable=False), From cec051f230edfb584f1267e505ee218305389c11 Mon Sep 17 00:00:00 2001 From: Jay Chia <17691182+jaychia@users.noreply.github.com> Date: Wed, 7 Feb 2024 05:30:34 -0800 Subject: [PATCH 091/169] Add tests and fixes for Daft integration (#381) * Implement to_daft on Table instead of Scan * Add integration tests --------- Co-authored-by: Jay Chia --- pyiceberg/table/__init__.py | 20 ++++++++++---------- tests/integration/test_reads.py | 22 ++++++++++++++++++++++ 2 files changed, 32 insertions(+), 10 deletions(-) diff --git a/pyiceberg/table/__init__.py b/pyiceberg/table/__init__.py index b9d44b7c4d..1feffc60ed 100644 --- a/pyiceberg/table/__init__.py +++ b/pyiceberg/table/__init__.py @@ -1025,6 +1025,16 @@ def __repr__(self) -> str: result_str = f"{table_name}(\n {schema_str}\n),\n{partition_str},\n{sort_order_str},\n{snapshot_str}" return result_str + def to_daft(self) -> daft.DataFrame: + """Read a Daft DataFrame lazily from this Iceberg table. + + Returns: + daft.DataFrame: Unmaterialized Daft Dataframe created from the Iceberg table + """ + import daft + + return daft.read_iceberg(self) + class StaticTable(Table): """Load a table directly from a metadata file (i.e., without using a catalog).""" @@ -1382,16 +1392,6 @@ def to_ray(self) -> ray.data.dataset.Dataset: return ray.data.from_arrow(self.to_arrow()) - def to_daft(self) -> daft.DataFrame: - """Read a Daft DataFrame lazily from this Iceberg table. 
-
-        Returns:
-            daft.DataFrame: Unmaterialized Daft Dataframe created from the Iceberg table
-        """
-        import daft
-
-        return daft.read_iceberg(self)
-
 
 class MoveOperation(Enum):
     First = 1
diff --git a/tests/integration/test_reads.py b/tests/integration/test_reads.py
index b35b348638..d487a6477e 100644
--- a/tests/integration/test_reads.py
+++ b/tests/integration/test_reads.py
@@ -178,6 +178,28 @@ def test_pyarrow_limit(catalog: Catalog) -> None:
     assert len(full_result) == 10
 
 
+@pytest.mark.integration
+@pytest.mark.filterwarnings("ignore")
+@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')])
+def test_daft_nan(catalog: Catalog) -> None:
+    table_test_null_nan_rewritten = catalog.load_table("default.test_null_nan_rewritten")
+    df = table_test_null_nan_rewritten.to_daft()
+    assert df.count_rows() == 3
+    assert math.isnan(df.to_pydict()["col_numeric"][0])
+
+
+@pytest.mark.integration
+@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')])
+def test_daft_nan_rewritten(catalog: Catalog) -> None:
+    table_test_null_nan_rewritten = catalog.load_table("default.test_null_nan_rewritten")
+    df = table_test_null_nan_rewritten.to_daft()
+    df = df.where(df["col_numeric"].float.is_nan())
+    df = df.select("idx", "col_numeric")
+    assert df.count_rows() == 1
+    assert df.to_pydict()["idx"][0] == 1
+    assert math.isnan(df.to_pydict()["col_numeric"][0])
+
+
 @pytest.mark.integration
 @pytest.mark.filterwarnings("ignore")
 @pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')])
From 8c2b03ba35af4ed2ef0742cb5ed67d57a0b791bb Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Wed, 7 Feb 2024 19:14:04 -0800
Subject: [PATCH 092/169] Build: Bump mkdocs-material from 9.5.7 to 9.5.8 (#395)

---
 mkdocs/requirements.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/mkdocs/requirements.txt b/mkdocs/requirements.txt
index 3638d65b20..b247aaf4b3 100644
--- a/mkdocs/requirements.txt
+++ b/mkdocs/requirements.txt
@@ -23,6 +23,6 @@ mkdocstrings-python==1.8.0
 mkdocs-literate-nav==0.6.1
 mkdocs-autorefs==0.5.0
 mkdocs-gen-files==0.5.0
-mkdocs-material==9.5.7
+mkdocs-material==9.5.8
 mkdocs-material-extensions==1.3.1
 mkdocs-section-index==0.3.8
From d7dcd99f7098d7a61f4d8da193ffd04cd7b63a47 Mon Sep 17 00:00:00 2001
From: Fokko Driesprong
Date: Thu, 8 Feb 2024 08:46:29 +0100
Subject: [PATCH 093/169] Arrow: Support int8 and int16 types (#391)

I've checked with Spark, where byte and short types are converted to
integers. I think it makes sense to do this for Arrow as well.
--- pyiceberg/io/pyarrow.py | 13 +++++++++---- tests/io/test_pyarrow_visitor.py | 12 ++++++++++++ 2 files changed, 21 insertions(+), 4 deletions(-) diff --git a/pyiceberg/io/pyarrow.py b/pyiceberg/io/pyarrow.py index 904fab2e28..1b14771624 100644 --- a/pyiceberg/io/pyarrow.py +++ b/pyiceberg/io/pyarrow.py @@ -853,10 +853,15 @@ def map(self, map_type: pa.MapType, key_result: IcebergType, value_result: Icebe def primitive(self, primitive: pa.DataType) -> PrimitiveType: if pa.types.is_boolean(primitive): return BooleanType() - elif pa.types.is_int32(primitive): - return IntegerType() - elif pa.types.is_int64(primitive): - return LongType() + elif pa.types.is_integer(primitive): + width = primitive.bit_width + if width <= 32: + return IntegerType() + elif width <= 64: + return LongType() + else: + # Does not exist (yet) + raise TypeError(f"Unsupported integer type: {primitive}") elif pa.types.is_float32(primitive): return FloatType() elif pa.types.is_float64(primitive): diff --git a/tests/io/test_pyarrow_visitor.py b/tests/io/test_pyarrow_visitor.py index c30a53a4d4..c6ba18c7b0 100644 --- a/tests/io/test_pyarrow_visitor.py +++ b/tests/io/test_pyarrow_visitor.py @@ -84,6 +84,18 @@ def test_pyarrow_boolean_to_iceberg() -> None: assert visit(converted_iceberg_type, _ConvertToArrowSchema()) == pyarrow_type +def test_pyarrow_int8_to_iceberg() -> None: + pyarrow_type = pa.int8() + converted_iceberg_type = visit_pyarrow(pyarrow_type, _ConvertToIceberg()) + assert converted_iceberg_type == IntegerType() + + +def test_pyarrow_int16_to_iceberg() -> None: + pyarrow_type = pa.int16() + converted_iceberg_type = visit_pyarrow(pyarrow_type, _ConvertToIceberg()) + assert converted_iceberg_type == IntegerType() + + def test_pyarrow_int32_to_iceberg() -> None: pyarrow_type = pa.int32() converted_iceberg_type = visit_pyarrow(pyarrow_type, _ConvertToIceberg()) From ef33b9db6e45a6f2388a1a3dfba7161df47c9747 Mon Sep 17 00:00:00 2001 From: Sung Yun <107272191+syun64@users.noreply.github.com> Date: Thu, 8 Feb 2024 02:50:46 -0500 Subject: [PATCH 094/169] Bug: Rest Catalog update partition-spec and sort-order when schema is created (#392) * also update partition spec and sort order when fresh schema is created * fresh-schema * create table integrity test * undo test change --- pyiceberg/catalog/rest.py | 14 +++++++----- tests/integration/test_rest_schema.py | 32 +++++++++++++++++++++++++++ 2 files changed, 40 insertions(+), 6 deletions(-) diff --git a/pyiceberg/catalog/rest.py b/pyiceberg/catalog/rest.py index 765f04b128..b4b9f722b5 100644 --- a/pyiceberg/catalog/rest.py +++ b/pyiceberg/catalog/rest.py @@ -57,7 +57,7 @@ TableAlreadyExistsError, UnauthorizedError, ) -from pyiceberg.partitioning import UNPARTITIONED_PARTITION_SPEC, PartitionSpec +from pyiceberg.partitioning import UNPARTITIONED_PARTITION_SPEC, PartitionSpec, assign_fresh_partition_spec_ids from pyiceberg.schema import Schema, assign_fresh_schema_ids from pyiceberg.table import ( CommitTableRequest, @@ -66,7 +66,7 @@ TableIdentifier, TableMetadata, ) -from pyiceberg.table.sorting import UNSORTED_SORT_ORDER, SortOrder +from pyiceberg.table.sorting import UNSORTED_SORT_ORDER, SortOrder, assign_fresh_sort_order_ids from pyiceberg.typedef import EMPTY_DICT, UTF8, IcebergBaseModel if TYPE_CHECKING: @@ -448,15 +448,17 @@ def create_table( properties: Properties = EMPTY_DICT, ) -> Table: iceberg_schema = self._convert_schema_if_needed(schema) - iceberg_schema = assign_fresh_schema_ids(iceberg_schema) + fresh_schema = assign_fresh_schema_ids(iceberg_schema) + 
fresh_partition_spec = assign_fresh_partition_spec_ids(partition_spec, iceberg_schema, fresh_schema) + fresh_sort_order = assign_fresh_sort_order_ids(sort_order, iceberg_schema, fresh_schema) namespace_and_table = self._split_identifier_for_path(identifier) request = CreateTableRequest( name=namespace_and_table["table"], location=location, - table_schema=iceberg_schema, - partition_spec=partition_spec, - write_order=sort_order, + table_schema=fresh_schema, + partition_spec=fresh_partition_spec, + write_order=fresh_sort_order, properties=properties, ) serialized_json = request.model_dump_json().encode(UTF8) diff --git a/tests/integration/test_rest_schema.py b/tests/integration/test_rest_schema.py index d844e6d6c0..a3320e4d3a 100644 --- a/tests/integration/test_rest_schema.py +++ b/tests/integration/test_rest_schema.py @@ -20,8 +20,11 @@ from pyiceberg.catalog import Catalog, load_catalog from pyiceberg.exceptions import CommitFailedException, NoSuchTableError, ValidationError +from pyiceberg.partitioning import PartitionField, PartitionSpec from pyiceberg.schema import Schema, prune_columns from pyiceberg.table import Table, UpdateSchema +from pyiceberg.table.sorting import SortField, SortOrder +from pyiceberg.transforms import IdentityTransform from pyiceberg.types import ( BinaryType, BooleanType, @@ -2497,3 +2500,32 @@ def test_two_add_schemas_in_a_single_transaction(catalog: Catalog) -> None: assert "Updates in a single commit need to be unique, duplicate: " in str( exc_info.value ) + + +@pytest.mark.integration +def test_create_table_integrity_after_fresh_assignment(catalog: Catalog) -> None: + schema = Schema( + NestedField(field_id=5, name="col_uuid", field_type=UUIDType(), required=False), + NestedField(field_id=4, name="col_fixed", field_type=FixedType(25), required=False), + ) + partition_spec = PartitionSpec( + PartitionField(source_id=5, field_id=1000, transform=IdentityTransform(), name="col_uuid"), spec_id=0 + ) + sort_order = SortOrder(SortField(source_id=4, transform=IdentityTransform())) + tbl_name = "default.test_create_integrity" + try: + catalog.drop_table(tbl_name) + except NoSuchTableError: + pass + tbl = catalog.create_table(identifier=tbl_name, schema=schema, partition_spec=partition_spec, sort_order=sort_order) + expected_schema = Schema( + NestedField(field_id=1, name="col_uuid", field_type=UUIDType(), required=False), + NestedField(field_id=2, name="col_fixed", field_type=FixedType(25), required=False), + ) + expected_spec = PartitionSpec( + PartitionField(source_id=1, field_id=1000, transform=IdentityTransform(), name="col_uuid"), spec_id=0 + ) + expected_sort_order = SortOrder(SortField(source_id=2, transform=IdentityTransform())) + assert tbl.schema() == expected_schema + assert tbl.spec() == expected_spec + assert tbl.sort_order() == expected_sort_order From ba2fe437c825943478307bf8ae431d9b8de0cb07 Mon Sep 17 00:00:00 2001 From: Honah J Date: Thu, 8 Feb 2024 00:50:40 -0800 Subject: [PATCH 095/169] Centralized table properties management (#388) * add TableProperties and PropertyUtil * fix lint * Revert unrelated change --------- Co-authored-by: Fokko Driesprong --- pyiceberg/catalog/hive.py | 4 +- pyiceberg/io/pyarrow.py | 67 ++++++++++++++++----------------- pyiceberg/table/__init__.py | 47 ++++++++++++++++++++++- pyiceberg/table/name_mapping.py | 2 - 4 files changed, 80 insertions(+), 40 deletions(-) diff --git a/pyiceberg/catalog/hive.py b/pyiceberg/catalog/hive.py index 8069321095..d81404f77a 100644 --- a/pyiceberg/catalog/hive.py +++ 
b/pyiceberg/catalog/hive.py @@ -67,7 +67,7 @@ from pyiceberg.partitioning import UNPARTITIONED_PARTITION_SPEC, PartitionSpec from pyiceberg.schema import Schema, SchemaVisitor, visit from pyiceberg.serializers import FromInputFile -from pyiceberg.table import CommitTableRequest, CommitTableResponse, Table, update_table_metadata +from pyiceberg.table import CommitTableRequest, CommitTableResponse, Table, TableProperties, update_table_metadata from pyiceberg.table.metadata import new_table_metadata from pyiceberg.table.sorting import UNSORTED_SORT_ORDER, SortOrder from pyiceberg.typedef import EMPTY_DICT @@ -155,7 +155,7 @@ def _construct_hive_storage_descriptor(schema: Schema, location: Optional[str]) PROP_TABLE_TYPE = "table_type" PROP_METADATA_LOCATION = "metadata_location" PROP_PREVIOUS_METADATA_LOCATION = "previous_metadata_location" -DEFAULT_PROPERTIES = {'write.parquet.compression-codec': 'zstd'} +DEFAULT_PROPERTIES = {TableProperties.PARQUET_COMPRESSION: TableProperties.PARQUET_COMPRESSION_DEFAULT} def _construct_parameters(metadata_location: str, previous_metadata_location: Optional[str] = None) -> Dict[str, Any]: diff --git a/pyiceberg/io/pyarrow.py b/pyiceberg/io/pyarrow.py index 1b14771624..ee8f63f7fd 100644 --- a/pyiceberg/io/pyarrow.py +++ b/pyiceberg/io/pyarrow.py @@ -124,7 +124,7 @@ visit, visit_with_partner, ) -from pyiceberg.table import WriteTask +from pyiceberg.table import PropertyUtil, TableProperties, WriteTask from pyiceberg.table.name_mapping import NameMapping from pyiceberg.transforms import TruncateTransform from pyiceberg.typedef import EMPTY_DICT, Properties, Record @@ -1389,19 +1389,12 @@ class MetricModeTypes(Enum): FULL = "full" -DEFAULT_METRICS_MODE_KEY = "write.metadata.metrics.default" -COLUMN_METRICS_MODE_KEY_PREFIX = "write.metadata.metrics.column" - - @dataclass(frozen=True) class MetricsMode(Singleton): type: MetricModeTypes length: Optional[int] = None -_DEFAULT_METRICS_MODE = MetricsMode(MetricModeTypes.TRUNCATE, DEFAULT_TRUNCATION_LENGTH) - - def match_metrics_mode(mode: str) -> MetricsMode: sanitized_mode = mode.strip().lower() if sanitized_mode.startswith("truncate"): @@ -1435,12 +1428,14 @@ class PyArrowStatisticsCollector(PreOrderSchemaVisitor[List[StatisticsCollector] _field_id: int = 0 _schema: Schema _properties: Dict[str, str] - _default_mode: Optional[str] + _default_mode: str def __init__(self, schema: Schema, properties: Dict[str, str]): self._schema = schema self._properties = properties - self._default_mode = self._properties.get(DEFAULT_METRICS_MODE_KEY) + self._default_mode = self._properties.get( + TableProperties.DEFAULT_WRITE_METRICS_MODE, TableProperties.DEFAULT_WRITE_METRICS_MODE_DEFAULT + ) def schema(self, schema: Schema, struct_result: Callable[[], List[StatisticsCollector]]) -> List[StatisticsCollector]: return struct_result() @@ -1475,12 +1470,9 @@ def primitive(self, primitive: PrimitiveType) -> List[StatisticsCollector]: if column_name is None: return [] - metrics_mode = _DEFAULT_METRICS_MODE - - if self._default_mode: - metrics_mode = match_metrics_mode(self._default_mode) + metrics_mode = match_metrics_mode(self._default_mode) - col_mode = self._properties.get(f"{COLUMN_METRICS_MODE_KEY_PREFIX}.{column_name}") + col_mode = self._properties.get(f"{TableProperties.METRICS_MODE_COLUMN_CONF_PREFIX}.{column_name}") if col_mode: metrics_mode = match_metrics_mode(col_mode) @@ -1767,33 +1759,40 @@ def write_file(table: Table, tasks: Iterator[WriteTask]) -> Iterator[DataFile]: return iter([data_file]) -def 
_get_parquet_writer_kwargs(table_properties: Properties) -> Dict[str, Any]: - def _get_int(key: str, default: Optional[int] = None) -> Optional[int]: - if value := table_properties.get(key): - try: - return int(value) - except ValueError as e: - raise ValueError(f"Could not parse table property {key} to an integer: {value}") from e - else: - return default +ICEBERG_UNCOMPRESSED_CODEC = "uncompressed" +PYARROW_UNCOMPRESSED_CODEC = "none" + +def _get_parquet_writer_kwargs(table_properties: Properties) -> Dict[str, Any]: for key_pattern in [ - "write.parquet.row-group-size-bytes", - "write.parquet.page-row-limit", - "write.parquet.bloom-filter-max-bytes", - "write.parquet.bloom-filter-enabled.column.*", + TableProperties.PARQUET_ROW_GROUP_SIZE_BYTES, + TableProperties.PARQUET_PAGE_ROW_LIMIT, + TableProperties.PARQUET_BLOOM_FILTER_MAX_BYTES, + f"{TableProperties.PARQUET_BLOOM_FILTER_COLUMN_ENABLED_PREFIX}.*", ]: if unsupported_keys := fnmatch.filter(table_properties, key_pattern): raise NotImplementedError(f"Parquet writer option(s) {unsupported_keys} not implemented") - compression_codec = table_properties.get("write.parquet.compression-codec", "zstd") - compression_level = _get_int("write.parquet.compression-level") - if compression_codec == "uncompressed": - compression_codec = "none" + compression_codec = table_properties.get(TableProperties.PARQUET_COMPRESSION, TableProperties.PARQUET_COMPRESSION_DEFAULT) + compression_level = PropertyUtil.property_as_int( + properties=table_properties, + property_name=TableProperties.PARQUET_COMPRESSION_LEVEL, + default=TableProperties.PARQUET_COMPRESSION_LEVEL_DEFAULT, + ) + if compression_codec == ICEBERG_UNCOMPRESSED_CODEC: + compression_codec = PYARROW_UNCOMPRESSED_CODEC return { "compression": compression_codec, "compression_level": compression_level, - "data_page_size": _get_int("write.parquet.page-size-bytes"), - "dictionary_pagesize_limit": _get_int("write.parquet.dict-size-bytes", default=2 * 1024 * 1024), + "data_page_size": PropertyUtil.property_as_int( + properties=table_properties, + property_name=TableProperties.PARQUET_PAGE_SIZE_BYTES, + default=TableProperties.PARQUET_PAGE_SIZE_BYTES_DEFAULT, + ), + "dictionary_pagesize_limit": PropertyUtil.property_as_int( + properties=table_properties, + property_name=TableProperties.PARQUET_DICT_SIZE_BYTES, + default=TableProperties.PARQUET_DICT_SIZE_BYTES_DEFAULT, + ), } diff --git a/pyiceberg/table/__init__.py b/pyiceberg/table/__init__.py index 1feffc60ed..8670bc2f77 100644 --- a/pyiceberg/table/__init__.py +++ b/pyiceberg/table/__init__.py @@ -85,7 +85,6 @@ TableMetadataUtil, ) from pyiceberg.table.name_mapping import ( - SCHEMA_NAME_MAPPING_DEFAULT, NameMapping, create_mapping_from_schema, parse_mapping_from_json, @@ -134,6 +133,50 @@ _JAVA_LONG_MAX = 9223372036854775807 +class TableProperties: + PARQUET_ROW_GROUP_SIZE_BYTES = "write.parquet.row-group-size-bytes" + PARQUET_ROW_GROUP_SIZE_BYTES_DEFAULT = 128 * 1024 * 1024 # 128 MB + + PARQUET_PAGE_SIZE_BYTES = "write.parquet.page-size-bytes" + PARQUET_PAGE_SIZE_BYTES_DEFAULT = 1024 * 1024 # 1 MB + + PARQUET_PAGE_ROW_LIMIT = "write.parquet.page-row-limit" + PARQUET_PAGE_ROW_LIMIT_DEFAULT = 20000 + + PARQUET_DICT_SIZE_BYTES = "write.parquet.dict-size-bytes" + PARQUET_DICT_SIZE_BYTES_DEFAULT = 2 * 1024 * 1024 # 2 MB + + PARQUET_COMPRESSION = "write.parquet.compression-codec" + PARQUET_COMPRESSION_DEFAULT = "zstd" + + PARQUET_COMPRESSION_LEVEL = "write.parquet.compression-level" + PARQUET_COMPRESSION_LEVEL_DEFAULT = None + + 
PARQUET_BLOOM_FILTER_MAX_BYTES = "write.parquet.bloom-filter-max-bytes"
+    PARQUET_BLOOM_FILTER_MAX_BYTES_DEFAULT = 1024 * 1024
+
+    PARQUET_BLOOM_FILTER_COLUMN_ENABLED_PREFIX = "write.parquet.bloom-filter-enabled.column"
+
+    DEFAULT_WRITE_METRICS_MODE = "write.metadata.metrics.default"
+    DEFAULT_WRITE_METRICS_MODE_DEFAULT = "truncate(16)"
+
+    METRICS_MODE_COLUMN_CONF_PREFIX = "write.metadata.metrics.column"
+
+    DEFAULT_NAME_MAPPING = "schema.name-mapping.default"
+
+
+class PropertyUtil:
+    @staticmethod
+    def property_as_int(properties: Dict[str, str], property_name: str, default: Optional[int] = None) -> Optional[int]:
+        if value := properties.get(property_name):
+            try:
+                return int(value)
+            except ValueError as e:
+                raise ValueError(f"Could not parse table property {property_name} to an integer: {value}") from e
+        else:
+            return default
+
+
 class Transaction:
     _table: Table
     _updates: Tuple[TableUpdate, ...]
@@ -921,7 +964,7 @@ def update_schema(self, allow_incompatible_changes: bool = False, case_sensitive
     def name_mapping(self) -> NameMapping:
         """Return the table's field-id NameMapping."""
-        if name_mapping_json := self.properties.get(SCHEMA_NAME_MAPPING_DEFAULT):
+        if name_mapping_json := self.properties.get(TableProperties.DEFAULT_NAME_MAPPING):
            return parse_mapping_from_json(name_mapping_json)
         else:
             return create_mapping_from_schema(self.schema())
diff --git a/pyiceberg/table/name_mapping.py b/pyiceberg/table/name_mapping.py
index 84a295f5e4..ffe96359a8 100644
--- a/pyiceberg/table/name_mapping.py
+++ b/pyiceberg/table/name_mapping.py
@@ -34,8 +34,6 @@
 from pyiceberg.typedef import IcebergBaseModel, IcebergRootModel
 from pyiceberg.types import ListType, MapType, NestedField, PrimitiveType, StructType
 
-SCHEMA_NAME_MAPPING_DEFAULT = "schema.name-mapping.default"
-
 
 class MappedField(IcebergBaseModel):
     field_id: int = Field(alias="field-id")

From 7a1fe28df38bfdc69166915dca49cfde660853bf Mon Sep 17 00:00:00 2001
From: Fokko Driesprong
Date: Thu, 8 Feb 2024 11:13:03 +0100
Subject: [PATCH 096/169] Enable setting `{page,row-group}` limit (#390)

---
 mkdocs/docs/configuration.md |  2 ++
 pyiceberg/io/pyarrow.py      | 12 +++++++++++-
 pyiceberg/table/__init__.py  |  3 +++
 3 files changed, 16 insertions(+), 1 deletion(-)

diff --git a/mkdocs/docs/configuration.md b/mkdocs/docs/configuration.md
index ce17931169..8acc0a98cb 100644
--- a/mkdocs/docs/configuration.md
+++ b/mkdocs/docs/configuration.md
@@ -57,7 +57,9 @@ Iceberg tables support table properties to configure table behavior.
 | `write.parquet.compression-codec`   | `{uncompressed,zstd,gzip,snappy}` | zstd    | Sets the Parquet compression codec. |
 | `write.parquet.compression-level`   | Integer                           | null    | Parquet compression level for the codec. If not set, it is up to PyIceberg |
 | `write.parquet.page-size-bytes`     | Size in bytes                     | 1MB     | Set a target threshold for the approximate encoded size of data pages within a column chunk |
+| `write.parquet.page-row-limit`      | Number of rows                    | 20000   | Set a target threshold for the maximum number of rows within a column chunk |
 | `write.parquet.dict-size-bytes`     | Size in bytes                     | 2MB     | Set the dictionary page size limit per row group |
+| `write.parquet.row-group-limit`     | Number of rows                    | 122880  | The Parquet row group limit |
 
 # FileIO
 
diff --git a/pyiceberg/io/pyarrow.py b/pyiceberg/io/pyarrow.py
index ee8f63f7fd..86571449fb 100644
--- a/pyiceberg/io/pyarrow.py
+++ b/pyiceberg/io/pyarrow.py
@@ -1730,9 +1730,14 @@ def write_file(table: Table, tasks: Iterator[WriteTask]) -> Iterator[DataFile]:
         file_schema = schema_to_pyarrow(table.schema())
 
     fo = table.io.new_output(file_path)
+    row_group_size = PropertyUtil.property_as_int(
+        properties=table.properties,
+        property_name=TableProperties.PARQUET_ROW_GROUP_SIZE_BYTES,
+        default=TableProperties.PARQUET_ROW_GROUP_SIZE_BYTES_DEFAULT,
+    )
     with fo.create(overwrite=True) as fos:
         with pq.ParquetWriter(fos, schema=file_schema, **parquet_writer_kwargs) as writer:
-            writer.write_table(task.df)
+            writer.write_table(task.df, row_group_size=row_group_size)
 
     data_file = DataFile(
         content=DataFileContent.DATA,
@@ -1795,4 +1800,9 @@ def _get_parquet_writer_kwargs(table_properties: Properties) -> Dict[str, Any]:
             property_name=TableProperties.PARQUET_DICT_SIZE_BYTES,
             default=TableProperties.PARQUET_DICT_SIZE_BYTES_DEFAULT,
         ),
+        "write_batch_size": PropertyUtil.property_as_int(
+            properties=table_properties,
+            property_name=TableProperties.PARQUET_PAGE_ROW_LIMIT,
+            default=TableProperties.PARQUET_PAGE_ROW_LIMIT_DEFAULT,
+        ),
     }
diff --git a/pyiceberg/table/__init__.py b/pyiceberg/table/__init__.py
index 8670bc2f77..cc4bbf52a3 100644
--- a/pyiceberg/table/__init__.py
+++ b/pyiceberg/table/__init__.py
@@ -137,6 +137,9 @@ class TableProperties:
     PARQUET_ROW_GROUP_SIZE_BYTES = "write.parquet.row-group-size-bytes"
     PARQUET_ROW_GROUP_SIZE_BYTES_DEFAULT = 128 * 1024 * 1024  # 128 MB
 
+    PARQUET_ROW_GROUP_LIMIT = "write.parquet.row-group-limit"
+    PARQUET_ROW_GROUP_LIMIT_DEFAULT = 122880  # rows, matching the documented default
+
     PARQUET_PAGE_SIZE_BYTES = "write.parquet.page-size-bytes"
     PARQUET_PAGE_SIZE_BYTES_DEFAULT = 1024 * 1024  # 1 MB

From 8821d64932e4eaad1ba9702c54d70492a47fc6ab Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Fri, 9 Feb 2024 08:25:48 +0100
Subject: [PATCH 097/169] Build: Bump griffe from 0.40.0 to 0.40.1 (#400)

Bumps [griffe](https://github.com/mkdocstrings/griffe) from 0.40.0 to 0.40.1.
- [Release notes](https://github.com/mkdocstrings/griffe/releases)
- [Changelog](https://github.com/mkdocstrings/griffe/blob/main/CHANGELOG.md)
- [Commits](https://github.com/mkdocstrings/griffe/compare/0.40.0...0.40.1)

---
updated-dependencies:
- dependency-name: griffe
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
 mkdocs/requirements.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/mkdocs/requirements.txt b/mkdocs/requirements.txt
index b247aaf4b3..0657252c18 100644
--- a/mkdocs/requirements.txt
+++ b/mkdocs/requirements.txt
@@ -16,7 +16,7 @@
 # under the License.
mkdocs==1.5.3 -griffe==0.40.0 +griffe==0.40.1 jinja2==3.1.3 mkdocstrings==0.24.0 mkdocstrings-python==1.8.0 From b99468c032db826b6b5274931cc087983444fc22 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 9 Feb 2024 08:26:51 +0100 Subject: [PATCH 098/169] Build: Bump getdaft from 0.2.12 to 0.2.13 (#399) Bumps [getdaft](https://github.com/Eventual-Inc/Daft) from 0.2.12 to 0.2.13. - [Release notes](https://github.com/Eventual-Inc/Daft/releases) - [Commits](https://github.com/Eventual-Inc/Daft/compare/v0.2.12...v0.2.13) --- updated-dependencies: - dependency-name: getdaft dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/poetry.lock b/poetry.lock index b3a68cce73..8d7b42d2cc 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1211,17 +1211,17 @@ gcsfuse = ["fusepy"] [[package]] name = "getdaft" -version = "0.2.12" +version = "0.2.13" description = "A Distributed DataFrame library for large scale complex data processing." optional = true python-versions = ">=3.7" files = [ - {file = "getdaft-0.2.12-cp37-abi3-macosx_10_7_x86_64.whl", hash = "sha256:ec3d9693e6d859a7604e7597695e51383bdd4a0100fa892a030510c81dc7a214"}, - {file = "getdaft-0.2.12-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:44f12f57b8dfd35c6a8ff9da749f4243c9558403e0c65720298f65079436e505"}, - {file = "getdaft-0.2.12-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:565713fa393970b3a891e9c02e8fb20ebce9b7b1278a00299e5159b71a76fac7"}, - {file = "getdaft-0.2.12-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed061e93c55cb757061eac48675f4d9bb8960ed3ee1b2df64ade848946875f2f"}, - {file = "getdaft-0.2.12-cp37-abi3-win_amd64.whl", hash = "sha256:7970903d4986acbf0a99e812d71fd12c9b63911cb7b601085354864e032dfa94"}, - {file = "getdaft-0.2.12.tar.gz", hash = "sha256:9a8698cf04b8e6ed11c1cc960498d438748e8d1a316b4da2e7faed62e90f00b8"}, + {file = "getdaft-0.2.13-cp37-abi3-macosx_10_7_x86_64.whl", hash = "sha256:e7b7648642ddafeb34f9f91e1f847ed9b21244c33c05f98e63b77ef241c3672e"}, + {file = "getdaft-0.2.13-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:7bdeb828a20d9e2507a378ae3312a9071da166df1ab6ef3a4d8b6073eff685ad"}, + {file = "getdaft-0.2.13-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c20447188e83a1fd9b5e8edbe75b0435d08299dca00e8106ebb5c39b66e5f47c"}, + {file = "getdaft-0.2.13-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef015020b9de596d5ebfe2855f55ce56f8fb44ff2ed003f803e14fc4163a3638"}, + {file = "getdaft-0.2.13-cp37-abi3-win_amd64.whl", hash = "sha256:82a3bb1d08f8dad9a87c33445b87a16fbad253507e055a0dd7a16574d230b7b1"}, + {file = "getdaft-0.2.13.tar.gz", hash = "sha256:a814012436513becfdb8bd173868a5fb7d7582085d4be4d782703f9219add786"}, ] [package.dependencies] From 33b555a1310fd75914c012c4b4cc07108b98bac0 Mon Sep 17 00:00:00 2001 From: Fokko Driesprong Date: Fri, 9 Feb 2024 08:27:09 +0100 Subject: [PATCH 099/169] Downgrade to the previous artifact upload action (#396) With the new version, you cannot merge the different runs anymore into a single zip: https://github.com/actions/upload-artifact?tab=readme-ov-file#breaking-changes Which is quite annoying since you have to merge them by hand. 
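
For reference, a rough sketch of the extra merge job that staying on v4 would
require (this assumes the `merge` sub-action documented in the
actions/upload-artifact README for v4; the job name and artifact names below
are illustrative only, not part of this change):

    merge-release-artifacts:
      runs-on: ubuntu-22.04
      needs: build  # assumed name of the per-OS wheel-building job
      steps:
        - uses: actions/upload-artifact/merge@v4
          with:
            name: release  # single combined artifact for the release
            pattern: "release-*"  # match the per-OS artifacts uploaded above
            delete-merged: true  # drop the per-OS artifacts after merging

Downgrading to v3 avoids maintaining this extra job entirely.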
---
 .github/workflows/python-release.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/python-release.yml b/.github/workflows/python-release.yml
index 5192d711ce..a499446d4e 100644
--- a/.github/workflows/python-release.yml
+++ b/.github/workflows/python-release.yml
@@ -80,7 +80,7 @@ jobs:
         if: startsWith(matrix.os, 'ubuntu')
         run: ls -lah dist/* && cp dist/* wheelhouse/
 
-      - uses: actions/upload-artifact@v4
+      - uses: actions/upload-artifact@v3
         with:
-          name: "release-${{ matrix.os }}-${{ github.event.inputs.version }}"
+          name: "release-${{ matrix.os }}"
           path: ./wheelhouse/*

From 2e6730863aa4c6f22e0712c56b08658062f73677 Mon Sep 17 00:00:00 2001
From: Daniel Weeks
Date: Sat, 10 Feb 2024 07:47:33 -0800
Subject: [PATCH 100/169] Hive locking (#405)

* Add hive locking for commit path

* Add integration test

* Fix identifier assignment

* Fix identifier unpacking

* Import properties

* Use properties

* Remove unnecessary fixture

* Update `catalog_hive`

* Separate lock client from commit client

* Fix match expression

* Use separate client for locking

* Lint

---------

Co-authored-by: Fokko Driesprong
---
 pyiceberg/catalog/hive.py       | 34 +++++++++++++++++++++++++++++----
 tests/integration/test_reads.py | 26 ++++++++++++++++++++++++-
 2 files changed, 55 insertions(+), 5 deletions(-)

diff --git a/pyiceberg/catalog/hive.py b/pyiceberg/catalog/hive.py
index d81404f77a..aba3c173e6 100644
--- a/pyiceberg/catalog/hive.py
+++ b/pyiceberg/catalog/hive.py
@@ -15,6 +15,7 @@
 # specific language governing permissions and limitations
 # under the License.
 import getpass
+import socket
 import time
 from types import TracebackType
 from typing import (
@@ -34,10 +35,17 @@
     AlreadyExistsException,
     FieldSchema,
     InvalidOperationException,
+    LockComponent,
+    LockLevel,
+    LockRequest,
+    LockResponse,
+    LockState,
+    LockType,
     MetaException,
     NoSuchObjectException,
     SerDeInfo,
     StorageDescriptor,
+    UnlockRequest,
 )
 from hive_metastore.ttypes import Database as HiveDatabase
 from hive_metastore.ttypes import Table as HiveTable
@@ -56,6 +64,7 @@
     PropertiesUpdateSummary,
 )
 from pyiceberg.exceptions import (
+    CommitFailedException,
     NamespaceAlreadyExistsError,
     NamespaceNotEmptyError,
     NoSuchIcebergTableError,
@@ -331,6 +340,15 @@ def register_table(self, identifier: Union[str, Identifier], metadata_location:
         """
         raise NotImplementedError
 
+    def _create_lock_request(self, database_name: str, table_name: str) -> LockRequest:
+        lock_component: LockComponent = LockComponent(
+            level=LockLevel.TABLE, type=LockType.EXCLUSIVE, dbname=database_name, tablename=table_name, isTransactional=True
+        )
+
+        lock_request: LockRequest = LockRequest(component=[lock_component], user=getpass.getuser(), hostname=socket.gethostname())
+
+        return lock_request
+
     def _commit_table(self, table_request: CommitTableRequest) -> CommitTableResponse:
         """Update the table.
@@ -363,15 +381,23 @@ def _commit_table(self, table_request: CommitTableRequest) -> CommitTableRespons self._write_metadata(updated_metadata, current_table.io, new_metadata_location) # commit to hive - try: - with self._client as open_client: + # https://github.com/apache/hive/blob/master/standalone-metastore/metastore-common/src/main/thrift/hive_metastore.thrift#L1232 + with self._client as open_client: + lock: LockResponse = open_client.lock(self._create_lock_request(database_name, table_name)) + + try: + if lock.state != LockState.ACQUIRED: + raise CommitFailedException(f"Failed to acquire lock for {table_request.identifier}, state: {lock.state}") + tbl = open_client.get_table(dbname=database_name, tbl_name=table_name) tbl.parameters = _construct_parameters( metadata_location=new_metadata_location, previous_metadata_location=current_table.metadata_location ) open_client.alter_table(dbname=database_name, tbl_name=table_name, new_tbl=tbl) - except NoSuchObjectException as e: - raise NoSuchTableError(f"Table does not exist: {table_name}") from e + except NoSuchObjectException as e: + raise NoSuchTableError(f"Table does not exist: {table_name}") from e + finally: + open_client.unlock(UnlockRequest(lockid=lock.lockid)) return CommitTableResponse(metadata=updated_metadata, metadata_location=new_metadata_location) diff --git a/tests/integration/test_reads.py b/tests/integration/test_reads.py index d487a6477e..c03bc78a18 100644 --- a/tests/integration/test_reads.py +++ b/tests/integration/test_reads.py @@ -22,10 +22,12 @@ import pyarrow.parquet as pq import pytest +from hive_metastore.ttypes import LockRequest, LockResponse, LockState, UnlockRequest from pyarrow.fs import S3FileSystem from pyiceberg.catalog import Catalog, load_catalog -from pyiceberg.exceptions import NoSuchTableError +from pyiceberg.catalog.hive import HiveCatalog, _HiveClient +from pyiceberg.exceptions import CommitFailedException, NoSuchTableError from pyiceberg.expressions import ( And, EqualTo, @@ -467,3 +469,25 @@ def test_null_list_and_map(catalog: Catalog) -> None: # assert arrow_table["col_list_with_struct"].to_pylist() == [None, [{'test': 1}]] # Once https://github.com/apache/arrow/issues/38809 has been fixed assert arrow_table["col_list_with_struct"].to_pylist() == [[], [{'test': 1}]] + + +@pytest.mark.integration +def test_hive_locking(catalog_hive: HiveCatalog) -> None: + table = create_table(catalog_hive) + + database_name: str + table_name: str + _, database_name, table_name = table.identifier + + hive_client: _HiveClient = _HiveClient(catalog_hive.properties["uri"]) + blocking_lock_request: LockRequest = catalog_hive._create_lock_request(database_name, table_name) + + with hive_client as open_client: + # Force a lock on the test table + lock: LockResponse = open_client.lock(blocking_lock_request) + assert lock.state == LockState.ACQUIRED + try: + with pytest.raises(CommitFailedException, match="(Failed to acquire lock for).*"): + table.transaction().set_properties(lock="fail").commit_transaction() + finally: + open_client.unlock(UnlockRequest(lock.lockid)) From a576fc9ae7619d329f4d9bcd667231fc1be94a57 Mon Sep 17 00:00:00 2001 From: Kibum Park <38655427+castedice@users.noreply.github.com> Date: Sun, 11 Feb 2024 00:57:43 +0900 Subject: [PATCH 101/169] Arrow: Support Large Binary when using `to_arrow` (#409) * Arrow: Support Large Binary * Merge with binary --------- Co-authored-by: Fokko Driesprong --- pyiceberg/io/pyarrow.py | 4 ++-- tests/integration/test_writes.py | 2 +- tests/io/test_pyarrow.py | 2 +- 
tests/io/test_pyarrow_visitor.py | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/pyiceberg/io/pyarrow.py b/pyiceberg/io/pyarrow.py index 86571449fb..57f09ba172 100644 --- a/pyiceberg/io/pyarrow.py +++ b/pyiceberg/io/pyarrow.py @@ -533,7 +533,7 @@ def visit_uuid(self, _: UUIDType) -> pa.DataType: return pa.binary(16) def visit_binary(self, _: BinaryType) -> pa.DataType: - return pa.binary() + return pa.large_binary() def _convert_scalar(value: Any, iceberg_type: IcebergType) -> pa.scalar: @@ -882,7 +882,7 @@ def primitive(self, primitive: pa.DataType) -> PrimitiveType: return TimestamptzType() elif primitive.tz is None: return TimestampType() - elif pa.types.is_binary(primitive): + elif pa.types.is_binary(primitive) or pa.types.is_large_binary(primitive): return BinaryType() elif pa.types.is_fixed_size_binary(primitive): primitive = cast(pa.FixedSizeBinaryType, primitive) diff --git a/tests/integration/test_writes.py b/tests/integration/test_writes.py index c08916bc68..58ab830319 100644 --- a/tests/integration/test_writes.py +++ b/tests/integration/test_writes.py @@ -140,7 +140,7 @@ def pa_schema() -> pa.Schema: # ("time", pa.time64("us")), # Not natively supported by Arrow # ("uuid", pa.fixed(16)), - ("binary", pa.binary()), + ("binary", pa.large_binary()), ("fixed", pa.binary(16)), ]) diff --git a/tests/io/test_pyarrow.py b/tests/io/test_pyarrow.py index 745de1a3d3..a3dd56db7f 100644 --- a/tests/io/test_pyarrow.py +++ b/tests/io/test_pyarrow.py @@ -467,7 +467,7 @@ def test_string_type_to_pyarrow() -> None: def test_binary_type_to_pyarrow() -> None: iceberg_type = BinaryType() - assert visit(iceberg_type, _ConvertToArrowSchema()) == pa.binary() + assert visit(iceberg_type, _ConvertToArrowSchema()) == pa.large_binary() def test_struct_type_to_pyarrow(table_schema_simple: Schema) -> None: diff --git a/tests/io/test_pyarrow_visitor.py b/tests/io/test_pyarrow_visitor.py index c6ba18c7b0..7d35cae424 100644 --- a/tests/io/test_pyarrow_visitor.py +++ b/tests/io/test_pyarrow_visitor.py @@ -215,7 +215,7 @@ def test_pyarrow_string_to_iceberg() -> None: def test_pyarrow_variable_binary_to_iceberg() -> None: - pyarrow_type = pa.binary() + pyarrow_type = pa.large_binary() converted_iceberg_type = visit_pyarrow(pyarrow_type, _ConvertToIceberg()) assert converted_iceberg_type == BinaryType() assert visit(converted_iceberg_type, _ConvertToArrowSchema()) == pyarrow_type From fbeeb9ab88a4168c8ae75a9a1f3dd109821ae9b0 Mon Sep 17 00:00:00 2001 From: DK <4944562+vaultah@users.noreply.github.com> Date: Mon, 12 Feb 2024 04:42:25 +0000 Subject: [PATCH 102/169] Use the correct name of extra that adds GCS support (#412) --- mkdocs/docs/index.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mkdocs/docs/index.md b/mkdocs/docs/index.md index ffe3ab4681..a8c2c6bd3c 100644 --- a/mkdocs/docs/index.md +++ b/mkdocs/docs/index.md @@ -55,9 +55,9 @@ You can mix and match optional dependencies depending on your needs: | s3fs | S3FS as a FileIO implementation to interact with the object store | | adlfs | ADLFS as a FileIO implementation to interact with the object store | | snappy | Support for snappy Avro compression | -| gcs | GCS as the FileIO implementation to interact with the object store | +| gcsfs | GCSFS as a FileIO implementation to interact with the object store | -You either need to install `s3fs`, `adlfs`, `gcs`, or `pyarrow` to be able to fetch files from an object store. 
+You either need to install `s3fs`, `adlfs`, `gcsfs`, or `pyarrow` to be able to fetch files from an object store. ## Connecting to a catalog From dab5d768d17cb60b443e299b7a945dece559f7b1 Mon Sep 17 00:00:00 2001 From: Amogh Jahagirdar Date: Mon, 12 Feb 2024 03:05:30 -0700 Subject: [PATCH 103/169] Fix setting V1 format version for Non-REST catalogs (#411) --- pyiceberg/table/__init__.py | 2 ++ pyiceberg/table/metadata.py | 28 +++++++++++++-- tests/catalog/test_glue.py | 32 +++++++++++++++++ tests/catalog/test_hive.py | 55 +++++++++++++++++++++++++++- tests/catalog/test_sql.py | 19 ++++++++++ tests/table/test_metadata.py | 69 ++++++++++++++++++++++++++++++++++++ 6 files changed, 201 insertions(+), 4 deletions(-) diff --git a/pyiceberg/table/__init__.py b/pyiceberg/table/__init__.py index cc4bbf52a3..a87435fcfb 100644 --- a/pyiceberg/table/__init__.py +++ b/pyiceberg/table/__init__.py @@ -166,6 +166,8 @@ class TableProperties: METRICS_MODE_COLUMN_CONF_PREFIX = "write.metadata.metrics.column" DEFAULT_NAME_MAPPING = "schema.name-mapping.default" + FORMAT_VERSION = "format-version" + DEFAULT_FORMAT_VERSION = 2 class PropertyUtil: diff --git a/pyiceberg/table/metadata.py b/pyiceberg/table/metadata.py index 43e29c7b03..a5dfb6ce4c 100644 --- a/pyiceberg/table/metadata.py +++ b/pyiceberg/table/metadata.py @@ -260,8 +260,10 @@ def set_v2_compatible_defaults(cls, data: Dict[str, Any]) -> Dict[str, Any]: The TableMetadata with the defaults applied. """ # When the schema doesn't have an ID - if data.get("schema") and "schema_id" not in data["schema"]: - data["schema"]["schema_id"] = DEFAULT_SCHEMA_ID + schema = data.get("schema") + if isinstance(schema, dict): + if "schema_id" not in schema and "schema-id" not in schema: + schema["schema_id"] = DEFAULT_SCHEMA_ID return data @@ -335,7 +337,7 @@ def to_v2(self) -> TableMetadataV2: metadata["format-version"] = 2 return TableMetadataV2.model_validate(metadata) - format_version: Literal[1] = Field(alias="format-version") + format_version: Literal[1] = Field(alias="format-version", default=1) """An integer version number for the format. Currently, this can be 1 or 2 based on the spec. 
Implementations must throw an exception if a table’s version is higher than the supported version.""" @@ -404,6 +406,8 @@ def new_table_metadata( properties: Properties = EMPTY_DICT, table_uuid: Optional[uuid.UUID] = None, ) -> TableMetadata: + from pyiceberg.table import TableProperties + fresh_schema = assign_fresh_schema_ids(schema) fresh_partition_spec = assign_fresh_partition_spec_ids(partition_spec, schema, fresh_schema) fresh_sort_order = assign_fresh_sort_order_ids(sort_order, schema, fresh_schema) @@ -411,6 +415,24 @@ def new_table_metadata( if table_uuid is None: table_uuid = uuid.uuid4() + # Remove format-version so it does not get persisted + format_version = int(properties.pop(TableProperties.FORMAT_VERSION, TableProperties.DEFAULT_FORMAT_VERSION)) + if format_version == 1: + return TableMetadataV1( + location=location, + last_column_id=fresh_schema.highest_field_id, + current_schema_id=fresh_schema.schema_id, + schema=fresh_schema, + partition_spec=[field.model_dump() for field in fresh_partition_spec.fields], + partition_specs=[fresh_partition_spec], + default_spec_id=fresh_partition_spec.spec_id, + sort_orders=[fresh_sort_order], + default_sort_order_id=fresh_sort_order.order_id, + properties=properties, + last_partition_id=fresh_partition_spec.last_assigned_field_id, + table_uuid=table_uuid, + ) + return TableMetadataV2( location=location, schemas=[fresh_schema], diff --git a/tests/catalog/test_glue.py b/tests/catalog/test_glue.py index 270d2251ba..6e0196c1a2 100644 --- a/tests/catalog/test_glue.py +++ b/tests/catalog/test_glue.py @@ -72,6 +72,38 @@ def test_create_table_with_database_location( assert storage_descriptor["Location"] == f"s3://{BUCKET_NAME}/{database_name}.db/{table_name}" +@mock_aws +def test_create_v1_table( + _bucket_initialize: None, + _glue: boto3.client, + moto_endpoint_url: str, + table_schema_nested: Schema, + database_name: str, + table_name: str, +) -> None: + catalog_name = "glue" + test_catalog = GlueCatalog(catalog_name, **{"s3.endpoint": moto_endpoint_url}) + test_catalog.create_namespace(namespace=database_name, properties={"location": f"s3://{BUCKET_NAME}/{database_name}.db"}) + table = test_catalog.create_table((database_name, table_name), table_schema_nested, properties={"format-version": "1"}) + assert table.format_version == 1 + + table_info = _glue.get_table( + DatabaseName=database_name, + Name=table_name, + ) + + storage_descriptor = table_info["Table"]["StorageDescriptor"] + columns = storage_descriptor["Columns"] + assert len(columns) == len(table_schema_nested.fields) + assert columns[0] == { + "Name": "foo", + "Type": "string", + "Parameters": {"iceberg.field.id": "1", "iceberg.field.optional": "true", "iceberg.field.current": "true"}, + } + + assert storage_descriptor["Location"] == f"s3://{BUCKET_NAME}/{database_name}.db/{table_name}" + + @mock_aws def test_create_table_with_default_warehouse( _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str diff --git a/tests/catalog/test_hive.py b/tests/catalog/test_hive.py index f42962f0f3..dc2689e0d8 100644 --- a/tests/catalog/test_hive.py +++ b/tests/catalog/test_hive.py @@ -43,7 +43,7 @@ ) from pyiceberg.partitioning import PartitionField, PartitionSpec from pyiceberg.schema import Schema -from pyiceberg.table.metadata import TableMetadataUtil, TableMetadataV2 +from pyiceberg.table.metadata import TableMetadataUtil, TableMetadataV1, TableMetadataV2 from pyiceberg.table.refs import SnapshotRef, SnapshotRefType from 
pyiceberg.table.snapshots import ( MetadataLogEntry, @@ -295,6 +295,59 @@ def test_create_table(table_schema_simple: Schema, hive_database: HiveDatabase, assert metadata.model_dump() == expected.model_dump() +@patch("time.time", MagicMock(return_value=12345)) +def test_create_v1_table(table_schema_simple: Schema, hive_database: HiveDatabase, hive_table: HiveTable) -> None: + catalog = HiveCatalog(HIVE_CATALOG_NAME, uri=HIVE_METASTORE_FAKE_URL) + + catalog._client = MagicMock() + catalog._client.__enter__().create_table.return_value = None + catalog._client.__enter__().get_table.return_value = hive_table + catalog._client.__enter__().get_database.return_value = hive_database + catalog.create_table( + ("default", "table"), schema=table_schema_simple, properties={"owner": "javaberg", "format-version": "1"} + ) + + # Test creating V1 table + called_v1_table: HiveTable = catalog._client.__enter__().create_table.call_args[0][0] + metadata_location = called_v1_table.parameters["metadata_location"] + with open(metadata_location, encoding=UTF8) as f: + payload = f.read() + + expected_schema = Schema( + NestedField(field_id=1, name="foo", field_type=StringType(), required=False), + NestedField(field_id=2, name="bar", field_type=IntegerType(), required=True), + NestedField(field_id=3, name="baz", field_type=BooleanType(), required=False), + schema_id=0, + identifier_field_ids=[2], + ) + actual_v1_metadata = TableMetadataUtil.parse_raw(payload) + expected_spec = PartitionSpec() + expected_v1_metadata = TableMetadataV1( + location=actual_v1_metadata.location, + table_uuid=actual_v1_metadata.table_uuid, + last_updated_ms=actual_v1_metadata.last_updated_ms, + last_column_id=3, + schema=expected_schema, + schemas=[expected_schema], + current_schema_id=0, + last_partition_id=1000, + properties={"owner": "javaberg", "write.parquet.compression-codec": "zstd"}, + partition_spec=[], + partition_specs=[expected_spec], + default_spec_id=0, + current_snapshot_id=None, + snapshots=[], + snapshot_log=[], + metadata_log=[], + sort_orders=[SortOrder(order_id=0)], + default_sort_order_id=0, + refs={}, + format_version=1, + ) + + assert actual_v1_metadata.model_dump() == expected_v1_metadata.model_dump() + + def test_load_table(hive_table: HiveTable) -> None: catalog = HiveCatalog(HIVE_CATALOG_NAME, uri=HIVE_METASTORE_FAKE_URL) diff --git a/tests/catalog/test_sql.py b/tests/catalog/test_sql.py index 1ca8fd16d2..1d127378be 100644 --- a/tests/catalog/test_sql.py +++ b/tests/catalog/test_sql.py @@ -38,6 +38,7 @@ ) from pyiceberg.io import FSSPEC_FILE_IO, PY_IO_IMPL from pyiceberg.io.pyarrow import schema_to_pyarrow +from pyiceberg.partitioning import UNPARTITIONED_PARTITION_SPEC from pyiceberg.schema import Schema from pyiceberg.table.snapshots import Operation from pyiceberg.table.sorting import ( @@ -158,6 +159,24 @@ def test_create_table_default_sort_order(catalog: SqlCatalog, table_schema_neste catalog.drop_table(random_identifier) +@pytest.mark.parametrize( + 'catalog', + [ + lazy_fixture('catalog_memory'), + lazy_fixture('catalog_sqlite'), + ], +) +def test_create_v1_table(catalog: SqlCatalog, table_schema_nested: Schema, random_identifier: Identifier) -> None: + database_name, _table_name = random_identifier + catalog.create_namespace(database_name) + table = catalog.create_table(random_identifier, table_schema_nested, properties={"format-version": "1"}) + assert table.sort_order().order_id == 0, "Order ID must match" + assert table.sort_order().is_unsorted is True, "Order must be unsorted" + assert 
table.format_version == 1 + assert table.spec() == UNPARTITIONED_PARTITION_SPEC + catalog.drop_table(random_identifier) + + @pytest.mark.parametrize( 'catalog', [ diff --git a/tests/table/test_metadata.py b/tests/table/test_metadata.py index 2c453b6e03..97a7931cbb 100644 --- a/tests/table/test_metadata.py +++ b/tests/table/test_metadata.py @@ -199,6 +199,75 @@ def test_migrate_v1_partition_specs(example_table_metadata_v1: Dict[str, Any]) - ] +def test_new_table_metadata_with_explicit_v1_format() -> None: + schema = Schema( + NestedField(field_id=10, name="foo", field_type=StringType(), required=False), + NestedField(field_id=22, name="bar", field_type=IntegerType(), required=True), + NestedField(field_id=33, name="baz", field_type=BooleanType(), required=False), + schema_id=10, + identifier_field_ids=[22], + ) + + partition_spec = PartitionSpec( + PartitionField(source_id=22, field_id=1022, transform=IdentityTransform(), name="bar"), spec_id=10 + ) + + sort_order = SortOrder( + SortField(source_id=10, transform=IdentityTransform(), direction=SortDirection.ASC, null_order=NullOrder.NULLS_LAST), + order_id=10, + ) + + actual = new_table_metadata( + schema=schema, + partition_spec=partition_spec, + sort_order=sort_order, + location="s3://some_v1_location/", + properties={'format-version': "1"}, + ) + + expected_schema = Schema( + NestedField(field_id=1, name="foo", field_type=StringType(), required=False), + NestedField(field_id=2, name="bar", field_type=IntegerType(), required=True), + NestedField(field_id=3, name="baz", field_type=BooleanType(), required=False), + schema_id=0, + identifier_field_ids=[2], + ) + + expected_spec = PartitionSpec(PartitionField(source_id=2, field_id=1000, transform=IdentityTransform(), name="bar")) + + expected = TableMetadataV1( + location="s3://some_v1_location/", + table_uuid=actual.table_uuid, + last_updated_ms=actual.last_updated_ms, + last_column_id=3, + schemas=[expected_schema], + schema_=expected_schema, + current_schema_id=0, + partition_spec=[field.model_dump() for field in expected_spec.fields], + partition_specs=[expected_spec], + default_spec_id=0, + last_partition_id=1000, + properties={}, + current_snapshot_id=None, + snapshots=[], + snapshot_log=[], + metadata_log=[], + sort_orders=[ + SortOrder( + SortField( + source_id=1, transform=IdentityTransform(), direction=SortDirection.ASC, null_order=NullOrder.NULLS_LAST + ), + order_id=1, + ) + ], + default_sort_order_id=1, + refs={}, + format_version=1, + ) + + assert actual.model_dump() == expected.model_dump() + + def test_invalid_format_version(example_table_metadata_v1: Dict[str, Any]) -> None: """Test the exception when trying to load an unknown version""" From e9e265a1878a55a4385580bf800382fd75c912c9 Mon Sep 17 00:00:00 2001 From: Fokko Driesprong Date: Mon, 12 Feb 2024 11:05:45 +0100 Subject: [PATCH 104/169] Add Thrift and Hive to NOTICE (#410) * Add Thrift and Hive to NOTICE * Add Avro * Move from license to notice --- LICENSE | 33 --------------------------------- NOTICE | 31 ++++++++++++++++++++++++++++++- 2 files changed, 30 insertions(+), 34 deletions(-) diff --git a/LICENSE b/LICENSE index ffdd12aad2..d645695673 100644 --- a/LICENSE +++ b/LICENSE @@ -200,36 +200,3 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. - --------------------------------------------------------------------------------- - -This product includes code from Apache Avro. 
- -* Code for initializing the Avro (de)compression codecs -* The Binary decoder for reading in an Avro byte stream - -Copyright: 2014-2022 The Apache Software Foundation. -Home page: https://avro.apache.org/ -License: https://www.apache.org/licenses/LICENSE-2.0 - --------------------------------------------------------------------------------- - -This product includes code from Apache Thrift. - -* Uses the fb303.thrift file that's part of Hive's thrift service in vendor/fb303/ - -Copyright: 2006-2022 The Apache Software Foundation. -Home page: https://thrift.apache.org/ -License: https://www.apache.org/licenses/LICENSE-2.0 - --------------------------------------------------------------------------------- - -This product includes code from Apache Hive. - -* Uses hive_metastore.thrift to generate the Hive Metastore client in vendor/hive_metastore/ - -Copyright: 2008-2022 The Apache Software Foundation. -Home page: https://hive.apache.org/ -License: https://www.apache.org/licenses/LICENSE-2.0 - --------------------------------------------------------------------------------- diff --git a/NOTICE b/NOTICE index d7a3c57526..9acae0de12 100644 --- a/NOTICE +++ b/NOTICE @@ -1,8 +1,37 @@ Apache Iceberg -Copyright 2017-2022 The Apache Software Foundation +Copyright 2017-2024 The Apache Software Foundation This product includes software developed at The Apache Software Foundation (http://www.apache.org/). -------------------------------------------------------------------------------- + +This product includes code from Apache Avro. + +* Code for initializing the Avro (de)compression codecs +* The Binary decoder for reading in an Avro byte stream + +Copyright: 2014-2022 The Apache Software Foundation. +Home page: https://avro.apache.org/ +License: https://www.apache.org/licenses/LICENSE-2.0 + +-------------------------------------------------------------------------------- + +This product includes code from Apache Thrift. + +* Uses the fb303.thrift file that's part of Hive's thrift service in vendor/fb303/ + +Copyright: 2006-2022 The Apache Software Foundation. +Home page: https://thrift.apache.org/ +License: https://www.apache.org/licenses/LICENSE-2.0 + +-------------------------------------------------------------------------------- + +This product includes code from Apache Hive. + +* Uses hive_metastore.thrift to generate the Hive Metastore client in vendor/hive_metastore/ + +Copyright: 2008-2022 The Apache Software Foundation. +Home page: https://hive.apache.org/ +License: https://www.apache.org/licenses/LICENSE-2.0 From 9aa42c2c7802b555c83960788f16c69df2b79bfb Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 13 Feb 2024 09:28:39 +0100 Subject: [PATCH 105/169] Build: Bump mkdocs-material from 9.5.8 to 9.5.9 (#418) Bumps [mkdocs-material](https://github.com/squidfunk/mkdocs-material) from 9.5.8 to 9.5.9. - [Release notes](https://github.com/squidfunk/mkdocs-material/releases) - [Changelog](https://github.com/squidfunk/mkdocs-material/blob/master/CHANGELOG) - [Commits](https://github.com/squidfunk/mkdocs-material/compare/9.5.8...9.5.9) --- updated-dependencies: - dependency-name: mkdocs-material dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- mkdocs/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mkdocs/requirements.txt b/mkdocs/requirements.txt index 0657252c18..03d9e4f479 100644 --- a/mkdocs/requirements.txt +++ b/mkdocs/requirements.txt @@ -23,6 +23,6 @@ mkdocstrings-python==1.8.0 mkdocs-literate-nav==0.6.1 mkdocs-autorefs==0.5.0 mkdocs-gen-files==0.5.0 -mkdocs-material==9.5.8 +mkdocs-material==9.5.9 mkdocs-material-extensions==1.3.1 mkdocs-section-index==0.3.8 From 8ec62c13c88ee2691a5880417be0c795189f70ea Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 13 Feb 2024 09:32:26 +0100 Subject: [PATCH 106/169] Build: Bump sqlalchemy from 2.0.25 to 2.0.26 (#417) Bumps [sqlalchemy](https://github.com/sqlalchemy/sqlalchemy) from 2.0.25 to 2.0.26. - [Release notes](https://github.com/sqlalchemy/sqlalchemy/releases) - [Changelog](https://github.com/sqlalchemy/sqlalchemy/blob/main/CHANGES.rst) - [Commits](https://github.com/sqlalchemy/sqlalchemy/commits) --- updated-dependencies: - dependency-name: sqlalchemy dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 100 ++++++++++++++++++++++++++-------------------------- 1 file changed, 50 insertions(+), 50 deletions(-) diff --git a/poetry.lock b/poetry.lock index 8d7b42d2cc..dd02b4bde3 100644 --- a/poetry.lock +++ b/poetry.lock @@ -3759,60 +3759,60 @@ files = [ [[package]] name = "sqlalchemy" -version = "2.0.25" +version = "2.0.26" description = "Database Abstraction Library" optional = true python-versions = ">=3.7" files = [ - {file = "SQLAlchemy-2.0.25-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4344d059265cc8b1b1be351bfb88749294b87a8b2bbe21dfbe066c4199541ebd"}, - {file = "SQLAlchemy-2.0.25-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6f9e2e59cbcc6ba1488404aad43de005d05ca56e069477b33ff74e91b6319735"}, - {file = "SQLAlchemy-2.0.25-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84daa0a2055df9ca0f148a64fdde12ac635e30edbca80e87df9b3aaf419e144a"}, - {file = "SQLAlchemy-2.0.25-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc8b7dabe8e67c4832891a5d322cec6d44ef02f432b4588390017f5cec186a84"}, - {file = "SQLAlchemy-2.0.25-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f5693145220517b5f42393e07a6898acdfe820e136c98663b971906120549da5"}, - {file = "SQLAlchemy-2.0.25-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:db854730a25db7c956423bb9fb4bdd1216c839a689bf9cc15fada0a7fb2f4570"}, - {file = "SQLAlchemy-2.0.25-cp310-cp310-win32.whl", hash = "sha256:14a6f68e8fc96e5e8f5647ef6cda6250c780612a573d99e4d881581432ef1669"}, - {file = "SQLAlchemy-2.0.25-cp310-cp310-win_amd64.whl", hash = "sha256:87f6e732bccd7dcf1741c00f1ecf33797383128bd1c90144ac8adc02cbb98643"}, - {file = "SQLAlchemy-2.0.25-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:342d365988ba88ada8af320d43df4e0b13a694dbd75951f537b2d5e4cb5cd002"}, - {file = "SQLAlchemy-2.0.25-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f37c0caf14b9e9b9e8f6dbc81bc56db06acb4363eba5a633167781a48ef036ed"}, - {file = "SQLAlchemy-2.0.25-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa9373708763ef46782d10e950b49d0235bfe58facebd76917d3f5cbf5971aed"}, - {file = 
"SQLAlchemy-2.0.25-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d24f571990c05f6b36a396218f251f3e0dda916e0c687ef6fdca5072743208f5"}, - {file = "SQLAlchemy-2.0.25-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:75432b5b14dc2fff43c50435e248b45c7cdadef73388e5610852b95280ffd0e9"}, - {file = "SQLAlchemy-2.0.25-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:884272dcd3ad97f47702965a0e902b540541890f468d24bd1d98bcfe41c3f018"}, - {file = "SQLAlchemy-2.0.25-cp311-cp311-win32.whl", hash = "sha256:e607cdd99cbf9bb80391f54446b86e16eea6ad309361942bf88318bcd452363c"}, - {file = "SQLAlchemy-2.0.25-cp311-cp311-win_amd64.whl", hash = "sha256:7d505815ac340568fd03f719446a589162d55c52f08abd77ba8964fbb7eb5b5f"}, - {file = "SQLAlchemy-2.0.25-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:0dacf67aee53b16f365c589ce72e766efaabd2b145f9de7c917777b575e3659d"}, - {file = "SQLAlchemy-2.0.25-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b801154027107461ee992ff4b5c09aa7cc6ec91ddfe50d02bca344918c3265c6"}, - {file = "SQLAlchemy-2.0.25-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59a21853f5daeb50412d459cfb13cb82c089ad4c04ec208cd14dddd99fc23b39"}, - {file = "SQLAlchemy-2.0.25-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:29049e2c299b5ace92cbed0c1610a7a236f3baf4c6b66eb9547c01179f638ec5"}, - {file = "SQLAlchemy-2.0.25-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b64b183d610b424a160b0d4d880995e935208fc043d0302dd29fee32d1ee3f95"}, - {file = "SQLAlchemy-2.0.25-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4f7a7d7fcc675d3d85fbf3b3828ecd5990b8d61bd6de3f1b260080b3beccf215"}, - {file = "SQLAlchemy-2.0.25-cp312-cp312-win32.whl", hash = "sha256:cf18ff7fc9941b8fc23437cc3e68ed4ebeff3599eec6ef5eebf305f3d2e9a7c2"}, - {file = "SQLAlchemy-2.0.25-cp312-cp312-win_amd64.whl", hash = "sha256:91f7d9d1c4dd1f4f6e092874c128c11165eafcf7c963128f79e28f8445de82d5"}, - {file = "SQLAlchemy-2.0.25-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:bb209a73b8307f8fe4fe46f6ad5979649be01607f11af1eb94aa9e8a3aaf77f0"}, - {file = "SQLAlchemy-2.0.25-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:798f717ae7c806d67145f6ae94dc7c342d3222d3b9a311a784f371a4333212c7"}, - {file = "SQLAlchemy-2.0.25-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fdd402169aa00df3142149940b3bf9ce7dde075928c1886d9a1df63d4b8de62"}, - {file = "SQLAlchemy-2.0.25-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0d3cab3076af2e4aa5693f89622bef7fa770c6fec967143e4da7508b3dceb9b9"}, - {file = "SQLAlchemy-2.0.25-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:74b080c897563f81062b74e44f5a72fa44c2b373741a9ade701d5f789a10ba23"}, - {file = "SQLAlchemy-2.0.25-cp37-cp37m-win32.whl", hash = "sha256:87d91043ea0dc65ee583026cb18e1b458d8ec5fc0a93637126b5fc0bc3ea68c4"}, - {file = "SQLAlchemy-2.0.25-cp37-cp37m-win_amd64.whl", hash = "sha256:75f99202324383d613ddd1f7455ac908dca9c2dd729ec8584c9541dd41822a2c"}, - {file = "SQLAlchemy-2.0.25-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:420362338681eec03f53467804541a854617faed7272fe71a1bfdb07336a381e"}, - {file = "SQLAlchemy-2.0.25-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7c88f0c7dcc5f99bdb34b4fd9b69b93c89f893f454f40219fe923a3a2fd11625"}, - {file = "SQLAlchemy-2.0.25-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3be4987e3ee9d9a380b66393b77a4cd6d742480c951a1c56a23c335caca4ce3"}, - {file = 
"SQLAlchemy-2.0.25-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2a159111a0f58fb034c93eeba211b4141137ec4b0a6e75789ab7a3ef3c7e7e3"}, - {file = "SQLAlchemy-2.0.25-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8b8cb63d3ea63b29074dcd29da4dc6a97ad1349151f2d2949495418fd6e48db9"}, - {file = "SQLAlchemy-2.0.25-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:736ea78cd06de6c21ecba7416499e7236a22374561493b456a1f7ffbe3f6cdb4"}, - {file = "SQLAlchemy-2.0.25-cp38-cp38-win32.whl", hash = "sha256:10331f129982a19df4284ceac6fe87353ca3ca6b4ca77ff7d697209ae0a5915e"}, - {file = "SQLAlchemy-2.0.25-cp38-cp38-win_amd64.whl", hash = "sha256:c55731c116806836a5d678a70c84cb13f2cedba920212ba7dcad53260997666d"}, - {file = "SQLAlchemy-2.0.25-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:605b6b059f4b57b277f75ace81cc5bc6335efcbcc4ccb9066695e515dbdb3900"}, - {file = "SQLAlchemy-2.0.25-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:665f0a3954635b5b777a55111ababf44b4fc12b1f3ba0a435b602b6387ffd7cf"}, - {file = "SQLAlchemy-2.0.25-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ecf6d4cda1f9f6cb0b45803a01ea7f034e2f1aed9475e883410812d9f9e3cfcf"}, - {file = "SQLAlchemy-2.0.25-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c51db269513917394faec5e5c00d6f83829742ba62e2ac4fa5c98d58be91662f"}, - {file = "SQLAlchemy-2.0.25-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:790f533fa5c8901a62b6fef5811d48980adeb2f51f1290ade8b5e7ba990ba3de"}, - {file = "SQLAlchemy-2.0.25-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1b1180cda6df7af84fe72e4530f192231b1f29a7496951db4ff38dac1687202d"}, - {file = "SQLAlchemy-2.0.25-cp39-cp39-win32.whl", hash = "sha256:555651adbb503ac7f4cb35834c5e4ae0819aab2cd24857a123370764dc7d7e24"}, - {file = "SQLAlchemy-2.0.25-cp39-cp39-win_amd64.whl", hash = "sha256:dc55990143cbd853a5d038c05e79284baedf3e299661389654551bd02a6a68d7"}, - {file = "SQLAlchemy-2.0.25-py3-none-any.whl", hash = "sha256:a86b4240e67d4753dc3092d9511886795b3c2852abe599cffe108952f7af7ac3"}, - {file = "SQLAlchemy-2.0.25.tar.gz", hash = "sha256:a2c69a7664fb2d54b8682dd774c3b54f67f84fa123cf84dda2a5f40dcaa04e08"}, + {file = "SQLAlchemy-2.0.26-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:56524d767713054f8758217b3a811f6a736e0ae34e7afc33b594926589aa9609"}, + {file = "SQLAlchemy-2.0.26-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c2d8a2c68b279617f13088bdc0fc0e9b5126f8017f8882ff08ee41909fab0713"}, + {file = "SQLAlchemy-2.0.26-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84d377645913d47f0dc802b415bcfe7fb085d86646a12278d77c12eb75b5e1b4"}, + {file = "SQLAlchemy-2.0.26-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4fc0628d2026926404dabc903dc5628f7d936a792aa3a1fc54a20182df8e2172"}, + {file = "SQLAlchemy-2.0.26-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:872f2907ade52601a1e729e85d16913c24dc1f6e7c57d11739f18dcfafde29db"}, + {file = "SQLAlchemy-2.0.26-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ba46fa770578b3cf3b5b77dadb7e94fda7692dd4d1989268ef3dcb65f31c40a3"}, + {file = "SQLAlchemy-2.0.26-cp310-cp310-win32.whl", hash = "sha256:651d10fdba7984bf100222d6e4acc496fec46493262b6170be1981ef860c6184"}, + {file = "SQLAlchemy-2.0.26-cp310-cp310-win_amd64.whl", hash = "sha256:8f95ede696ab0d7328862d69f29b643d35b668c4f3619cb2f0281adc16e64c1b"}, + {file = "SQLAlchemy-2.0.26-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:fab1bb909bd24accf2024a69edd4f885ded182c079c4dbcd515b4842f86b07cb"}, + {file = "SQLAlchemy-2.0.26-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b7ee16afd083bb6bb5ab3962ac7f0eafd1d196c6399388af35fef3d1c6d6d9bb"}, + {file = "SQLAlchemy-2.0.26-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:379af901ceb524cbee5e15c1713bf9fd71dc28053286b7917525d01b938b9628"}, + {file = "SQLAlchemy-2.0.26-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94a78f56ea13f4d6e9efcd2a2d08cc13531918e0516563f6303c4ad98c81e21d"}, + {file = "SQLAlchemy-2.0.26-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a481cc2eec83776ff7b6bb12c8e85d0378af0e2ec4584ac3309365a2a380c64b"}, + {file = "SQLAlchemy-2.0.26-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8cbeb0e49b605cd75f825fb9239a554803ef2bef1a7b2a8b428926ed518b6b63"}, + {file = "SQLAlchemy-2.0.26-cp311-cp311-win32.whl", hash = "sha256:e70cce65239089390c193a7b0d171ce89d2e3dedf797f8010031b2aa2b1e9c80"}, + {file = "SQLAlchemy-2.0.26-cp311-cp311-win_amd64.whl", hash = "sha256:750d1ef39d50520527c45c309c3cb10bbfa6131f93081b4e93858abb5ece2501"}, + {file = "SQLAlchemy-2.0.26-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b39503c3a56e1b2340a7d09e185ddb60b253ad0210877a9958ac64208eb23674"}, + {file = "SQLAlchemy-2.0.26-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1a870e6121a052f826f7ae1e4f0b54ca4c0ccd613278218ca036fa5e0f3be7df"}, + {file = "SQLAlchemy-2.0.26-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5901eed6d0e23ca4b04d66a561799d4f0fe55fcbfc7ca203bb8c3277f442085b"}, + {file = "SQLAlchemy-2.0.26-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d25fe55aab9b20ae4a9523bb269074202be9d92a145fcc0b752fff409754b5f6"}, + {file = "SQLAlchemy-2.0.26-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5310958d08b4bafc311052be42a3b7d61a93a2bf126ddde07b85f712e7e4ac7b"}, + {file = "SQLAlchemy-2.0.26-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fd133afb7e6c59fad365ffa97fb06b1001f88e29e1de351bef3d2b1224e2f132"}, + {file = "SQLAlchemy-2.0.26-cp312-cp312-win32.whl", hash = "sha256:dc32ecf643c4904dd413e6a95a3f2c8a89ccd6f15083e586dcf8f42eb4e317ae"}, + {file = "SQLAlchemy-2.0.26-cp312-cp312-win_amd64.whl", hash = "sha256:6e25f029e8ad6d893538b5abe8537e7f09e21d8e96caee46a7e2199f3ddd77b0"}, + {file = "SQLAlchemy-2.0.26-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:99a9a8204b8937aa72421e31c493bfc12fd063a8310a0522e5a9b98e6323977c"}, + {file = "SQLAlchemy-2.0.26-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:691d68a4fca30c9a676623d094b600797699530e175b6524a9f57e3273f5fa8d"}, + {file = "SQLAlchemy-2.0.26-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79a74a4ca4310c812f97bf0f13ce00ed73c890954b5a20b32484a9ab60e567e9"}, + {file = "SQLAlchemy-2.0.26-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f2efbbeb18c0e1c53b670a46a009fbde7b58e05b397a808c7e598532b17c6f4b"}, + {file = "SQLAlchemy-2.0.26-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3fc557f5402206c18ec3d288422f8e5fa764306d49f4efbc6090a7407bf54938"}, + {file = "SQLAlchemy-2.0.26-cp37-cp37m-win32.whl", hash = "sha256:a9846ffee3283cff4ec476e7ee289314290fcb2384aab5045c6f481c5c4d011f"}, + {file = "SQLAlchemy-2.0.26-cp37-cp37m-win_amd64.whl", hash = "sha256:ed4667d3d5d6e203a271d684d5b213ebcd618f7a8bc605752a8865eb9e67a79a"}, + {file = "SQLAlchemy-2.0.26-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:79e629df3f69f849a1482a2d063596b23e32036b83547397e68725e6e0d0a9ab"}, + {file = "SQLAlchemy-2.0.26-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4b4d848b095173e0a9e377127b814490499e55f5168f617ae2c07653c326b9d1"}, + {file = "SQLAlchemy-2.0.26-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f06afe8e96d7f221cc0b59334dc400151be22f432785e895e37030579d253c3"}, + {file = "SQLAlchemy-2.0.26-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f75ac12d302205e60f77f46bd162d40dc37438f1f8db160d2491a78b19a0bd61"}, + {file = "SQLAlchemy-2.0.26-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ec3717c1efee8ad4b97f6211978351de3abe1e4b5f73e32f775c7becec021c5c"}, + {file = "SQLAlchemy-2.0.26-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06ed4d6bb2365222fb9b0a05478a2d23ad8c1dd874047a9ae1ca1d45f18a255e"}, + {file = "SQLAlchemy-2.0.26-cp38-cp38-win32.whl", hash = "sha256:caa79a6caeb4a3cc4ddb9aba9205c383f5d3bcb60d814e87e74570514754e073"}, + {file = "SQLAlchemy-2.0.26-cp38-cp38-win_amd64.whl", hash = "sha256:996b41c38e34a980e9f810d6e2709a3196e29ee34e46e3c16f96c63da10a9da1"}, + {file = "SQLAlchemy-2.0.26-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4f57af0866f6629eae2d24d022ba1a4c1bac9b16d45027bbfcda4c9d5b0d8f26"}, + {file = "SQLAlchemy-2.0.26-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e1a532bc33163fb19c4759a36504a23e63032bc8d47cee1c66b0b70a04a0957b"}, + {file = "SQLAlchemy-2.0.26-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02a4f954ccb17bd8cff56662efc806c5301508233dc38d0253a5fdb2f33ca3ba"}, + {file = "SQLAlchemy-2.0.26-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a678f728fb075e74aaa7fdc27f8af8f03f82d02e7419362cc8c2a605c16a4114"}, + {file = "SQLAlchemy-2.0.26-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8b39462c9588d4780f041e1b84d2ba038ac01c441c961bbee622dd8f53dec69f"}, + {file = "SQLAlchemy-2.0.26-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98f4d0d2bda2921af5b0c2ca99207cdab00f2922da46a6336c62c8d6814303a7"}, + {file = "SQLAlchemy-2.0.26-cp39-cp39-win32.whl", hash = "sha256:6d68e6b507a3dd20c0add86ac0a0ca061d43c9a0162a122baa5fe952f14240f1"}, + {file = "SQLAlchemy-2.0.26-cp39-cp39-win_amd64.whl", hash = "sha256:fb97a9b93b953084692a52a7877957b7a88dfcedc0c5652124f5aebf5999f7fe"}, + {file = "SQLAlchemy-2.0.26-py3-none-any.whl", hash = "sha256:1128b2cdf49107659f6d1f452695f43a20694cc9305a86e97b70793a1c74eeb4"}, + {file = "SQLAlchemy-2.0.26.tar.gz", hash = "sha256:e1bcd8fcb30305e27355d553608c2c229d3e589fb7ff406da7d7e5d50fa14d0d"}, ] [package.dependencies] From 3a158957b692c7a962bee46905ea1b64c5bffd5e Mon Sep 17 00:00:00 2001 From: Fokko Driesprong Date: Tue, 13 Feb 2024 13:38:59 +0100 Subject: [PATCH 107/169] Add test to capture Duckdb behavior (#389) * Add test to capture Duckdb behavior * Fix timezone issue --- tests/catalog/test_sql.py | 6 ---- tests/conftest.py | 6 ++++ tests/integration/test_writes.py | 61 ++++++++++++++++++++++++++++++++ 3 files changed, 67 insertions(+), 6 deletions(-) diff --git a/tests/catalog/test_sql.py b/tests/catalog/test_sql.py index 1d127378be..f6ff78283b 100644 --- a/tests/catalog/test_sql.py +++ b/tests/catalog/test_sql.py @@ -21,7 +21,6 @@ import pyarrow as pa import pytest -from pytest import TempPathFactory from pytest_lazyfixture import lazy_fixture from sqlalchemy.exc import ArgumentError, IntegrityError @@ -51,11 +50,6 @@ from pyiceberg.types import IntegerType -@pytest.fixture(name="warehouse", scope="session") -def 
fixture_warehouse(tmp_path_factory: TempPathFactory) -> Path: - return tmp_path_factory.mktemp("test_sql") - - @pytest.fixture(name="random_identifier") def fixture_random_identifier(warehouse: Path, database_name: str, table_name: str) -> Identifier: os.makedirs(f"{warehouse}/{database_name}.db/{table_name}/metadata/", exist_ok=True) diff --git a/tests/conftest.py b/tests/conftest.py index c0c3d10273..0e0fbd6836 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -31,6 +31,7 @@ import string import uuid from datetime import datetime +from pathlib import Path from random import choice from tempfile import TemporaryDirectory from typing import ( @@ -1929,6 +1930,11 @@ def example_task(data_file: str) -> FileScanTask: ) +@pytest.fixture(scope="session") +def warehouse(tmp_path_factory: pytest.TempPathFactory) -> Path: + return tmp_path_factory.mktemp("test_sql") + + @pytest.fixture def table_v1(example_table_metadata_v1: Dict[str, Any]) -> Table: table_metadata = TableMetadataV1(**example_table_metadata_v1) diff --git a/tests/integration/test_writes.py b/tests/integration/test_writes.py index 58ab830319..54b647b8ed 100644 --- a/tests/integration/test_writes.py +++ b/tests/integration/test_writes.py @@ -15,19 +15,24 @@ # specific language governing permissions and limitations # under the License. # pylint:disable=redefined-outer-name +import os +import time import uuid from datetime import date, datetime +from pathlib import Path from typing import Any, Dict, List from urllib.parse import urlparse import pyarrow as pa import pyarrow.parquet as pq import pytest +import pytz from pyarrow.fs import S3FileSystem from pyspark.sql import SparkSession from pytest_mock.plugin import MockerFixture from pyiceberg.catalog import Catalog, Properties, Table, load_catalog +from pyiceberg.catalog.sql import SqlCatalog from pyiceberg.exceptions import NamespaceAlreadyExistsError, NoSuchTableError from pyiceberg.schema import Schema from pyiceberg.types import ( @@ -573,3 +578,59 @@ def test_summaries_with_only_nulls( 'total-position-deletes': '0', 'total-records': '0', } + + +@pytest.mark.integration +def test_duckdb_url_import(warehouse: Path, arrow_table_with_null: pa.Table) -> None: + os.environ['TZ'] = 'Etc/UTC' + time.tzset() + tz = pytz.timezone(os.environ['TZ']) + + catalog = SqlCatalog("test_sql_catalog", uri="sqlite:///:memory:", warehouse=f"/{warehouse}") + catalog.create_namespace("default") + + identifier = "default.arrow_table_v1_with_null" + tbl = _create_table(catalog, identifier, {}, [arrow_table_with_null]) + location = tbl.metadata_location + + import duckdb + + duckdb.sql('INSTALL iceberg; LOAD iceberg;') + result = duckdb.sql( + f""" + SELECT * + FROM iceberg_scan('{location}') + """ + ).fetchall() + + assert result == [ + ( + False, + 'a', + 'aaaaaaaaaaaaaaaaaaaaaa', + 1, + 1, + 0.0, + 0.0, + datetime(2023, 1, 1, 19, 25), + datetime(2023, 1, 1, 19, 25, tzinfo=tz), + date(2023, 1, 1), + b'\x01', + b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00', + ), + (None, None, None, None, None, None, None, None, None, None, None, None), + ( + True, + 'z', + 'zzzzzzzzzzzzzzzzzzzzzz', + 9, + 9, + 0.8999999761581421, + 0.9, + datetime(2023, 3, 1, 19, 25), + datetime(2023, 3, 1, 19, 25, tzinfo=tz), + date(2023, 3, 1), + b'\x12', + b'\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11', + ), + ] From fbdf04b0f4d83719b26facbac25b4695ff8f0629 Mon Sep 17 00:00:00 2001 From: Fokko Driesprong Date: Tue, 13 Feb 2024 23:18:59 +0100 Subject: [PATCH 108/169] Update `LICENSE` 
and `NOTICE` (#413) * Revert "Add Thrift and Hive to NOTICE (#410)" This reverts commit e9e265a1878a55a4385580bf800382fd75c912c9. * Update year --- LICENSE | 33 +++++++++++++++++++++++++++++++++ NOTICE | 29 ----------------------------- 2 files changed, 33 insertions(+), 29 deletions(-) diff --git a/LICENSE b/LICENSE index d645695673..ffdd12aad2 100644 --- a/LICENSE +++ b/LICENSE @@ -200,3 +200,36 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. + +-------------------------------------------------------------------------------- + +This product includes code from Apache Avro. + +* Code for initializing the Avro (de)compression codecs +* The Binary decoder for reading in an Avro byte stream + +Copyright: 2014-2022 The Apache Software Foundation. +Home page: https://avro.apache.org/ +License: https://www.apache.org/licenses/LICENSE-2.0 + +-------------------------------------------------------------------------------- + +This product includes code from Apache Thrift. + +* Uses the fb303.thrift file that's part of Hive's thrift service in vendor/fb303/ + +Copyright: 2006-2022 The Apache Software Foundation. +Home page: https://thrift.apache.org/ +License: https://www.apache.org/licenses/LICENSE-2.0 + +-------------------------------------------------------------------------------- + +This product includes code from Apache Hive. + +* Uses hive_metastore.thrift to generate the Hive Metastore client in vendor/hive_metastore/ + +Copyright: 2008-2022 The Apache Software Foundation. +Home page: https://hive.apache.org/ +License: https://www.apache.org/licenses/LICENSE-2.0 + +-------------------------------------------------------------------------------- diff --git a/NOTICE b/NOTICE index 9acae0de12..adcae2d516 100644 --- a/NOTICE +++ b/NOTICE @@ -6,32 +6,3 @@ This product includes software developed at The Apache Software Foundation (http://www.apache.org/). -------------------------------------------------------------------------------- - -This product includes code from Apache Avro. - -* Code for initializing the Avro (de)compression codecs -* The Binary decoder for reading in an Avro byte stream - -Copyright: 2014-2022 The Apache Software Foundation. -Home page: https://avro.apache.org/ -License: https://www.apache.org/licenses/LICENSE-2.0 - --------------------------------------------------------------------------------- - -This product includes code from Apache Thrift. - -* Uses the fb303.thrift file that's part of Hive's thrift service in vendor/fb303/ - -Copyright: 2006-2022 The Apache Software Foundation. -Home page: https://thrift.apache.org/ -License: https://www.apache.org/licenses/LICENSE-2.0 - --------------------------------------------------------------------------------- - -This product includes code from Apache Hive. - -* Uses hive_metastore.thrift to generate the Hive Metastore client in vendor/hive_metastore/ - -Copyright: 2008-2022 The Apache Software Foundation. 
-Home page: https://hive.apache.org/ -License: https://www.apache.org/licenses/LICENSE-2.0 From 0c0fbd2487b74850621c9580b8c27fc030e18e6e Mon Sep 17 00:00:00 2001 From: Fokko Driesprong Date: Tue, 13 Feb 2024 23:19:40 +0100 Subject: [PATCH 109/169] Docs: Add section on fetching the remote (#421) Before creating the tag --- mkdocs/docs/how-to-release.md | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/mkdocs/docs/how-to-release.md b/mkdocs/docs/how-to-release.md index 9cecad2d5d..5a2072ec03 100644 --- a/mkdocs/docs/how-to-release.md +++ b/mkdocs/docs/how-to-release.md @@ -29,7 +29,17 @@ Make sure that the version is correct in `pyproject.toml` and `pyiceberg/__init_ ### Setting the tag -First set the tag on the commit: +Make sure that you're on the right branch, and that it is up to date: + +For a major/minor release, make sure that you're on `main`; for a patch release, check out the branch corresponding to the version that you want to patch, e.g. `pyiceberg-0.6.x`. + +```bash +git checkout <branch> +git fetch --all +git reset --hard apache/<branch> +``` + +Set the tag on the last commit: ```bash export RC=rc1 From 970c977bd8a9d3942efd5e4f1d9c418f22c4d527 Mon Sep 17 00:00:00 2001 From: Hussein Awala Date: Tue, 13 Feb 2024 23:38:14 +0100 Subject: [PATCH 110/169] Chore: build wheels for all macos versions including arm macos-14 (#416) * Chore: build wheels for all macos versions including arm macos-14 * Bump Python version in the CI to 3.11 --- .github/workflows/python-release.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/python-release.yml b/.github/workflows/python-release.yml index a499446d4e..f1f1e0c2e6 100644 --- a/.github/workflows/python-release.yml +++ b/.github/workflows/python-release.yml @@ -34,7 +34,7 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - os: [ ubuntu-22.04, windows-2022, macos-11 ] + os: [ ubuntu-22.04, windows-2022, macos-11, macos-12, macos-13, macos-14 ] steps: - uses: actions/checkout@v4 - uses: actions/setup-python@v5 with: - python-version: '3.8' + python-version: '3.11' - name: Install poetry run: pip install poetry From dd0d078cd242c9399fcfd74f35159026d7670d69 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 14 Feb 2024 08:38:13 +0100 Subject: [PATCH 111/169] Build: Bump fastavro from 1.9.3 to 1.9.4 (#427) Bumps [fastavro](https://github.com/fastavro/fastavro) from 1.9.3 to 1.9.4. - [Release notes](https://github.com/fastavro/fastavro/releases) - [Changelog](https://github.com/fastavro/fastavro/blob/master/ChangeLog) - [Commits](https://github.com/fastavro/fastavro/compare/1.9.3...1.9.4) --- updated-dependencies: - dependency-name: fastavro dependency-type: direct:development update-type: version-update:semver-patch ...
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 66 +++++++++++++++++++++++++------------------------- pyproject.toml | 2 +- 2 files changed, 34 insertions(+), 34 deletions(-) diff --git a/poetry.lock b/poetry.lock index dd02b4bde3..70f417a97b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -963,42 +963,42 @@ test = ["pytest (>=6)"] [[package]] name = "fastavro" -version = "1.9.3" +version = "1.9.4" description = "Fast read/write of AVRO files" optional = false python-versions = ">=3.8" files = [ - {file = "fastavro-1.9.3-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:5e9b2e1427fb84c0754bc34923d10cabcf2ed23230201208a1371ab7b6027674"}, - {file = "fastavro-1.9.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4ef82f86ae276309abc0072598474b6be68105a0b28f8d7cc0398d1d353d7de"}, - {file = "fastavro-1.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:280ef7ab7232ecb2097038d6842416ec717d0e1c314b80ff245f85201f3396a4"}, - {file = "fastavro-1.9.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4a36cfc0421ed7576ecb1c22de7bd1dedcce62aebbffcc597379d59171e5d76e"}, - {file = "fastavro-1.9.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d80f2e20199140eb8c036b4393e9bc9eff325543311b958c72318999499d4279"}, - {file = "fastavro-1.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:a435f7edd7c5b52cee3f23ca950cd9373ab35cf2aa3d269b3d6aca7e2fc1372c"}, - {file = "fastavro-1.9.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2a7053ed10194ec53754f5337b57b3273a74b48505edcd6edb79fe3c4cd259c0"}, - {file = "fastavro-1.9.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:853e01f13534d1baa0a3d493a8573e665e93ffa35b4bf1d125e21764d343af8e"}, - {file = "fastavro-1.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a5a279cda25d876e6f120950cadf184a307fd8998f9a22a90bb62e6749f88d1e"}, - {file = "fastavro-1.9.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:63d6f928840f3fb1f2e1fe20bc8b7d0e1a51ba4bb0e554ecb837a669fba31288"}, - {file = "fastavro-1.9.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8807046edc78f50b3ea5f55f6a534c87b2a13538e7c56fec3532ef802bcae333"}, - {file = "fastavro-1.9.3-cp311-cp311-win_amd64.whl", hash = "sha256:e502579da4a51c5630eadbd811a1b3d262d6e783bf19998cfb33d2ea0cf6f516"}, - {file = "fastavro-1.9.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:6b665efe442061df8d9608c2fb692847df85d52ad825b776c441802f0dfa6571"}, - {file = "fastavro-1.9.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b8c96d81f0115633489d7f1133a03832922629a61ca81c1d47b482ddcda3b94"}, - {file = "fastavro-1.9.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:338c7ec94dd2474c4679e44d2560a1922cb6fa99acbb7b18957264baf8eadfc7"}, - {file = "fastavro-1.9.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a509b34c9af71a109c633631ac2f6d2209830e13200d0048f7e9c057fd563f8f"}, - {file = "fastavro-1.9.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:967edefab470987c024cd5a1fcd04744a50a91e740c7bdf325181043a47f1083"}, - {file = "fastavro-1.9.3-cp312-cp312-win_amd64.whl", hash = "sha256:033c15e8ed02f80f01d58be1cd880b09fd444faf277263d563a727711d47a98a"}, - {file = "fastavro-1.9.3-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:6b38723327603d77080aec56628e13a739415f8596ca0cc41a905615977c6d6b"}, - {file = 
"fastavro-1.9.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:046d75c4400941fd08f0a6855a34ae63bf02ea01f366b5b749942abe10640056"}, - {file = "fastavro-1.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87ab312b8baf0e61ee717878d390022ee1b713d70b244d69efbf3325680f9749"}, - {file = "fastavro-1.9.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c562fcf8f5091a2446aafd0c2a0da590c24e0b53527a0100d33908e32f20eea8"}, - {file = "fastavro-1.9.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2aa0111e7ebd076d2a094862bbdf8ea175cebba148fcce6c89ff46b625e334b4"}, - {file = "fastavro-1.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:652072e0f455ca19a1ee502b527e603389783657c130d81f89df66775979d6f5"}, - {file = "fastavro-1.9.3-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:0a57cdd4edaee36d4216faf801ebc7f53f45e4e1518bdd9832d6f6f1d6e2d88f"}, - {file = "fastavro-1.9.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b46a18ebed61573b0823c28eda2716485d283258a83659c7fe6ad3aaeacfed4"}, - {file = "fastavro-1.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f756f0723f3bd97db20437d0a8e45712839e6ccd7c82f4d82469533be48b4c7"}, - {file = "fastavro-1.9.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d98d5a08063f5b6d7ac5016a0dfe0698b50d9987cb74686f7dfa8288b7b09e0b"}, - {file = "fastavro-1.9.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:00698e60db58a2d52cb709df882d451fb7664ebb2f8cb37d9171697e060dc767"}, - {file = "fastavro-1.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:d021bbc135023194688e88a7431fb0b5e3ce20e27153bf258f2ce08ee1a0106b"}, - {file = "fastavro-1.9.3.tar.gz", hash = "sha256:a30d3d2353f6d3b4f6dcd6a97ae937b3775faddd63f5856fe11ba3b0dbb1756a"}, + {file = "fastavro-1.9.4-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:60cb38f07462a7fb4e4440ed0de67d3d400ae6b3d780f81327bebde9aa55faef"}, + {file = "fastavro-1.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:063d01d197fc929c20adc09ca9f0ca86d33ac25ee0963ce0b438244eee8315ae"}, + {file = "fastavro-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87a9053fcfbc895f2a16a4303af22077e3a8fdcf1cd5d6ed47ff2ef22cbba2f0"}, + {file = "fastavro-1.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:02bf1276b7326397314adf41b34a4890f6ffa59cf7e0eb20b9e4ab0a143a1598"}, + {file = "fastavro-1.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:56bed9eca435389a8861e6e2d631ec7f8f5dda5b23f93517ac710665bd34ca29"}, + {file = "fastavro-1.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:0cd2099c8c672b853e0b20c13e9b62a69d3fbf67ee7c59c7271ba5df1680310d"}, + {file = "fastavro-1.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:af8c6d8c43a02b5569c093fc5467469541ac408c79c36a5b0900d3dd0b3ba838"}, + {file = "fastavro-1.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4a138710bd61580324d23bc5e3df01f0b82aee0a76404d5dddae73d9e4c723f"}, + {file = "fastavro-1.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:903d97418120ca6b6a7f38a731166c1ccc2c4344ee5e0470d09eb1dc3687540a"}, + {file = "fastavro-1.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c443eeb99899d062dbf78c525e4614dd77e041a7688fa2710c224f4033f193ae"}, + {file = "fastavro-1.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ac26ab0774d1b2b7af6d8f4300ad20bbc4b5469e658a02931ad13ce23635152f"}, + {file = "fastavro-1.9.4-cp311-cp311-win_amd64.whl", hash 
= "sha256:cf7247874c22be856ba7d1f46a0f6e0379a6025f1a48a7da640444cbac6f570b"}, + {file = "fastavro-1.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:68912f2020e1b3d70557260b27dd85fb49a4fc6bfab18d384926127452c1da4c"}, + {file = "fastavro-1.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6925ce137cdd78e109abdb0bc33aad55de6c9f2d2d3036b65453128f2f5f5b92"}, + {file = "fastavro-1.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b928cd294e36e35516d0deb9e104b45be922ba06940794260a4e5dbed6c192a"}, + {file = "fastavro-1.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:90c9838bc4c991ffff5dd9d88a0cc0030f938b3fdf038cdf6babde144b920246"}, + {file = "fastavro-1.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:eca6e54da571b06a3c5a72dbb7212073f56c92a6fbfbf847b91c347510f8a426"}, + {file = "fastavro-1.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:a4b02839ac261100cefca2e2ad04cdfedc556cb66b5ec735e0db428e74b399de"}, + {file = "fastavro-1.9.4-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:4451ee9a305a73313a1558d471299f3130e4ecc10a88bf5742aa03fb37e042e6"}, + {file = "fastavro-1.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a8524fccfb379565568c045d29b2ebf71e1f2c0dd484aeda9fe784ef5febe1a8"}, + {file = "fastavro-1.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33d0a00a6e09baa20f6f038d7a2ddcb7eef0e7a9980e947a018300cb047091b8"}, + {file = "fastavro-1.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:23d7e5b29c9bf6f26e8be754b2c8b919838e506f78ef724de7d22881696712fc"}, + {file = "fastavro-1.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2e6ab3ee53944326460edf1125b2ad5be2fadd80f7211b13c45fa0c503b4cf8d"}, + {file = "fastavro-1.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:64d335ec2004204c501f8697c385d0a8f6b521ac82d5b30696f789ff5bc85f3c"}, + {file = "fastavro-1.9.4-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:7e05f44c493e89e73833bd3ff3790538726906d2856f59adc8103539f4a1b232"}, + {file = "fastavro-1.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:253c63993250bff4ee7b11fb46cf3a4622180a783bedc82a24c6fdcd1b10ca2a"}, + {file = "fastavro-1.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24d6942eb1db14640c2581e0ecd1bbe0afc8a83731fcd3064ae7f429d7880cb7"}, + {file = "fastavro-1.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d47bb66be6091cd48cfe026adcad11c8b11d7d815a2949a1e4ccf03df981ca65"}, + {file = "fastavro-1.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c293897f12f910e58a1024f9c77f565aa8e23b36aafda6ad8e7041accc57a57f"}, + {file = "fastavro-1.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:f05d2afcb10a92e2a9e580a3891f090589b3e567fdc5641f8a46a0b084f120c3"}, + {file = "fastavro-1.9.4.tar.gz", hash = "sha256:56b8363e360a1256c94562393dc7f8611f3baf2b3159f64fb2b9c6b87b14e876"}, ] [package.extras] @@ -4320,4 +4320,4 @@ zstandard = ["zstandard"] [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "d063b6db5f333e4cf881a9778b692e1a9d7926d629141cc8d103520c1fccc6f5" +content-hash = "d6213b5486c28836484089edadcf1b69f3cce1e44bf3e426a8ccab7cdddd246b" diff --git a/pyproject.toml b/pyproject.toml index 2717994127..a07c51df02 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -77,7 +77,7 @@ pytest = "7.4.4" pytest-checkdocs = "2.10.1" pytest-lazy-fixture = "0.6.3" pre-commit = "3.5.0" -fastavro = "1.9.3" +fastavro = "1.9.4" coverage = { version = "^7.4.1", extras = 
["toml"] } requests-mock = "1.11.0" moto = { version = "^5.0.1", extras = ["server"] } From b3d2d225f5f310df924d4002fdfcd556cce02ead Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 14 Feb 2024 08:38:24 +0100 Subject: [PATCH 112/169] Build: Bump getdaft from 0.2.13 to 0.2.14 (#426) Bumps [getdaft](https://github.com/Eventual-Inc/Daft) from 0.2.13 to 0.2.14. - [Release notes](https://github.com/Eventual-Inc/Daft/releases) - [Commits](https://github.com/Eventual-Inc/Daft/compare/v0.2.13...v0.2.14) --- updated-dependencies: - dependency-name: getdaft dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/poetry.lock b/poetry.lock index 70f417a97b..1ec5c76f46 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1211,17 +1211,17 @@ gcsfuse = ["fusepy"] [[package]] name = "getdaft" -version = "0.2.13" -description = "A Distributed DataFrame library for large scale complex data processing." +version = "0.2.14" +description = "Distributed Dataframes for Multimodal Data" optional = true python-versions = ">=3.7" files = [ - {file = "getdaft-0.2.13-cp37-abi3-macosx_10_7_x86_64.whl", hash = "sha256:e7b7648642ddafeb34f9f91e1f847ed9b21244c33c05f98e63b77ef241c3672e"}, - {file = "getdaft-0.2.13-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:7bdeb828a20d9e2507a378ae3312a9071da166df1ab6ef3a4d8b6073eff685ad"}, - {file = "getdaft-0.2.13-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c20447188e83a1fd9b5e8edbe75b0435d08299dca00e8106ebb5c39b66e5f47c"}, - {file = "getdaft-0.2.13-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef015020b9de596d5ebfe2855f55ce56f8fb44ff2ed003f803e14fc4163a3638"}, - {file = "getdaft-0.2.13-cp37-abi3-win_amd64.whl", hash = "sha256:82a3bb1d08f8dad9a87c33445b87a16fbad253507e055a0dd7a16574d230b7b1"}, - {file = "getdaft-0.2.13.tar.gz", hash = "sha256:a814012436513becfdb8bd173868a5fb7d7582085d4be4d782703f9219add786"}, + {file = "getdaft-0.2.14-cp37-abi3-macosx_10_7_x86_64.whl", hash = "sha256:01754aa7f6059cfec363eff9a8e46197bbbc65634da9c12cd998ce2223fb8c45"}, + {file = "getdaft-0.2.14-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:8cf445538b7e0d5016c548b0e951dab3f5cf2fe6303bddd09d9c5ff5747894e1"}, + {file = "getdaft-0.2.14-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3add9d73702765e261d3e88ef04099d6a9c95f90d19c96ff4e014f4d9ed6433e"}, + {file = "getdaft-0.2.14-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:110b0b35bc9732926f9a8b9c3f6996d834a0f24ccb0c400f9b03d580dcd90096"}, + {file = "getdaft-0.2.14-cp37-abi3-win_amd64.whl", hash = "sha256:34ebbbf040982f219c3bae5e2046322150332a3e65ad67ba71985db59b2fff68"}, + {file = "getdaft-0.2.14.tar.gz", hash = "sha256:07cd349fe961536c6bd172b6cf02217fb1fd23a2a021571f6a5c6f0dddb184ec"}, ] [package.dependencies] From 6507a874f8ef8e95305a22afd75c7871d20fde00 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 14 Feb 2024 08:38:32 +0100 Subject: [PATCH 113/169] Build: Bump duckdb from 0.9.2 to 0.10.0 (#425) Bumps [duckdb](https://github.com/duckdb/duckdb) from 0.9.2 to 0.10.0. 
- [Release notes](https://github.com/duckdb/duckdb/releases) - [Changelog](https://github.com/duckdb/duckdb/blob/main/tools/release-pip.py) - [Commits](https://github.com/duckdb/duckdb/compare/v0.9.2...v0.10.0) --- updated-dependencies: - dependency-name: duckdb dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 90 +++++++++++++++++++++++++++++------------------------ 1 file changed, 49 insertions(+), 41 deletions(-) diff --git a/poetry.lock b/poetry.lock index 1ec5c76f46..8843210c5b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -883,50 +883,58 @@ files = [ [[package]] name = "duckdb" -version = "0.9.2" -description = "DuckDB embedded database" +version = "0.10.0" +description = "DuckDB in-process database" optional = true python-versions = ">=3.7.0" files = [ - {file = "duckdb-0.9.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:aadcea5160c586704c03a8a796c06a8afffbefefb1986601104a60cb0bfdb5ab"}, - {file = "duckdb-0.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:08215f17147ed83cbec972175d9882387366de2ed36c21cbe4add04b39a5bcb4"}, - {file = "duckdb-0.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ee6c2a8aba6850abef5e1be9dbc04b8e72a5b2c2b67f77892317a21fae868fe7"}, - {file = "duckdb-0.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ff49f3da9399900fd58b5acd0bb8bfad22c5147584ad2427a78d937e11ec9d0"}, - {file = "duckdb-0.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd5ac5baf8597efd2bfa75f984654afcabcd698342d59b0e265a0bc6f267b3f0"}, - {file = "duckdb-0.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:81c6df905589a1023a27e9712edb5b724566587ef280a0c66a7ec07c8083623b"}, - {file = "duckdb-0.9.2-cp310-cp310-win32.whl", hash = "sha256:a298cd1d821c81d0dec8a60878c4b38c1adea04a9675fb6306c8f9083bbf314d"}, - {file = "duckdb-0.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:492a69cd60b6cb4f671b51893884cdc5efc4c3b2eb76057a007d2a2295427173"}, - {file = "duckdb-0.9.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:061a9ea809811d6e3025c5de31bc40e0302cfb08c08feefa574a6491e882e7e8"}, - {file = "duckdb-0.9.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a43f93be768af39f604b7b9b48891f9177c9282a408051209101ff80f7450d8f"}, - {file = "duckdb-0.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ac29c8c8f56fff5a681f7bf61711ccb9325c5329e64f23cb7ff31781d7b50773"}, - {file = "duckdb-0.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b14d98d26bab139114f62ade81350a5342f60a168d94b27ed2c706838f949eda"}, - {file = "duckdb-0.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:796a995299878913e765b28cc2b14c8e44fae2f54ab41a9ee668c18449f5f833"}, - {file = "duckdb-0.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6cb64ccfb72c11ec9c41b3cb6181b6fd33deccceda530e94e1c362af5f810ba1"}, - {file = "duckdb-0.9.2-cp311-cp311-win32.whl", hash = "sha256:930740cb7b2cd9e79946e1d3a8f66e15dc5849d4eaeff75c8788d0983b9256a5"}, - {file = "duckdb-0.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:c28f13c45006fd525001b2011cdf91fa216530e9751779651e66edc0e446be50"}, - {file = "duckdb-0.9.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:fbce7bbcb4ba7d99fcec84cec08db40bc0dd9342c6c11930ce708817741faeeb"}, - {file = "duckdb-0.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:15a82109a9e69b1891f0999749f9e3265f550032470f51432f944a37cfdc908b"}, - {file = "duckdb-0.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9490fb9a35eb74af40db5569d90df8a04a6f09ed9a8c9caa024998c40e2506aa"}, - {file = "duckdb-0.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:696d5c6dee86c1a491ea15b74aafe34ad2b62dcd46ad7e03b1d00111ca1a8c68"}, - {file = "duckdb-0.9.2-cp37-cp37m-win32.whl", hash = "sha256:4f0935300bdf8b7631ddfc838f36a858c1323696d8c8a2cecbd416bddf6b0631"}, - {file = "duckdb-0.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:0aab900f7510e4d2613263865570203ddfa2631858c7eb8cbed091af6ceb597f"}, - {file = "duckdb-0.9.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:7d8130ed6a0c9421b135d0743705ea95b9a745852977717504e45722c112bf7a"}, - {file = "duckdb-0.9.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:974e5de0294f88a1a837378f1f83330395801e9246f4e88ed3bfc8ada65dcbee"}, - {file = "duckdb-0.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4fbc297b602ef17e579bb3190c94d19c5002422b55814421a0fc11299c0c1100"}, - {file = "duckdb-0.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1dd58a0d84a424924a35b3772419f8cd78a01c626be3147e4934d7a035a8ad68"}, - {file = "duckdb-0.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11a1194a582c80dfb57565daa06141727e415ff5d17e022dc5f31888a5423d33"}, - {file = "duckdb-0.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:be45d08541002a9338e568dca67ab4f20c0277f8f58a73dfc1435c5b4297c996"}, - {file = "duckdb-0.9.2-cp38-cp38-win32.whl", hash = "sha256:dd6f88aeb7fc0bfecaca633629ff5c986ac966fe3b7dcec0b2c48632fd550ba2"}, - {file = "duckdb-0.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:28100c4a6a04e69aa0f4a6670a6d3d67a65f0337246a0c1a429f3f28f3c40b9a"}, - {file = "duckdb-0.9.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7ae5bf0b6ad4278e46e933e51473b86b4b932dbc54ff097610e5b482dd125552"}, - {file = "duckdb-0.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e5d0bb845a80aa48ed1fd1d2d285dd352e96dc97f8efced2a7429437ccd1fe1f"}, - {file = "duckdb-0.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ce262d74a52500d10888110dfd6715989926ec936918c232dcbaddb78fc55b4"}, - {file = "duckdb-0.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6935240da090a7f7d2666f6d0a5e45ff85715244171ca4e6576060a7f4a1200e"}, - {file = "duckdb-0.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a5cfb93e73911696a98b9479299d19cfbc21dd05bb7ab11a923a903f86b4d06e"}, - {file = "duckdb-0.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:64e3bc01751f31e7572d2716c3e8da8fe785f1cdc5be329100818d223002213f"}, - {file = "duckdb-0.9.2-cp39-cp39-win32.whl", hash = "sha256:6e5b80f46487636368e31b61461940e3999986359a78660a50dfdd17dd72017c"}, - {file = "duckdb-0.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:e6142a220180dbeea4f341708bd5f9501c5c962ce7ef47c1cadf5e8810b4cb13"}, - {file = "duckdb-0.9.2.tar.gz", hash = "sha256:3843afeab7c3fc4a4c0b53686a4cc1d9cdbdadcbb468d60fef910355ecafd447"}, + {file = "duckdb-0.10.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:bd0ffb3fddef0f72a150e4d76e10942a84a1a0447d10907df1621b90d6668060"}, + {file = "duckdb-0.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f3d709d5c7c1a12b5e10d0b05fa916c670cd2b50178e3696faa0cc16048a1745"}, + {file = "duckdb-0.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9114aa22ec5d591a20ce5184be90f49d8e5b5348ceaab21e102c54560d07a5f8"}, + {file = 
"duckdb-0.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77a37877efadf39caf7cadde0f430fedf762751b9c54750c821e2f1316705a21"}, + {file = "duckdb-0.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87cbc9e1d9c3fc9f14307bea757f99f15f46843c0ab13a6061354410824ed41f"}, + {file = "duckdb-0.10.0-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f0bfec79fed387201550517d325dff4fad2705020bc139d936cab08b9e845662"}, + {file = "duckdb-0.10.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c5622134d2d9796b15e09de810e450859d4beb46d9b861357ec9ae40a61b775c"}, + {file = "duckdb-0.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:089ee8e831ccaef1b73fc89c43b661567175eed0115454880bafed5e35cda702"}, + {file = "duckdb-0.10.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a05af63747f1d7021995f0811c333dee7316cec3b06c0d3e4741b9bdb678dd21"}, + {file = "duckdb-0.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:072d6eba5d8a59e0069a8b5b4252fed8a21f9fe3f85a9129d186a39b3d0aea03"}, + {file = "duckdb-0.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a77b85668f59b919042832e4659538337f1c7f197123076c5311f1c9cf077df7"}, + {file = "duckdb-0.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:96a666f1d2da65d03199a977aec246920920a5ea1da76b70ae02bd4fb1ffc48c"}, + {file = "duckdb-0.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ec76a4262b783628d26612d184834852d9c92fb203e91af789100c17e3d7173"}, + {file = "duckdb-0.10.0-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:009dd9d2cdbd3b061a9efbdfc79f2d1a8377bcf49f1e5f430138621f8c083a6c"}, + {file = "duckdb-0.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:878f06766088090dad4a2e5ee0081555242b2e8dcb29415ecc97e388cf0cf8d8"}, + {file = "duckdb-0.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:713ff0a1fb63a6d60f454acf67f31656549fb5d63f21ac68314e4f522daa1a89"}, + {file = "duckdb-0.10.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:9c0ee450dfedfb52dd4957244e31820feef17228da31af6d052979450a80fd19"}, + {file = "duckdb-0.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ff79b2ea9994398b545c0d10601cd73565fbd09f8951b3d8003c7c5c0cebc7cb"}, + {file = "duckdb-0.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6bdf1aa71b924ef651062e6b8ff9981ad85bec89598294af8a072062c5717340"}, + {file = "duckdb-0.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0265bbc8216be3ced7b377ba8847128a3fc0ef99798a3c4557c1b88e3a01c23"}, + {file = "duckdb-0.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d418a315a07707a693bd985274c0f8c4dd77015d9ef5d8d3da4cc1942fd82e0"}, + {file = "duckdb-0.10.0-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2828475a292e68c71855190b818aded6bce7328f79e38c04a0c75f8f1c0ceef0"}, + {file = "duckdb-0.10.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c3aaeaae2eba97035c65f31ffdb18202c951337bf2b3d53d77ce1da8ae2ecf51"}, + {file = "duckdb-0.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:c51790aaaea97d8e4a58a114c371ed8d2c4e1ca7cbf29e3bdab6d8ccfc5afc1e"}, + {file = "duckdb-0.10.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8af1ae7cc77a12206b6c47ade191882cc8f49f750bb3e72bb86ac1d4fa89926a"}, + {file = "duckdb-0.10.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:aa4f7e8e8dc0e376aeb280b83f2584d0e25ec38985c27d19f3107b2edc4f4a97"}, + {file = "duckdb-0.10.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28ae942a79fad913defa912b56483cd7827a4e7721f4ce4bc9025b746ecb3c89"}, + {file = "duckdb-0.10.0-cp37-cp37m-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:01b57802898091455ca2a32c1335aac1e398da77c99e8a96a1e5de09f6a0add9"}, + {file = "duckdb-0.10.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:52e1ad4a55fa153d320c367046b9500578192e01c6d04308ba8b540441736f2c"}, + {file = "duckdb-0.10.0-cp37-cp37m-win_amd64.whl", hash = "sha256:904c47d04095af745e989c853f0bfc0776913dfc40dfbd2da7afdbbb5f67fed0"}, + {file = "duckdb-0.10.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:184ae7ea5874f3b8fa51ab0f1519bdd088a0b78c32080ee272b1d137e2c8fd9c"}, + {file = "duckdb-0.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bd33982ecc9bac727a032d6cedced9f19033cbad56647147408891eb51a6cb37"}, + {file = "duckdb-0.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f59bf0949899105dd5f8864cb48139bfb78454a8c017b8258ba2b5e90acf7afc"}, + {file = "duckdb-0.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:395f3b18948001e35dceb48a4423d574e38656606d033eef375408b539e7b076"}, + {file = "duckdb-0.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b8eb2b803be7ee1df70435c33b03a4598cdaf676cd67ad782b288dcff65d781"}, + {file = "duckdb-0.10.0-cp38-cp38-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:31b2ddd331801064326c8e3587a4db8a31d02aef11332c168f45b3bd92effb41"}, + {file = "duckdb-0.10.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c8b89e76a041424b8c2026c5dc1f74b53fbbc6c6f650d563259885ab2e7d093d"}, + {file = "duckdb-0.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:79084a82f16c0a54f6bfb7ded5600400c2daa90eb0d83337d81a56924eaee5d4"}, + {file = "duckdb-0.10.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:79799b3a270dcd9070f677ba510f1e66b112df3068425691bac97c5e278929c7"}, + {file = "duckdb-0.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e8fc394bfe3434920cdbcfbdd0ac3ba40902faa1dbda088db0ba44003a45318a"}, + {file = "duckdb-0.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c116605551b4abf5786243a59bcef02bd69cc51837d0c57cafaa68cdc428aa0c"}, + {file = "duckdb-0.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3191170c3b0a43b0c12644800326f5afdea00d5a4621d59dbbd0c1059139e140"}, + {file = "duckdb-0.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fee69a50eb93c72dc77e7ab1fabe0c38d21a52c5da44a86aa217081e38f9f1bd"}, + {file = "duckdb-0.10.0-cp39-cp39-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c5f449e87dacb16b0d145dbe65fa6fdb5a55b2b6911a46d74876e445dd395bac"}, + {file = "duckdb-0.10.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4487d0df221b17ea4177ad08131bc606b35f25cfadf890987833055b9d10cdf6"}, + {file = "duckdb-0.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:c099ae2ff8fe939fda62da81704f91e2f92ac45e48dc0e37c679c9d243d01e65"}, + {file = "duckdb-0.10.0.tar.gz", hash = "sha256:c02bcc128002aa79e3c9d89b9de25e062d1096a8793bc0d7932317b7977f6845"}, ] [[package]] From b1f3f0638872e0bd422ad0962accb8b5a5b24f38 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 14 Feb 2024 08:41:58 +0100 Subject: [PATCH 114/169] Build: Bump sqlalchemy from 2.0.26 to 2.0.27 (#424) Bumps 
[sqlalchemy](https://github.com/sqlalchemy/sqlalchemy) from 2.0.26 to 2.0.27. - [Release notes](https://github.com/sqlalchemy/sqlalchemy/releases) - [Changelog](https://github.com/sqlalchemy/sqlalchemy/blob/main/CHANGES.rst) - [Commits](https://github.com/sqlalchemy/sqlalchemy/commits) --- updated-dependencies: - dependency-name: sqlalchemy dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 100 ++++++++++++++++++++++++++-------------------------- 1 file changed, 50 insertions(+), 50 deletions(-) diff --git a/poetry.lock b/poetry.lock index 8843210c5b..8ce5846003 100644 --- a/poetry.lock +++ b/poetry.lock @@ -3767,60 +3767,60 @@ files = [ [[package]] name = "sqlalchemy" -version = "2.0.26" +version = "2.0.27" description = "Database Abstraction Library" optional = true python-versions = ">=3.7" files = [ - {file = "SQLAlchemy-2.0.26-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:56524d767713054f8758217b3a811f6a736e0ae34e7afc33b594926589aa9609"}, - {file = "SQLAlchemy-2.0.26-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c2d8a2c68b279617f13088bdc0fc0e9b5126f8017f8882ff08ee41909fab0713"}, - {file = "SQLAlchemy-2.0.26-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84d377645913d47f0dc802b415bcfe7fb085d86646a12278d77c12eb75b5e1b4"}, - {file = "SQLAlchemy-2.0.26-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4fc0628d2026926404dabc903dc5628f7d936a792aa3a1fc54a20182df8e2172"}, - {file = "SQLAlchemy-2.0.26-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:872f2907ade52601a1e729e85d16913c24dc1f6e7c57d11739f18dcfafde29db"}, - {file = "SQLAlchemy-2.0.26-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ba46fa770578b3cf3b5b77dadb7e94fda7692dd4d1989268ef3dcb65f31c40a3"}, - {file = "SQLAlchemy-2.0.26-cp310-cp310-win32.whl", hash = "sha256:651d10fdba7984bf100222d6e4acc496fec46493262b6170be1981ef860c6184"}, - {file = "SQLAlchemy-2.0.26-cp310-cp310-win_amd64.whl", hash = "sha256:8f95ede696ab0d7328862d69f29b643d35b668c4f3619cb2f0281adc16e64c1b"}, - {file = "SQLAlchemy-2.0.26-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fab1bb909bd24accf2024a69edd4f885ded182c079c4dbcd515b4842f86b07cb"}, - {file = "SQLAlchemy-2.0.26-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b7ee16afd083bb6bb5ab3962ac7f0eafd1d196c6399388af35fef3d1c6d6d9bb"}, - {file = "SQLAlchemy-2.0.26-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:379af901ceb524cbee5e15c1713bf9fd71dc28053286b7917525d01b938b9628"}, - {file = "SQLAlchemy-2.0.26-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94a78f56ea13f4d6e9efcd2a2d08cc13531918e0516563f6303c4ad98c81e21d"}, - {file = "SQLAlchemy-2.0.26-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a481cc2eec83776ff7b6bb12c8e85d0378af0e2ec4584ac3309365a2a380c64b"}, - {file = "SQLAlchemy-2.0.26-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8cbeb0e49b605cd75f825fb9239a554803ef2bef1a7b2a8b428926ed518b6b63"}, - {file = "SQLAlchemy-2.0.26-cp311-cp311-win32.whl", hash = "sha256:e70cce65239089390c193a7b0d171ce89d2e3dedf797f8010031b2aa2b1e9c80"}, - {file = "SQLAlchemy-2.0.26-cp311-cp311-win_amd64.whl", hash = "sha256:750d1ef39d50520527c45c309c3cb10bbfa6131f93081b4e93858abb5ece2501"}, - {file = "SQLAlchemy-2.0.26-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:b39503c3a56e1b2340a7d09e185ddb60b253ad0210877a9958ac64208eb23674"}, - {file = "SQLAlchemy-2.0.26-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1a870e6121a052f826f7ae1e4f0b54ca4c0ccd613278218ca036fa5e0f3be7df"}, - {file = "SQLAlchemy-2.0.26-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5901eed6d0e23ca4b04d66a561799d4f0fe55fcbfc7ca203bb8c3277f442085b"}, - {file = "SQLAlchemy-2.0.26-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d25fe55aab9b20ae4a9523bb269074202be9d92a145fcc0b752fff409754b5f6"}, - {file = "SQLAlchemy-2.0.26-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5310958d08b4bafc311052be42a3b7d61a93a2bf126ddde07b85f712e7e4ac7b"}, - {file = "SQLAlchemy-2.0.26-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fd133afb7e6c59fad365ffa97fb06b1001f88e29e1de351bef3d2b1224e2f132"}, - {file = "SQLAlchemy-2.0.26-cp312-cp312-win32.whl", hash = "sha256:dc32ecf643c4904dd413e6a95a3f2c8a89ccd6f15083e586dcf8f42eb4e317ae"}, - {file = "SQLAlchemy-2.0.26-cp312-cp312-win_amd64.whl", hash = "sha256:6e25f029e8ad6d893538b5abe8537e7f09e21d8e96caee46a7e2199f3ddd77b0"}, - {file = "SQLAlchemy-2.0.26-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:99a9a8204b8937aa72421e31c493bfc12fd063a8310a0522e5a9b98e6323977c"}, - {file = "SQLAlchemy-2.0.26-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:691d68a4fca30c9a676623d094b600797699530e175b6524a9f57e3273f5fa8d"}, - {file = "SQLAlchemy-2.0.26-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79a74a4ca4310c812f97bf0f13ce00ed73c890954b5a20b32484a9ab60e567e9"}, - {file = "SQLAlchemy-2.0.26-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f2efbbeb18c0e1c53b670a46a009fbde7b58e05b397a808c7e598532b17c6f4b"}, - {file = "SQLAlchemy-2.0.26-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3fc557f5402206c18ec3d288422f8e5fa764306d49f4efbc6090a7407bf54938"}, - {file = "SQLAlchemy-2.0.26-cp37-cp37m-win32.whl", hash = "sha256:a9846ffee3283cff4ec476e7ee289314290fcb2384aab5045c6f481c5c4d011f"}, - {file = "SQLAlchemy-2.0.26-cp37-cp37m-win_amd64.whl", hash = "sha256:ed4667d3d5d6e203a271d684d5b213ebcd618f7a8bc605752a8865eb9e67a79a"}, - {file = "SQLAlchemy-2.0.26-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:79e629df3f69f849a1482a2d063596b23e32036b83547397e68725e6e0d0a9ab"}, - {file = "SQLAlchemy-2.0.26-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4b4d848b095173e0a9e377127b814490499e55f5168f617ae2c07653c326b9d1"}, - {file = "SQLAlchemy-2.0.26-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f06afe8e96d7f221cc0b59334dc400151be22f432785e895e37030579d253c3"}, - {file = "SQLAlchemy-2.0.26-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f75ac12d302205e60f77f46bd162d40dc37438f1f8db160d2491a78b19a0bd61"}, - {file = "SQLAlchemy-2.0.26-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ec3717c1efee8ad4b97f6211978351de3abe1e4b5f73e32f775c7becec021c5c"}, - {file = "SQLAlchemy-2.0.26-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06ed4d6bb2365222fb9b0a05478a2d23ad8c1dd874047a9ae1ca1d45f18a255e"}, - {file = "SQLAlchemy-2.0.26-cp38-cp38-win32.whl", hash = "sha256:caa79a6caeb4a3cc4ddb9aba9205c383f5d3bcb60d814e87e74570514754e073"}, - {file = "SQLAlchemy-2.0.26-cp38-cp38-win_amd64.whl", hash = "sha256:996b41c38e34a980e9f810d6e2709a3196e29ee34e46e3c16f96c63da10a9da1"}, - {file = "SQLAlchemy-2.0.26-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:4f57af0866f6629eae2d24d022ba1a4c1bac9b16d45027bbfcda4c9d5b0d8f26"}, - {file = "SQLAlchemy-2.0.26-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e1a532bc33163fb19c4759a36504a23e63032bc8d47cee1c66b0b70a04a0957b"}, - {file = "SQLAlchemy-2.0.26-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02a4f954ccb17bd8cff56662efc806c5301508233dc38d0253a5fdb2f33ca3ba"}, - {file = "SQLAlchemy-2.0.26-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a678f728fb075e74aaa7fdc27f8af8f03f82d02e7419362cc8c2a605c16a4114"}, - {file = "SQLAlchemy-2.0.26-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8b39462c9588d4780f041e1b84d2ba038ac01c441c961bbee622dd8f53dec69f"}, - {file = "SQLAlchemy-2.0.26-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98f4d0d2bda2921af5b0c2ca99207cdab00f2922da46a6336c62c8d6814303a7"}, - {file = "SQLAlchemy-2.0.26-cp39-cp39-win32.whl", hash = "sha256:6d68e6b507a3dd20c0add86ac0a0ca061d43c9a0162a122baa5fe952f14240f1"}, - {file = "SQLAlchemy-2.0.26-cp39-cp39-win_amd64.whl", hash = "sha256:fb97a9b93b953084692a52a7877957b7a88dfcedc0c5652124f5aebf5999f7fe"}, - {file = "SQLAlchemy-2.0.26-py3-none-any.whl", hash = "sha256:1128b2cdf49107659f6d1f452695f43a20694cc9305a86e97b70793a1c74eeb4"}, - {file = "SQLAlchemy-2.0.26.tar.gz", hash = "sha256:e1bcd8fcb30305e27355d553608c2c229d3e589fb7ff406da7d7e5d50fa14d0d"}, + {file = "SQLAlchemy-2.0.27-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d04e579e911562f1055d26dab1868d3e0bb905db3bccf664ee8ad109f035618a"}, + {file = "SQLAlchemy-2.0.27-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fa67d821c1fd268a5a87922ef4940442513b4e6c377553506b9db3b83beebbd8"}, + {file = "SQLAlchemy-2.0.27-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c7a596d0be71b7baa037f4ac10d5e057d276f65a9a611c46970f012752ebf2d"}, + {file = "SQLAlchemy-2.0.27-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:954d9735ee9c3fa74874c830d089a815b7b48df6f6b6e357a74130e478dbd951"}, + {file = "SQLAlchemy-2.0.27-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5cd20f58c29bbf2680039ff9f569fa6d21453fbd2fa84dbdb4092f006424c2e6"}, + {file = "SQLAlchemy-2.0.27-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:03f448ffb731b48323bda68bcc93152f751436ad6037f18a42b7e16af9e91c07"}, + {file = "SQLAlchemy-2.0.27-cp310-cp310-win32.whl", hash = "sha256:d997c5938a08b5e172c30583ba6b8aad657ed9901fc24caf3a7152eeccb2f1b4"}, + {file = "SQLAlchemy-2.0.27-cp310-cp310-win_amd64.whl", hash = "sha256:eb15ef40b833f5b2f19eeae65d65e191f039e71790dd565c2af2a3783f72262f"}, + {file = "SQLAlchemy-2.0.27-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6c5bad7c60a392850d2f0fee8f355953abaec878c483dd7c3836e0089f046bf6"}, + {file = "SQLAlchemy-2.0.27-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a3012ab65ea42de1be81fff5fb28d6db893ef978950afc8130ba707179b4284a"}, + {file = "SQLAlchemy-2.0.27-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dbcd77c4d94b23e0753c5ed8deba8c69f331d4fd83f68bfc9db58bc8983f49cd"}, + {file = "SQLAlchemy-2.0.27-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d177b7e82f6dd5e1aebd24d9c3297c70ce09cd1d5d37b43e53f39514379c029c"}, + {file = "SQLAlchemy-2.0.27-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:680b9a36029b30cf063698755d277885d4a0eab70a2c7c6e71aab601323cba45"}, + {file = "SQLAlchemy-2.0.27-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:1306102f6d9e625cebaca3d4c9c8f10588735ef877f0360b5cdb4fdfd3fd7131"}, + {file = "SQLAlchemy-2.0.27-cp311-cp311-win32.whl", hash = "sha256:5b78aa9f4f68212248aaf8943d84c0ff0f74efc65a661c2fc68b82d498311fd5"}, + {file = "SQLAlchemy-2.0.27-cp311-cp311-win_amd64.whl", hash = "sha256:15e19a84b84528f52a68143439d0c7a3a69befcd4f50b8ef9b7b69d2628ae7c4"}, + {file = "SQLAlchemy-2.0.27-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:0de1263aac858f288a80b2071990f02082c51d88335a1db0d589237a3435fe71"}, + {file = "SQLAlchemy-2.0.27-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce850db091bf7d2a1f2fdb615220b968aeff3849007b1204bf6e3e50a57b3d32"}, + {file = "SQLAlchemy-2.0.27-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8dfc936870507da96aebb43e664ae3a71a7b96278382bcfe84d277b88e379b18"}, + {file = "SQLAlchemy-2.0.27-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4fbe6a766301f2e8a4519f4500fe74ef0a8509a59e07a4085458f26228cd7cc"}, + {file = "SQLAlchemy-2.0.27-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4535c49d961fe9a77392e3a630a626af5baa967172d42732b7a43496c8b28876"}, + {file = "SQLAlchemy-2.0.27-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:0fb3bffc0ced37e5aa4ac2416f56d6d858f46d4da70c09bb731a246e70bff4d5"}, + {file = "SQLAlchemy-2.0.27-cp312-cp312-win32.whl", hash = "sha256:7f470327d06400a0aa7926b375b8e8c3c31d335e0884f509fe272b3c700a7254"}, + {file = "SQLAlchemy-2.0.27-cp312-cp312-win_amd64.whl", hash = "sha256:f9374e270e2553653d710ece397df67db9d19c60d2647bcd35bfc616f1622dcd"}, + {file = "SQLAlchemy-2.0.27-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e97cf143d74a7a5a0f143aa34039b4fecf11343eed66538610debc438685db4a"}, + {file = "SQLAlchemy-2.0.27-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7b5a3e2120982b8b6bd1d5d99e3025339f7fb8b8267551c679afb39e9c7c7f1"}, + {file = "SQLAlchemy-2.0.27-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e36aa62b765cf9f43a003233a8c2d7ffdeb55bc62eaa0a0380475b228663a38f"}, + {file = "SQLAlchemy-2.0.27-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5ada0438f5b74c3952d916c199367c29ee4d6858edff18eab783b3978d0db16d"}, + {file = "SQLAlchemy-2.0.27-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b1d9d1bfd96eef3c3faedb73f486c89e44e64e40e5bfec304ee163de01cf996f"}, + {file = "SQLAlchemy-2.0.27-cp37-cp37m-win32.whl", hash = "sha256:ca891af9f3289d24a490a5fde664ea04fe2f4984cd97e26de7442a4251bd4b7c"}, + {file = "SQLAlchemy-2.0.27-cp37-cp37m-win_amd64.whl", hash = "sha256:fd8aafda7cdff03b905d4426b714601c0978725a19efc39f5f207b86d188ba01"}, + {file = "SQLAlchemy-2.0.27-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ec1f5a328464daf7a1e4e385e4f5652dd9b1d12405075ccba1df842f7774b4fc"}, + {file = "SQLAlchemy-2.0.27-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ad862295ad3f644e3c2c0d8b10a988e1600d3123ecb48702d2c0f26771f1c396"}, + {file = "SQLAlchemy-2.0.27-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48217be1de7d29a5600b5c513f3f7664b21d32e596d69582be0a94e36b8309cb"}, + {file = "SQLAlchemy-2.0.27-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e56afce6431450442f3ab5973156289bd5ec33dd618941283847c9fd5ff06bf"}, + {file = "SQLAlchemy-2.0.27-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:611068511b5531304137bcd7fe8117c985d1b828eb86043bd944cebb7fae3910"}, + {file = "SQLAlchemy-2.0.27-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:b86abba762ecfeea359112b2bb4490802b340850bbee1948f785141a5e020de8"}, + {file = "SQLAlchemy-2.0.27-cp38-cp38-win32.whl", hash = "sha256:30d81cc1192dc693d49d5671cd40cdec596b885b0ce3b72f323888ab1c3863d5"}, + {file = "SQLAlchemy-2.0.27-cp38-cp38-win_amd64.whl", hash = "sha256:120af1e49d614d2525ac247f6123841589b029c318b9afbfc9e2b70e22e1827d"}, + {file = "SQLAlchemy-2.0.27-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d07ee7793f2aeb9b80ec8ceb96bc8cc08a2aec8a1b152da1955d64e4825fcbac"}, + {file = "SQLAlchemy-2.0.27-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cb0845e934647232b6ff5150df37ceffd0b67b754b9fdbb095233deebcddbd4a"}, + {file = "SQLAlchemy-2.0.27-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fc19ae2e07a067663dd24fca55f8ed06a288384f0e6e3910420bf4b1270cc51"}, + {file = "SQLAlchemy-2.0.27-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b90053be91973a6fb6020a6e44382c97739736a5a9d74e08cc29b196639eb979"}, + {file = "SQLAlchemy-2.0.27-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2f5c9dfb0b9ab5e3a8a00249534bdd838d943ec4cfb9abe176a6c33408430230"}, + {file = "SQLAlchemy-2.0.27-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:33e8bde8fff203de50399b9039c4e14e42d4d227759155c21f8da4a47fc8053c"}, + {file = "SQLAlchemy-2.0.27-cp39-cp39-win32.whl", hash = "sha256:d873c21b356bfaf1589b89090a4011e6532582b3a8ea568a00e0c3aab09399dd"}, + {file = "SQLAlchemy-2.0.27-cp39-cp39-win_amd64.whl", hash = "sha256:ff2f1b7c963961d41403b650842dc2039175b906ab2093635d8319bef0b7d620"}, + {file = "SQLAlchemy-2.0.27-py3-none-any.whl", hash = "sha256:1ab4e0448018d01b142c916cc7119ca573803a4745cfe341b8f95657812700ac"}, + {file = "SQLAlchemy-2.0.27.tar.gz", hash = "sha256:86a6ed69a71fe6b88bf9331594fa390a2adda4a49b5c06f98e47bf0d392534f8"}, ] [package.dependencies] From 4e2127fdfc87ae942d0fe49a3558f75dda45b3e7 Mon Sep 17 00:00:00 2001 From: Fokko Driesprong Date: Wed, 14 Feb 2024 08:55:10 +0100 Subject: [PATCH 115/169] Set version in path instead of OS (#422) Bundling this by version makes it easier for the release --- .github/workflows/python-release.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/python-release.yml b/.github/workflows/python-release.yml index f1f1e0c2e6..db88f7e824 100644 --- a/.github/workflows/python-release.yml +++ b/.github/workflows/python-release.yml @@ -82,5 +82,5 @@ jobs: - uses: actions/upload-artifact@v3 with: - name: "release-${{ matrix.os }}" + name: "release-${{ github.event.inputs.version }}" path: ./wheelhouse/* From cc449266e7fe0e97f23e61b3c732b75a0d0a8dec Mon Sep 17 00:00:00 2001 From: Fokko Driesprong Date: Wed, 14 Feb 2024 09:11:13 +0100 Subject: [PATCH 116/169] Fix environment variable parsing (#423) * Fix environment variable parsing ```bash export PYICEBERG_CATALOG__SOMETHING__S3__REGION=eu-north-1 ``` Before: ```python >>> from pyiceberg.catalog import load_catalog >>> load_catalog('something').properties {'s3': {'region': 'eu-north-1'}, ...} ``` After: ```python >>> from pyiceberg.catalog import load_catalog >>> load_catalog('something').properties {'s3.region': 'eu-north-1', ...} ``` Which correspondents with the key `s3.region` that we use. 
* Add second test Co-authored-by: Hussein Awala * lint --------- Co-authored-by: Hussein Awala --- pyiceberg/utils/config.py | 4 ++-- tests/utils/test_config.py | 14 ++++++++++++++ 2 files changed, 16 insertions(+), 2 deletions(-) diff --git a/pyiceberg/utils/config.py b/pyiceberg/utils/config.py index 31ba0b36ed..e038005469 100644 --- a/pyiceberg/utils/config.py +++ b/pyiceberg/utils/config.py @@ -125,8 +125,8 @@ def set_property(_config: RecursiveDict, path: List[str], config_value: str) -> env_var_lower = env_var.lower() if env_var_lower.startswith(PYICEBERG.lower()): key = env_var_lower[len(PYICEBERG) :] - parts = key.split("__") - parts_normalized = [part.replace("_", "-") for part in parts] + parts = key.split("__", maxsplit=2) + parts_normalized = [part.replace('__', '.').replace("_", "-") for part in parts] set_property(config, parts_normalized, config_value) return config diff --git a/tests/utils/test_config.py b/tests/utils/test_config.py index d694e15562..5e3f72ccc6 100644 --- a/tests/utils/test_config.py +++ b/tests/utils/test_config.py @@ -41,6 +41,20 @@ def test_from_environment_variables_uppercase() -> None: assert Config().get_catalog_config("PRODUCTION") == {"uri": "https://service.io/api"} +@mock.patch.dict( + os.environ, + { + "PYICEBERG_CATALOG__PRODUCTION__S3__REGION": "eu-north-1", + "PYICEBERG_CATALOG__PRODUCTION__S3__ACCESS_KEY_ID": "username", + }, +) +def test_fix_nested_objects_from_environment_variables() -> None: + assert Config().get_catalog_config("PRODUCTION") == { + 's3.region': 'eu-north-1', + 's3.access-key-id': 'username', + } + + def test_from_configuration_files(tmp_path_factory: pytest.TempPathFactory) -> None: config_path = str(tmp_path_factory.mktemp("config")) with open(f"{config_path}/.pyiceberg.yaml", "w", encoding=UTF8) as file: From 0200eb254ba16aab021c20adee644c45a3cc36e1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 16 Feb 2024 13:22:52 +0100 Subject: [PATCH 117/169] Build: Bump getdaft from 0.2.14 to 0.2.15 (#434) Bumps [getdaft](https://github.com/Eventual-Inc/Daft) from 0.2.14 to 0.2.15. - [Release notes](https://github.com/Eventual-Inc/Daft/releases) - [Commits](https://github.com/Eventual-Inc/Daft/compare/v0.2.14...v0.2.15) --- updated-dependencies: - dependency-name: getdaft dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/poetry.lock b/poetry.lock index 8ce5846003..46d96d2441 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1219,17 +1219,17 @@ gcsfuse = ["fusepy"] [[package]] name = "getdaft" -version = "0.2.14" +version = "0.2.15" description = "Distributed Dataframes for Multimodal Data" optional = true python-versions = ">=3.7" files = [ - {file = "getdaft-0.2.14-cp37-abi3-macosx_10_7_x86_64.whl", hash = "sha256:01754aa7f6059cfec363eff9a8e46197bbbc65634da9c12cd998ce2223fb8c45"}, - {file = "getdaft-0.2.14-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:8cf445538b7e0d5016c548b0e951dab3f5cf2fe6303bddd09d9c5ff5747894e1"}, - {file = "getdaft-0.2.14-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3add9d73702765e261d3e88ef04099d6a9c95f90d19c96ff4e014f4d9ed6433e"}, - {file = "getdaft-0.2.14-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:110b0b35bc9732926f9a8b9c3f6996d834a0f24ccb0c400f9b03d580dcd90096"}, - {file = "getdaft-0.2.14-cp37-abi3-win_amd64.whl", hash = "sha256:34ebbbf040982f219c3bae5e2046322150332a3e65ad67ba71985db59b2fff68"}, - {file = "getdaft-0.2.14.tar.gz", hash = "sha256:07cd349fe961536c6bd172b6cf02217fb1fd23a2a021571f6a5c6f0dddb184ec"}, + {file = "getdaft-0.2.15-cp37-abi3-macosx_10_7_x86_64.whl", hash = "sha256:95c16b0f25a78a13cab21128bcd0b5e7af65a14b12fd037ef8ec8aee5b7ac911"}, + {file = "getdaft-0.2.15-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:6afb66507ae899fb32adc8532b25ddd245a14d695a099ceb594afcb24848adb0"}, + {file = "getdaft-0.2.15-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:580a9971807e30a21136ae10eeb39cb2c880ab6eb87a464447206e4d36d52e2b"}, + {file = "getdaft-0.2.15-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44424e2adc80f12e3a404cc389a1b37eabbd1c8a3f0345d218852a65a7c3593d"}, + {file = "getdaft-0.2.15-cp37-abi3-win_amd64.whl", hash = "sha256:10103355c8a48455a1b2262bc1b7eca6b495059da7f2d220758bc273b734b898"}, + {file = "getdaft-0.2.15.tar.gz", hash = "sha256:5729915db8e15b6d42568cceef4a588ecdc5ce1a29f52c362e41d789bffb32e7"}, ] [package.dependencies] From 39280c8d1e3b9e5498cd7920543568c34bca0453 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 18 Feb 2024 09:30:43 +0100 Subject: [PATCH 118/169] Build: Bump cryptography from 42.0.0 to 42.0.2 (#440) Bumps [cryptography](https://github.com/pyca/cryptography) from 42.0.0 to 42.0.2. - [Changelog](https://github.com/pyca/cryptography/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pyca/cryptography/compare/42.0.0...42.0.2) --- updated-dependencies: - dependency-name: cryptography dependency-type: indirect ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 66 ++++++++++++++++++++++++++--------------------------- 1 file changed, 33 insertions(+), 33 deletions(-) diff --git a/poetry.lock b/poetry.lock index 46d96d2441..92ab37351d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -708,43 +708,43 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "42.0.0" +version = "42.0.2" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-42.0.0-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:c640b0ef54138fde761ec99a6c7dc4ce05e80420262c20fa239e694ca371d434"}, - {file = "cryptography-42.0.0-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:678cfa0d1e72ef41d48993a7be75a76b0725d29b820ff3cfd606a5b2b33fda01"}, - {file = "cryptography-42.0.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:146e971e92a6dd042214b537a726c9750496128453146ab0ee8971a0299dc9bd"}, - {file = "cryptography-42.0.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87086eae86a700307b544625e3ba11cc600c3c0ef8ab97b0fda0705d6db3d4e3"}, - {file = "cryptography-42.0.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:0a68bfcf57a6887818307600c3c0ebc3f62fbb6ccad2240aa21887cda1f8df1b"}, - {file = "cryptography-42.0.0-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:5a217bca51f3b91971400890905a9323ad805838ca3fa1e202a01844f485ee87"}, - {file = "cryptography-42.0.0-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:ca20550bb590db16223eb9ccc5852335b48b8f597e2f6f0878bbfd9e7314eb17"}, - {file = "cryptography-42.0.0-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:33588310b5c886dfb87dba5f013b8d27df7ffd31dc753775342a1e5ab139e59d"}, - {file = "cryptography-42.0.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9515ea7f596c8092fdc9902627e51b23a75daa2c7815ed5aa8cf4f07469212ec"}, - {file = "cryptography-42.0.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:35cf6ed4c38f054478a9df14f03c1169bb14bd98f0b1705751079b25e1cb58bc"}, - {file = "cryptography-42.0.0-cp37-abi3-win32.whl", hash = "sha256:8814722cffcfd1fbd91edd9f3451b88a8f26a5fd41b28c1c9193949d1c689dc4"}, - {file = "cryptography-42.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:a2a8d873667e4fd2f34aedab02ba500b824692c6542e017075a2efc38f60a4c0"}, - {file = "cryptography-42.0.0-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:8fedec73d590fd30c4e3f0d0f4bc961aeca8390c72f3eaa1a0874d180e868ddf"}, - {file = "cryptography-42.0.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be41b0c7366e5549265adf2145135dca107718fa44b6e418dc7499cfff6b4689"}, - {file = "cryptography-42.0.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ca482ea80626048975360c8e62be3ceb0f11803180b73163acd24bf014133a0"}, - {file = "cryptography-42.0.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:c58115384bdcfe9c7f644c72f10f6f42bed7cf59f7b52fe1bf7ae0a622b3a139"}, - {file = "cryptography-42.0.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:56ce0c106d5c3fec1038c3cca3d55ac320a5be1b44bf15116732d0bc716979a2"}, - {file = "cryptography-42.0.0-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:324721d93b998cb7367f1e6897370644751e5580ff9b370c0a50dc60a2003513"}, - {file = "cryptography-42.0.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:d97aae66b7de41cdf5b12087b5509e4e9805ed6f562406dfcf60e8481a9a28f8"}, - {file = "cryptography-42.0.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:85f759ed59ffd1d0baad296e72780aa62ff8a71f94dc1ab340386a1207d0ea81"}, - {file = "cryptography-42.0.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:206aaf42e031b93f86ad60f9f5d9da1b09164f25488238ac1dc488334eb5e221"}, - {file = "cryptography-42.0.0-cp39-abi3-win32.whl", hash = "sha256:74f18a4c8ca04134d2052a140322002fef535c99cdbc2a6afc18a8024d5c9d5b"}, - {file = "cryptography-42.0.0-cp39-abi3-win_amd64.whl", hash = 
"sha256:14e4b909373bc5bf1095311fa0f7fcabf2d1a160ca13f1e9e467be1ac4cbdf94"}, - {file = "cryptography-42.0.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3005166a39b70c8b94455fdbe78d87a444da31ff70de3331cdec2c568cf25b7e"}, - {file = "cryptography-42.0.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:be14b31eb3a293fc6e6aa2807c8a3224c71426f7c4e3639ccf1a2f3ffd6df8c3"}, - {file = "cryptography-42.0.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:bd7cf7a8d9f34cc67220f1195884151426ce616fdc8285df9054bfa10135925f"}, - {file = "cryptography-42.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c310767268d88803b653fffe6d6f2f17bb9d49ffceb8d70aed50ad45ea49ab08"}, - {file = "cryptography-42.0.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:bdce70e562c69bb089523e75ef1d9625b7417c6297a76ac27b1b8b1eb51b7d0f"}, - {file = "cryptography-42.0.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e9326ca78111e4c645f7e49cbce4ed2f3f85e17b61a563328c85a5208cf34440"}, - {file = "cryptography-42.0.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:69fd009a325cad6fbfd5b04c711a4da563c6c4854fc4c9544bff3088387c77c0"}, - {file = "cryptography-42.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:988b738f56c665366b1e4bfd9045c3efae89ee366ca3839cd5af53eaa1401bce"}, - {file = "cryptography-42.0.0.tar.gz", hash = "sha256:6cf9b76d6e93c62114bd19485e5cb003115c134cf9ce91f8ac924c44f8c8c3f4"}, + {file = "cryptography-42.0.2-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:701171f825dcab90969596ce2af253143b93b08f1a716d4b2a9d2db5084ef7be"}, + {file = "cryptography-42.0.2-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:61321672b3ac7aade25c40449ccedbc6db72c7f5f0fdf34def5e2f8b51ca530d"}, + {file = "cryptography-42.0.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea2c3ffb662fec8bbbfce5602e2c159ff097a4631d96235fcf0fb00e59e3ece4"}, + {file = "cryptography-42.0.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b15c678f27d66d247132cbf13df2f75255627bcc9b6a570f7d2fd08e8c081d2"}, + {file = "cryptography-42.0.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:8e88bb9eafbf6a4014d55fb222e7360eef53e613215085e65a13290577394529"}, + {file = "cryptography-42.0.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a047682d324ba56e61b7ea7c7299d51e61fd3bca7dad2ccc39b72bd0118d60a1"}, + {file = "cryptography-42.0.2-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:36d4b7c4be6411f58f60d9ce555a73df8406d484ba12a63549c88bd64f7967f1"}, + {file = "cryptography-42.0.2-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:a00aee5d1b6c20620161984f8ab2ab69134466c51f58c052c11b076715e72929"}, + {file = "cryptography-42.0.2-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:b97fe7d7991c25e6a31e5d5e795986b18fbbb3107b873d5f3ae6dc9a103278e9"}, + {file = "cryptography-42.0.2-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:5fa82a26f92871eca593b53359c12ad7949772462f887c35edaf36f87953c0e2"}, + {file = "cryptography-42.0.2-cp37-abi3-win32.whl", hash = "sha256:4b063d3413f853e056161eb0c7724822a9740ad3caa24b8424d776cebf98e7ee"}, + {file = "cryptography-42.0.2-cp37-abi3-win_amd64.whl", hash = "sha256:841ec8af7a8491ac76ec5a9522226e287187a3107e12b7d686ad354bb78facee"}, + {file = "cryptography-42.0.2-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:55d1580e2d7e17f45d19d3b12098e352f3a37fe86d380bf45846ef257054b242"}, + {file = "cryptography-42.0.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:28cb2c41f131a5758d6ba6a0504150d644054fd9f3203a1e8e8d7ac3aea7f73a"}, + {file = "cryptography-42.0.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9097a208875fc7bbeb1286d0125d90bdfed961f61f214d3f5be62cd4ed8a446"}, + {file = "cryptography-42.0.2-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:44c95c0e96b3cb628e8452ec060413a49002a247b2b9938989e23a2c8291fc90"}, + {file = "cryptography-42.0.2-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:2f9f14185962e6a04ab32d1abe34eae8a9001569ee4edb64d2304bf0d65c53f3"}, + {file = "cryptography-42.0.2-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:09a77e5b2e8ca732a19a90c5bca2d124621a1edb5438c5daa2d2738bfeb02589"}, + {file = "cryptography-42.0.2-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:ad28cff53f60d99a928dfcf1e861e0b2ceb2bc1f08a074fdd601b314e1cc9e0a"}, + {file = "cryptography-42.0.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:130c0f77022b2b9c99d8cebcdd834d81705f61c68e91ddd614ce74c657f8b3ea"}, + {file = "cryptography-42.0.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:fa3dec4ba8fb6e662770b74f62f1a0c7d4e37e25b58b2bf2c1be4c95372b4a33"}, + {file = "cryptography-42.0.2-cp39-abi3-win32.whl", hash = "sha256:3dbd37e14ce795b4af61b89b037d4bc157f2cb23e676fa16932185a04dfbf635"}, + {file = "cryptography-42.0.2-cp39-abi3-win_amd64.whl", hash = "sha256:8a06641fb07d4e8f6c7dda4fc3f8871d327803ab6542e33831c7ccfdcb4d0ad6"}, + {file = "cryptography-42.0.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:087887e55e0b9c8724cf05361357875adb5c20dec27e5816b653492980d20380"}, + {file = "cryptography-42.0.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a7ef8dd0bf2e1d0a27042b231a3baac6883cdd5557036f5e8df7139255feaac6"}, + {file = "cryptography-42.0.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4383b47f45b14459cab66048d384614019965ba6c1a1a141f11b5a551cace1b2"}, + {file = "cryptography-42.0.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:fbeb725c9dc799a574518109336acccaf1303c30d45c075c665c0793c2f79a7f"}, + {file = "cryptography-42.0.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:320948ab49883557a256eab46149df79435a22d2fefd6a66fe6946f1b9d9d008"}, + {file = "cryptography-42.0.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5ef9bc3d046ce83c4bbf4c25e1e0547b9c441c01d30922d812e887dc5f125c12"}, + {file = "cryptography-42.0.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:52ed9ebf8ac602385126c9a2fe951db36f2cb0c2538d22971487f89d0de4065a"}, + {file = "cryptography-42.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:141e2aa5ba100d3788c0ad7919b288f89d1fe015878b9659b307c9ef867d3a65"}, + {file = "cryptography-42.0.2.tar.gz", hash = "sha256:e0ec52ba3c7f1b7d813cd52649a5b3ef1fc0d433219dc8c93827c57eab6cf888"}, ] [package.dependencies] From b737f447d00761068fd17175543f5707de1da3b3 Mon Sep 17 00:00:00 2001 From: Fokko Driesprong Date: Mon, 19 Feb 2024 19:00:35 +0100 Subject: [PATCH 119/169] docs: Add missing release steps (#443) --- mkdocs/docs/how-to-release.md | 32 +++++++++++++++++++++++++++++--- 1 file changed, 29 insertions(+), 3 deletions(-) diff --git a/mkdocs/docs/how-to-release.md b/mkdocs/docs/how-to-release.md index 5a2072ec03..e41250a1f9 100644 --- a/mkdocs/docs/how-to-release.md +++ b/mkdocs/docs/how-to-release.md @@ -146,11 +146,37 @@ cat release-announcement-email.txt ## Vote has passed -Once the vote has been passed, the latest version can be pushed to PyPi. 
Check out the Apache SVN and make sure to publish the right version with `twine`: +Once the vote has been passed, you can close the vote thread by concluding it: + +``` +Thanks everyone for voting! The 72 hours have passed, and a minimum of 3 binding votes have been cast: + ++1 Foo Bar (non-binding) +... ++1 Fokko Driesprong (binding) + +The release candidate has been accepted as PyIceberg . Thanks everyone, when all artifacts are published the announcement will be sent out. + +Kind regards, +``` + +### Copy the artifacts to the release dist + +``` +svn checkout https://dist.apache.org/repos/dist/dev/iceberg /tmp/iceberg-dist-dev +svn checkout https://dist.apache.org/repos/dist/release/iceberg/ /tmp/iceberg-dist-release + +mkdir -p /tmp/iceberg-dist-release/pyiceberg- +cp -r /tmp/iceberg-dist-dev/pyiceberg-rcN/* /tmp/iceberg-dist-release/pyiceberg- + +svn add /tmp/iceberg-dist-release/ +svn ci -m "PyIceberg " /tmp/iceberg-dist-release/ +``` + +The latest version can be pushed to PyPI. Check out the Apache SVN and make sure to publish the right version with `twine`: ```bash -svn checkout https://dist.apache.org/repos/dist/dev/iceberg /tmp/ -twine upload -s /tmp/iceberg/pyiceberg-0.1.0rc1 +twine upload -s /tmp/iceberg-dist-release/pyiceberg-/* ``` Send out an announcement on the dev mail list: From 0dee2e100581ac7a45a6f7cce18ac5d6cfd8964a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 19 Feb 2024 21:05:07 -0800 Subject: [PATCH 120/169] Build: Bump moto from 5.0.1 to 5.0.2 (#447) Bumps [moto](https://github.com/getmoto/moto) from 5.0.1 to 5.0.2. - [Release notes](https://github.com/getmoto/moto/releases) - [Changelog](https://github.com/getmoto/moto/blob/master/CHANGELOG.md) - [Commits](https://github.com/getmoto/moto/compare/5.0.1...5.0.2) --- updated-dependencies: - dependency-name: moto dependency-type: direct:development update-type: version-update:semver-patch ...
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 106 +++++++++++++++---------------------------------- pyproject.toml | 2 +- 2 files changed, 32 insertions(+), 76 deletions(-) diff --git a/poetry.lock b/poetry.lock index 92ab37351d..bc5ee8ee3d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -937,24 +937,6 @@ files = [ {file = "duckdb-0.10.0.tar.gz", hash = "sha256:c02bcc128002aa79e3c9d89b9de25e062d1096a8793bc0d7932317b7977f6845"}, ] -[[package]] -name = "ecdsa" -version = "0.18.0" -description = "ECDSA cryptographic signature library (pure python)" -optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "ecdsa-0.18.0-py2.py3-none-any.whl", hash = "sha256:80600258e7ed2f16b9aa1d7c295bd70194109ad5a30fdee0eaeefef1d4c559dd"}, - {file = "ecdsa-0.18.0.tar.gz", hash = "sha256:190348041559e21b22a1d65cee485282ca11a6f81d503fddb84d5017e9ed1e49"}, -] - -[package.dependencies] -six = ">=1.9.0" - -[package.extras] -gmpy = ["gmpy"] -gmpy2 = ["gmpy2"] - [[package]] name = "exceptiongroup" version = "1.2.0" @@ -1675,6 +1657,23 @@ files = [ {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, ] +[[package]] +name = "joserfc" +version = "0.9.0" +description = "The ultimate Python library for JOSE RFCs, including JWS, JWE, JWK, JWA, JWT" +optional = false +python-versions = ">=3.8" +files = [ + {file = "joserfc-0.9.0-py3-none-any.whl", hash = "sha256:4026bdbe2c196cd40574e916fa1e28874d99649412edaab0e373dec3077153fb"}, + {file = "joserfc-0.9.0.tar.gz", hash = "sha256:eebca7f587b1761ce43a98ffd5327f2b600b9aa5bb0a77b947687f503ad43bc0"}, +] + +[package.dependencies] +cryptography = "*" + +[package.extras] +drafts = ["pycryptodome"] + [[package]] name = "jschema-to-python" version = "1.2.3" @@ -2007,13 +2006,13 @@ files = [ [[package]] name = "moto" -version = "5.0.1" +version = "5.0.2" description = "" optional = false python-versions = ">=3.8" files = [ - {file = "moto-5.0.1-py2.py3-none-any.whl", hash = "sha256:94e3b07a403cc8078ffee94bf404ef677112d036a57ddb5e0f19c5fcf48987f5"}, - {file = "moto-5.0.1.tar.gz", hash = "sha256:62b9798aef9028432194cebb7a671f4064257bb3be662d9c1b83b94411b694bb"}, + {file = "moto-5.0.2-py2.py3-none-any.whl", hash = "sha256:71bb832a18b64f10fc4cec117b9b0e2305e5831d9a17eb74f6b9819ed7613843"}, + {file = "moto-5.0.2.tar.gz", hash = "sha256:7e27395e5c63ff9554ae14b5baa41bfe6d6b1be0e59eb02977c6ce28411246de"}, ] [package.dependencies] @@ -2023,44 +2022,41 @@ botocore = ">=1.14.0" cfn-lint = {version = ">=0.40.0", optional = true, markers = "extra == \"server\""} cryptography = ">=3.3.1" docker = {version = ">=3.0.0", optional = true, markers = "extra == \"server\""} -ecdsa = {version = "!=0.15", optional = true, markers = "extra == \"server\""} flask = {version = "<2.2.0 || >2.2.0,<2.2.1 || >2.2.1", optional = true, markers = "extra == \"server\""} flask-cors = {version = "*", optional = true, markers = "extra == \"server\""} graphql-core = {version = "*", optional = true, markers = "extra == \"server\""} Jinja2 = ">=2.10.1" +joserfc = {version = ">=0.9.0", optional = true, markers = "extra == \"server\""} jsondiff = {version = ">=1.1.2", optional = true, markers = "extra == \"server\""} openapi-spec-validator = {version = ">=0.5.0", optional = true, markers = "extra == \"server\""} py-partiql-parser = {version = "0.5.1", optional = true, markers = "extra == \"server\""} pyparsing = {version = ">=3.0.7", optional = 
true, markers = "extra == \"server\""} python-dateutil = ">=2.1,<3.0.0" -python-jose = {version = ">=3.1.0,<4.0.0", extras = ["cryptography"], optional = true, markers = "extra == \"server\""} PyYAML = {version = ">=5.1", optional = true, markers = "extra == \"server\""} requests = ">=2.5" responses = ">=0.15.0" setuptools = {version = "*", optional = true, markers = "extra == \"server\""} -sshpubkeys = {version = ">=3.1.0", optional = true, markers = "extra == \"server\""} werkzeug = ">=0.5,<2.2.0 || >2.2.0,<2.2.1 || >2.2.1" xmltodict = "*" [package.extras] -all = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.1)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] -apigateway = ["PyYAML (>=5.1)", "ecdsa (!=0.15)", "openapi-spec-validator (>=0.5.0)", "python-jose[cryptography] (>=3.1.0,<4.0.0)"] +all = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "graphql-core", "joserfc (>=0.9.0)", "jsondiff (>=1.1.2)", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.1)", "pyparsing (>=3.0.7)", "setuptools"] +apigateway = ["PyYAML (>=5.1)", "joserfc (>=0.9.0)", "openapi-spec-validator (>=0.5.0)"] apigatewayv2 = ["PyYAML (>=5.1)", "openapi-spec-validator (>=0.5.0)"] appsync = ["graphql-core"] awslambda = ["docker (>=3.0.0)"] batch = ["docker (>=3.0.0)"] -cloudformation = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.1)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] -cognitoidp = ["ecdsa (!=0.15)", "python-jose[cryptography] (>=3.1.0,<4.0.0)"] +cloudformation = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "graphql-core", "joserfc (>=0.9.0)", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.1)", "pyparsing (>=3.0.7)", "setuptools"] +cognitoidp = ["joserfc (>=0.9.0)"] dynamodb = ["docker (>=3.0.0)", "py-partiql-parser (==0.5.1)"] dynamodbstreams = ["docker (>=3.0.0)", "py-partiql-parser (==0.5.1)"] -ec2 = ["sshpubkeys (>=3.1.0)"] glue = ["pyparsing (>=3.0.7)"] iotdata = ["jsondiff (>=1.1.2)"] -proxy = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=2.5.1)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.1)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] -resourcegroupstaggingapi = ["PyYAML (>=5.1)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.1)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)"] +proxy = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=2.5.1)", "graphql-core", "joserfc (>=0.9.0)", "jsondiff (>=1.1.2)", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.1)", "pyparsing (>=3.0.7)", "setuptools"] +resourcegroupstaggingapi = ["PyYAML (>=5.1)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "graphql-core", "joserfc 
(>=0.9.0)", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.1)", "pyparsing (>=3.0.7)"] s3 = ["PyYAML (>=5.1)", "py-partiql-parser (==0.5.1)"] s3crc32c = ["PyYAML (>=5.1)", "crc32c", "py-partiql-parser (==0.5.1)"] -server = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "flask (!=2.2.0,!=2.2.1)", "flask-cors", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.1)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] +server = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "flask (!=2.2.0,!=2.2.1)", "flask-cors", "graphql-core", "joserfc (>=0.9.0)", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.1)", "pyparsing (>=3.0.7)", "setuptools"] ssm = ["PyYAML (>=5.1)"] xray = ["aws-xray-sdk (>=0.93,!=0.96)", "setuptools"] @@ -2786,7 +2782,7 @@ numpy = ">=1.16.6,<2" name = "pyasn1" version = "0.5.1" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" -optional = false +optional = true python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ {file = "pyasn1-0.5.1-py2.py3-none-any.whl", hash = "sha256:4439847c58d40b1d0a573d07e3856e95333f1976294494c325775aeca506eb58"}, @@ -3098,28 +3094,6 @@ files = [ [package.dependencies] six = ">=1.5" -[[package]] -name = "python-jose" -version = "3.3.0" -description = "JOSE implementation in Python" -optional = false -python-versions = "*" -files = [ - {file = "python-jose-3.3.0.tar.gz", hash = "sha256:55779b5e6ad599c6336191246e95eb2293a9ddebd555f796a65f838f07e5d78a"}, - {file = "python_jose-3.3.0-py2.py3-none-any.whl", hash = "sha256:9b1376b023f8b298536eedd47ae1089bcdb848f1535ab30555cd92002d78923a"}, -] - -[package.dependencies] -cryptography = {version = ">=3.4.0", optional = true, markers = "extra == \"cryptography\""} -ecdsa = "!=0.15" -pyasn1 = "*" -rsa = "*" - -[package.extras] -cryptography = ["cryptography (>=3.4.0)"] -pycrypto = ["pyasn1", "pycrypto (>=2.6.0,<2.7.0)"] -pycryptodome = ["pyasn1", "pycryptodome (>=3.3.1,<4.0.0)"] - [[package]] name = "python-snappy" version = "0.6.1" @@ -3665,7 +3639,7 @@ files = [ name = "rsa" version = "4.9" description = "Pure-Python RSA implementation" -optional = false +optional = true python-versions = ">=3.6,<4" files = [ {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, @@ -3852,24 +3826,6 @@ postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] pymysql = ["pymysql"] sqlcipher = ["sqlcipher3_binary"] -[[package]] -name = "sshpubkeys" -version = "3.3.1" -description = "SSH public key parser" -optional = false -python-versions = ">=3" -files = [ - {file = "sshpubkeys-3.3.1-py2.py3-none-any.whl", hash = "sha256:946f76b8fe86704b0e7c56a00d80294e39bc2305999844f079a217885060b1ac"}, - {file = "sshpubkeys-3.3.1.tar.gz", hash = "sha256:3020ed4f8c846849299370fbe98ff4157b0ccc1accec105e07cfa9ae4bb55064"}, -] - -[package.dependencies] -cryptography = ">=2.1.4" -ecdsa = ">=0.13" - -[package.extras] -dev = ["twine", "wheel", "yapf"] - [[package]] name = "strictyaml" version = "1.7.3" @@ -4328,4 +4284,4 @@ zstandard = ["zstandard"] [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "d6213b5486c28836484089edadcf1b69f3cce1e44bf3e426a8ccab7cdddd246b" +content-hash = 
"0a531611ad417be25c145f961e4d808b01a977dd323510e2fcccc262191b7caf" diff --git a/pyproject.toml b/pyproject.toml index a07c51df02..787edec6ca 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -80,7 +80,7 @@ pre-commit = "3.5.0" fastavro = "1.9.4" coverage = { version = "^7.4.1", extras = ["toml"] } requests-mock = "1.11.0" -moto = { version = "^5.0.1", extras = ["server"] } +moto = { version = "^5.0.2", extras = ["server"] } typing-extensions = "4.9.0" pytest-mock = "3.12.0" pyspark = "3.5.0" From c23c24ddd113139ad56aadbb982785bf5a19dfd7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 19 Feb 2024 21:06:06 -0800 Subject: [PATCH 121/169] Build: Bump mkdocs-material from 9.5.9 to 9.5.10 (#448) Bumps [mkdocs-material](https://github.com/squidfunk/mkdocs-material) from 9.5.9 to 9.5.10. - [Release notes](https://github.com/squidfunk/mkdocs-material/releases) - [Changelog](https://github.com/squidfunk/mkdocs-material/blob/master/CHANGELOG) - [Commits](https://github.com/squidfunk/mkdocs-material/compare/9.5.9...9.5.10) --- updated-dependencies: - dependency-name: mkdocs-material dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- mkdocs/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mkdocs/requirements.txt b/mkdocs/requirements.txt index 03d9e4f479..e2099b7c58 100644 --- a/mkdocs/requirements.txt +++ b/mkdocs/requirements.txt @@ -23,6 +23,6 @@ mkdocstrings-python==1.8.0 mkdocs-literate-nav==0.6.1 mkdocs-autorefs==0.5.0 mkdocs-gen-files==0.5.0 -mkdocs-material==9.5.9 +mkdocs-material==9.5.10 mkdocs-material-extensions==1.3.1 mkdocs-section-index==0.3.8 From 015226d81bdc81101ba7f34513f0bd14bca23411 Mon Sep 17 00:00:00 2001 From: Fokko Driesprong Date: Tue, 20 Feb 2024 20:09:14 +0100 Subject: [PATCH 122/169] Make the snapshot creation part of the `Transaction` (#446) * Make the snapshot creation part of the `Transaction` This is also how it is done in Java, and I really like it since it allows you to easily queue up updates in a transaction. For example, an update to the schema. 
* Extend the API --- pyiceberg/io/pyarrow.py | 11 +- pyiceberg/table/__init__.py | 251 ++++++++++++++++++++----------- tests/integration/test_writes.py | 42 ++++++ 3 files changed, 213 insertions(+), 91 deletions(-) diff --git a/pyiceberg/io/pyarrow.py b/pyiceberg/io/pyarrow.py index 57f09ba172..f391abfea2 100644 --- a/pyiceberg/io/pyarrow.py +++ b/pyiceberg/io/pyarrow.py @@ -1714,7 +1714,7 @@ def fill_parquet_file_metadata( data_file.split_offsets = split_offsets -def write_file(table: Table, tasks: Iterator[WriteTask]) -> Iterator[DataFile]: +def write_file(table: Table, tasks: Iterator[WriteTask], file_schema: Optional[Schema] = None) -> Iterator[DataFile]: task = next(tasks) try: @@ -1727,7 +1727,8 @@ def write_file(table: Table, tasks: Iterator[WriteTask]) -> Iterator[DataFile]: parquet_writer_kwargs = _get_parquet_writer_kwargs(table.properties) file_path = f'{table.location()}/data/{task.generate_data_file_filename("parquet")}' - file_schema = schema_to_pyarrow(table.schema()) + file_schema = file_schema or table.schema() + arrow_file_schema = schema_to_pyarrow(file_schema) fo = table.io.new_output(file_path) row_group_size = PropertyUtil.property_as_int( @@ -1736,7 +1737,7 @@ def write_file(table: Table, tasks: Iterator[WriteTask]) -> Iterator[DataFile]: default=TableProperties.PARQUET_ROW_GROUP_SIZE_BYTES_DEFAULT, ) with fo.create(overwrite=True) as fos: - with pq.ParquetWriter(fos, schema=file_schema, **parquet_writer_kwargs) as writer: + with pq.ParquetWriter(fos, schema=arrow_file_schema, **parquet_writer_kwargs) as writer: writer.write_table(task.df, row_group_size=row_group_size) data_file = DataFile( @@ -1758,8 +1759,8 @@ def write_file(table: Table, tasks: Iterator[WriteTask]) -> Iterator[DataFile]: fill_parquet_file_metadata( data_file=data_file, parquet_metadata=writer.writer.metadata, - stats_columns=compute_statistics_plan(table.schema(), table.properties), - parquet_column_mapping=parquet_path_to_id_mapping(table.schema()), + stats_columns=compute_statistics_plan(file_schema, table.properties), + parquet_column_mapping=parquet_path_to_id_mapping(file_schema), ) return iter([data_file]) diff --git a/pyiceberg/table/__init__.py b/pyiceberg/table/__init__.py index a87435fcfb..a939294f30 100644 --- a/pyiceberg/table/__init__.py +++ b/pyiceberg/table/__init__.py @@ -329,6 +329,14 @@ def update_schema(self) -> UpdateSchema: """ return UpdateSchema(self._table, self) + def update_snapshot(self) -> UpdateSnapshot: + """Create a new UpdateSnapshot to produce a new snapshot for the table. + + Returns: + A new UpdateSnapshot + """ + return UpdateSnapshot(self._table, self) + def remove_properties(self, *removals: str) -> Transaction: """Remove properties. @@ -351,6 +359,12 @@ def update_location(self, location: str) -> Transaction: """ raise NotImplementedError("Not yet implemented") + def schema(self) -> Schema: + try: + return next(update for update in self._updates if isinstance(update, AddSchemaUpdate)).schema_ + except StopIteration: + return self._table.schema() + def commit_transaction(self) -> Table: """Commit the changes to the catalog. @@ -965,8 +979,21 @@ def history(self) -> List[SnapshotLogEntry]: return self.metadata.snapshot_log def update_schema(self, allow_incompatible_changes: bool = False, case_sensitive: bool = True) -> UpdateSchema: + """Create a new UpdateSchema to alter the columns of this table. + + Returns: + A new UpdateSchema. 
+ """ return UpdateSchema(self, allow_incompatible_changes=allow_incompatible_changes, case_sensitive=case_sensitive) + def update_snapshot(self) -> UpdateSnapshot: + """Create a new UpdateSnapshot to produce a new snapshot for the table. + + Returns: + A new UpdateSnapshot + """ + return UpdateSnapshot(self) + def name_mapping(self) -> NameMapping: """Return the table's field-id NameMapping.""" if name_mapping_json := self.properties.get(TableProperties.DEFAULT_NAME_MAPPING): @@ -976,7 +1003,7 @@ def name_mapping(self) -> NameMapping: def append(self, df: pa.Table) -> None: """ - Append data to the table. + Shorthand API for appending a PyArrow table to the table. Args: df: The Arrow dataframe that will be appended to overwrite the table @@ -992,19 +1019,16 @@ def append(self, df: pa.Table) -> None: if len(self.spec().fields) > 0: raise ValueError("Cannot write to partitioned tables") - merge = _MergingSnapshotProducer(operation=Operation.APPEND, table=self) - - # skip writing data files if the dataframe is empty - if df.shape[0] > 0: - data_files = _dataframe_to_data_files(self, df=df) - for data_file in data_files: - merge.append_data_file(data_file) - - merge.commit() + with self.update_snapshot().fast_append() as update_snapshot: + # skip writing data files if the dataframe is empty + if df.shape[0] > 0: + data_files = _dataframe_to_data_files(self, df=df) + for data_file in data_files: + update_snapshot.append_data_file(data_file) def overwrite(self, df: pa.Table, overwrite_filter: BooleanExpression = ALWAYS_TRUE) -> None: """ - Overwrite all the data in the table. + Shorthand for overwriting the table with a PyArrow table. Args: df: The Arrow dataframe that will be used to overwrite the table @@ -1025,18 +1049,12 @@ def overwrite(self, df: pa.Table, overwrite_filter: BooleanExpression = ALWAYS_T if len(self.spec().fields) > 0: raise ValueError("Cannot write to partitioned tables") - merge = _MergingSnapshotProducer( - operation=Operation.OVERWRITE if self.current_snapshot() is not None else Operation.APPEND, - table=self, - ) - - # skip writing data files if the dataframe is empty - if df.shape[0] > 0: - data_files = _dataframe_to_data_files(self, df=df) - for data_file in data_files: - merge.append_data_file(data_file) - - merge.commit() + with self.update_snapshot().overwrite() as update_snapshot: + # skip writing data files if the dataframe is empty + if df.shape[0] > 0: + data_files = _dataframe_to_data_files(self, df=df) + for data_file in data_files: + update_snapshot.append_data_file(data_file) def refs(self) -> Dict[str, SnapshotRef]: """Return the snapshot references in the table.""" @@ -2331,7 +2349,12 @@ def _generate_manifest_list_path(location: str, snapshot_id: int, attempt: int, return f'{location}/metadata/snap-{snapshot_id}-{attempt}-{commit_uuid}.avro' -def _dataframe_to_data_files(table: Table, df: pa.Table) -> Iterable[DataFile]: +def _dataframe_to_data_files(table: Table, df: pa.Table, file_schema: Optional[Schema] = None) -> Iterable[DataFile]: + """Convert a PyArrow table into a DataFile. + + Returns: + An iterable that supplies datafiles that represent the table. 
+ """ from pyiceberg.io.pyarrow import write_file if len(table.spec().fields) > 0: @@ -2342,7 +2365,7 @@ def _dataframe_to_data_files(table: Table, df: pa.Table) -> Iterable[DataFile]: # This is an iter, so we don't have to materialize everything every time # This will be more relevant when we start doing partitioned writes - yield from write_file(table, iter([WriteTask(write_uuid, next(counter), df)])) + yield from write_file(table, iter([WriteTask(write_uuid, next(counter), df)]), file_schema=file_schema) class _MergingSnapshotProducer: @@ -2352,8 +2375,9 @@ class _MergingSnapshotProducer: _parent_snapshot_id: Optional[int] _added_data_files: List[DataFile] _commit_uuid: uuid.UUID + _transaction: Optional[Transaction] - def __init__(self, operation: Operation, table: Table) -> None: + def __init__(self, operation: Operation, table: Table, transaction: Optional[Transaction] = None) -> None: self._operation = operation self._table = table self._snapshot_id = table.new_snapshot_id() @@ -2361,46 +2385,25 @@ def __init__(self, operation: Operation, table: Table) -> None: self._parent_snapshot_id = snapshot.snapshot_id if (snapshot := self._table.current_snapshot()) else None self._added_data_files = [] self._commit_uuid = uuid.uuid4() + self._transaction = transaction + + def __enter__(self) -> _MergingSnapshotProducer: + """Start a transaction to update the table.""" + return self + + def __exit__(self, _: Any, value: Any, traceback: Any) -> None: + """Close and commit the transaction.""" + self.commit() def append_data_file(self, data_file: DataFile) -> _MergingSnapshotProducer: self._added_data_files.append(data_file) return self - def _deleted_entries(self) -> List[ManifestEntry]: - """To determine if we need to record any deleted entries. - - With partial overwrites we have to use the predicate to evaluate - which entries are affected. - """ - if self._operation == Operation.OVERWRITE: - if self._parent_snapshot_id is not None: - previous_snapshot = self._table.snapshot_by_id(self._parent_snapshot_id) - if previous_snapshot is None: - # This should never happen since you cannot overwrite an empty table - raise ValueError(f"Could not find the previous snapshot: {self._parent_snapshot_id}") - - executor = ExecutorFactory.get_or_create() - - def _get_entries(manifest: ManifestFile) -> List[ManifestEntry]: - return [ - ManifestEntry( - status=ManifestEntryStatus.DELETED, - snapshot_id=entry.snapshot_id, - data_sequence_number=entry.data_sequence_number, - file_sequence_number=entry.file_sequence_number, - data_file=entry.data_file, - ) - for entry in manifest.fetch_manifest_entry(self._table.io, discard_deleted=True) - if entry.data_file.content == DataFileContent.DATA - ] + @abstractmethod + def _deleted_entries(self) -> List[ManifestEntry]: ... - list_of_entries = executor.map(_get_entries, previous_snapshot.manifests(self._table.io)) - return list(chain(*list_of_entries)) - return [] - elif self._operation == Operation.APPEND: - return [] - else: - raise ValueError(f"Not implemented for: {self._operation}") + @abstractmethod + def _existing_manifests(self) -> List[ManifestFile]: ... 
def _manifests(self) -> List[ManifestFile]: def _write_added_manifest() -> List[ManifestFile]: @@ -2430,7 +2433,7 @@ def _write_added_manifest() -> List[ManifestFile]: def _write_delete_manifest() -> List[ManifestFile]: # Check if we need to mark the files as deleted deleted_entries = self._deleted_entries() - if deleted_entries: + if len(deleted_entries) > 0: output_file_location = _new_manifest_path(location=self._table.location(), num=1, commit_uuid=self._commit_uuid) with write_manifest( format_version=self._table.format_version, @@ -2445,32 +2448,11 @@ def _write_delete_manifest() -> List[ManifestFile]: else: return [] - def _fetch_existing_manifests() -> List[ManifestFile]: - existing_manifests = [] - - # Add existing manifests - if self._operation == Operation.APPEND and self._parent_snapshot_id is not None: - # In case we want to append, just add the existing manifests - previous_snapshot = self._table.snapshot_by_id(self._parent_snapshot_id) - - if previous_snapshot is None: - raise ValueError(f"Snapshot could not be found: {self._parent_snapshot_id}") - - for manifest in previous_snapshot.manifests(io=self._table.io): - if ( - manifest.has_added_files() - or manifest.has_existing_files() - or manifest.added_snapshot_id == self._snapshot_id - ): - existing_manifests.append(manifest) - - return existing_manifests - executor = ExecutorFactory.get_or_create() added_manifests = executor.submit(_write_added_manifest) delete_manifests = executor.submit(_write_delete_manifest) - existing_manifests = executor.submit(_fetch_existing_manifests) + existing_manifests = executor.submit(self._existing_manifests) return added_manifests.result() + delete_manifests.result() + existing_manifests.result() @@ -2515,10 +2497,107 @@ def commit(self) -> Snapshot: schema_id=self._table.schema().schema_id, ) - with self._table.transaction() as tx: - tx.add_snapshot(snapshot=snapshot) - tx.set_ref_snapshot( + if self._transaction is not None: + self._transaction.add_snapshot(snapshot=snapshot) + self._transaction.set_ref_snapshot( snapshot_id=self._snapshot_id, parent_snapshot_id=self._parent_snapshot_id, ref_name="main", type="branch" ) + else: + with self._table.transaction() as tx: + tx.add_snapshot(snapshot=snapshot) + tx.set_ref_snapshot( + snapshot_id=self._snapshot_id, parent_snapshot_id=self._parent_snapshot_id, ref_name="main", type="branch" + ) return snapshot + + +class FastAppendFiles(_MergingSnapshotProducer): + def _existing_manifests(self) -> List[ManifestFile]: + """To determine if there are any existing manifest files. + + A fast append will add another ManifestFile to the ManifestList. + All the existing manifest files are considered existing. + """ + existing_manifests = [] + + if self._parent_snapshot_id is not None: + previous_snapshot = self._table.snapshot_by_id(self._parent_snapshot_id) + + if previous_snapshot is None: + raise ValueError(f"Snapshot could not be found: {self._parent_snapshot_id}") + + for manifest in previous_snapshot.manifests(io=self._table.io): + if manifest.has_added_files() or manifest.has_existing_files() or manifest.added_snapshot_id == self._snapshot_id: + existing_manifests.append(manifest) + + return existing_manifests + + def _deleted_entries(self) -> List[ManifestEntry]: + """To determine if we need to record any deleted manifest entries. + + In case of an append, nothing is deleted. 
+ """ + return [] + + +class OverwriteFiles(_MergingSnapshotProducer): + def _existing_manifests(self) -> List[ManifestFile]: + """To determine if there are any existing manifest files. + + In the of a full overwrite, all the previous manifests are + considered deleted. + """ + return [] + + def _deleted_entries(self) -> List[ManifestEntry]: + """To determine if we need to record any deleted entries. + + With a full overwrite all the entries are considered deleted. + With partial overwrites we have to use the predicate to evaluate + which entries are affected. + """ + if self._parent_snapshot_id is not None: + previous_snapshot = self._table.snapshot_by_id(self._parent_snapshot_id) + if previous_snapshot is None: + # This should never happen since you cannot overwrite an empty table + raise ValueError(f"Could not find the previous snapshot: {self._parent_snapshot_id}") + + executor = ExecutorFactory.get_or_create() + + def _get_entries(manifest: ManifestFile) -> List[ManifestEntry]: + return [ + ManifestEntry( + status=ManifestEntryStatus.DELETED, + snapshot_id=entry.snapshot_id, + data_sequence_number=entry.data_sequence_number, + file_sequence_number=entry.file_sequence_number, + data_file=entry.data_file, + ) + for entry in manifest.fetch_manifest_entry(self._table.io, discard_deleted=True) + if entry.data_file.content == DataFileContent.DATA + ] + + list_of_entries = executor.map(_get_entries, previous_snapshot.manifests(self._table.io)) + return list(chain(*list_of_entries)) + else: + return [] + + +class UpdateSnapshot: + _table: Table + _transaction: Optional[Transaction] + + def __init__(self, table: Table, transaction: Optional[Transaction] = None) -> None: + self._table = table + self._transaction = transaction + + def fast_append(self) -> FastAppendFiles: + return FastAppendFiles(table=self._table, operation=Operation.APPEND, transaction=self._transaction) + + def overwrite(self) -> OverwriteFiles: + return OverwriteFiles( + table=self._table, + operation=Operation.OVERWRITE if self._table.current_snapshot() is not None else Operation.APPEND, + transaction=self._transaction, + ) diff --git a/tests/integration/test_writes.py b/tests/integration/test_writes.py index 54b647b8ed..fa5e93d925 100644 --- a/tests/integration/test_writes.py +++ b/tests/integration/test_writes.py @@ -35,6 +35,7 @@ from pyiceberg.catalog.sql import SqlCatalog from pyiceberg.exceptions import NamespaceAlreadyExistsError, NoSuchTableError from pyiceberg.schema import Schema +from pyiceberg.table import _dataframe_to_data_files from pyiceberg.types import ( BinaryType, BooleanType, @@ -634,3 +635,44 @@ def test_duckdb_url_import(warehouse: Path, arrow_table_with_null: pa.Table) -> b'\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11', ), ] + + +@pytest.mark.integration +@pytest.mark.parametrize("format_version", [1, 2]) +def test_write_and_evolve(session_catalog: Catalog, format_version: int) -> None: + identifier = f"default.arrow_write_data_and_evolve_schema_v{format_version}" + + try: + session_catalog.drop_table(identifier=identifier) + except NoSuchTableError: + pass + + pa_table = pa.Table.from_pydict( + { + 'foo': ['a', None, 'z'], + }, + schema=pa.schema([pa.field("foo", pa.string(), nullable=True)]), + ) + + tbl = session_catalog.create_table( + identifier=identifier, schema=pa_table.schema, properties={"format-version": str(format_version)} + ) + + pa_table_with_column = pa.Table.from_pydict( + { + 'foo': ['a', None, 'z'], + 'bar': [19, None, 25], + }, + schema=pa.schema([ + 
pa.field("foo", pa.string(), nullable=True), + pa.field("bar", pa.int32(), nullable=True), + ]), + ) + + with tbl.transaction() as txn: + with txn.update_schema() as schema_txn: + schema_txn.union_by_name(pa_table_with_column.schema) + + with txn.update_snapshot().fast_append() as snapshot_update: + for data_file in _dataframe_to_data_files(table=tbl, df=pa_table_with_column, file_schema=txn.schema()): + snapshot_update.append_data_file(data_file) From f2aee48438731c0624207c62c62fff402981ad76 Mon Sep 17 00:00:00 2001 From: Eduard Tudenhoefner Date: Tue, 20 Feb 2024 22:06:30 +0100 Subject: [PATCH 123/169] Send X-Iceberg-Access-Delegation header to signal support for vended credentials/remote signing (#436) * Send X-Iceberg-Access-Delegation header to signal support for vended credentials/remote signing Clients can optionally send this header to signal which delegated access pattern it can support. At this point the iceberg-python client can support `vended-credentials` and `remote-signing`, thus we can always send this header. Addtional details about this header can be found in the REST OpenAPI spec: https://github.com/apache/iceberg/blob/main/open-api/rest-catalog-open-api.yaml#L1459-L1483 * Update rest.py --- pyiceberg/catalog/rest.py | 1 + 1 file changed, 1 insertion(+) diff --git a/pyiceberg/catalog/rest.py b/pyiceberg/catalog/rest.py index b4b9f722b5..8560668a0d 100644 --- a/pyiceberg/catalog/rest.py +++ b/pyiceberg/catalog/rest.py @@ -237,6 +237,7 @@ def _create_session(self) -> Session: session.headers["Content-type"] = "application/json" session.headers["X-Client-Version"] = ICEBERG_REST_SPEC_VERSION session.headers["User-Agent"] = f"PyIceberg/{__version__}" + session.headers["X-Iceberg-Access-Delegation"] = "vended-credentials" # Configure SigV4 Request Signing if str(self.properties.get(SIGV4, False)).lower() == "true": From 82d88920aaa7cba8593e93b5df9f3a87b13c88da Mon Sep 17 00:00:00 2001 From: Anupam Saini <4581797+anupam-saini@users.noreply.github.com> Date: Tue, 20 Feb 2024 16:07:41 -0500 Subject: [PATCH 124/169] Retry with new Access Token HTTP 419 (#340) * Refresh Auth token on expiry * Check call count * Add test to cover retry logic * Update poetry.lock with tenacity * Fix tests for Python <= 3.9 --- poetry.lock | 14 ++++++++ pyiceberg/catalog/rest.py | 46 ++++++++++++++++++++---- pyproject.toml | 5 +++ tests/catalog/test_rest.py | 72 ++++++++++++++++++++++++++++++++++++++ 4 files changed, 130 insertions(+), 7 deletions(-) diff --git a/poetry.lock b/poetry.lock index bc5ee8ee3d..a772f46877 100644 --- a/poetry.lock +++ b/poetry.lock @@ -3854,6 +3854,20 @@ files = [ [package.dependencies] mpmath = ">=0.19" +[[package]] +name = "tenacity" +version = "8.2.3" +description = "Retry code until it succeeds" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tenacity-8.2.3-py3-none-any.whl", hash = "sha256:ce510e327a630c9e1beaf17d42e6ffacc88185044ad85cf74c0a8887c6a0f88c"}, + {file = "tenacity-8.2.3.tar.gz", hash = "sha256:5398ef0d78e63f40007c1fb4c0bff96e1911394d2fa8d194f77619c05ff6cc8a"}, +] + +[package.extras] +doc = ["reno", "sphinx", "tornado (>=4.5)"] + [[package]] name = "thrift" version = "0.16.0" diff --git a/pyiceberg/catalog/rest.py b/pyiceberg/catalog/rest.py index 8560668a0d..9ca8aa641f 100644 --- a/pyiceberg/catalog/rest.py +++ b/pyiceberg/catalog/rest.py @@ -30,6 +30,7 @@ from pydantic import Field, ValidationError from requests import HTTPError, Session +from tenacity import RetryCallState, retry, retry_if_exception_type, stop_after_attempt from 
pyiceberg import __version__ from pyiceberg.catalog import ( @@ -118,6 +119,19 @@ class Endpoints: NAMESPACE_SEPARATOR = b"\x1F".decode(UTF8) +def _retry_hook(retry_state: RetryCallState) -> None: + rest_catalog: RestCatalog = retry_state.args[0] + rest_catalog._refresh_token() # pylint: disable=protected-access + + +_RETRY_ARGS = { + "retry": retry_if_exception_type(AuthorizationExpiredError), + "stop": stop_after_attempt(2), + "before": _retry_hook, + "reraise": True, +} + + class TableResponse(IcebergBaseModel): metadata_location: str = Field(alias="metadata-location") metadata: TableMetadata @@ -225,13 +239,7 @@ def _create_session(self) -> Session: elif ssl_client_cert := ssl_client.get(CERT): session.cert = ssl_client_cert - # If we have credentials, but not a token, we want to fetch a token - if TOKEN not in self.properties and CREDENTIAL in self.properties: - self.properties[TOKEN] = self._fetch_access_token(session, self.properties[CREDENTIAL]) - - # Set Auth token for subsequent calls in the session - if token := self.properties.get(TOKEN): - session.headers[AUTHORIZATION_HEADER] = f"{BEARER_PREFIX} {token}" + self._refresh_token(session, self.properties.get(TOKEN)) # Set HTTP headers session.headers["Content-type"] = "application/json" @@ -439,6 +447,18 @@ def _response_to_table(self, identifier_tuple: Tuple[str, ...], table_response: catalog=self, ) + def _refresh_token(self, session: Optional[Session] = None, new_token: Optional[str] = None) -> None: + session = session or self._session + if new_token is not None: + self.properties[TOKEN] = new_token + elif CREDENTIAL in self.properties: + self.properties[TOKEN] = self._fetch_access_token(session, self.properties[CREDENTIAL]) + + # Set Auth token for subsequent calls in the session + if token := self.properties.get(TOKEN): + session.headers[AUTHORIZATION_HEADER] = f"{BEARER_PREFIX} {token}" + + @retry(**_RETRY_ARGS) def create_table( self, identifier: Union[str, Identifier], @@ -475,6 +495,7 @@ def create_table( table_response = TableResponse(**response.json()) return self._response_to_table(self.identifier_to_tuple(identifier), table_response) + @retry(**_RETRY_ARGS) def register_table(self, identifier: Union[str, Identifier], metadata_location: str) -> Table: """Register a new table using existing metadata. 
@@ -506,6 +527,7 @@ def register_table(self, identifier: Union[str, Identifier], metadata_location: table_response = TableResponse(**response.json()) return self._response_to_table(self.identifier_to_tuple(identifier), table_response) + @retry(**_RETRY_ARGS) def list_tables(self, namespace: Union[str, Identifier]) -> List[Identifier]: namespace_tuple = self._check_valid_namespace_identifier(namespace) namespace_concat = NAMESPACE_SEPARATOR.join(namespace_tuple) @@ -516,6 +538,7 @@ def list_tables(self, namespace: Union[str, Identifier]) -> List[Identifier]: self._handle_non_200_response(exc, {404: NoSuchNamespaceError}) return [(*table.namespace, table.name) for table in ListTablesResponse(**response.json()).identifiers] + @retry(**_RETRY_ARGS) def load_table(self, identifier: Union[str, Identifier]) -> Table: identifier_tuple = self.identifier_to_tuple_without_catalog(identifier) response = self._session.get( @@ -529,6 +552,7 @@ def load_table(self, identifier: Union[str, Identifier]) -> Table: table_response = TableResponse(**response.json()) return self._response_to_table(identifier_tuple, table_response) + @retry(**_RETRY_ARGS) def drop_table(self, identifier: Union[str, Identifier], purge_requested: bool = False) -> None: identifier_tuple = self.identifier_to_tuple_without_catalog(identifier) response = self._session.delete( @@ -541,9 +565,11 @@ def drop_table(self, identifier: Union[str, Identifier], purge_requested: bool = except HTTPError as exc: self._handle_non_200_response(exc, {404: NoSuchTableError}) + @retry(**_RETRY_ARGS) def purge_table(self, identifier: Union[str, Identifier]) -> None: self.drop_table(identifier=identifier, purge_requested=True) + @retry(**_RETRY_ARGS) def rename_table(self, from_identifier: Union[str, Identifier], to_identifier: Union[str, Identifier]) -> Table: from_identifier_tuple = self.identifier_to_tuple_without_catalog(from_identifier) payload = { @@ -558,6 +584,7 @@ def rename_table(self, from_identifier: Union[str, Identifier], to_identifier: U return self.load_table(to_identifier) + @retry(**_RETRY_ARGS) def _commit_table(self, table_request: CommitTableRequest) -> CommitTableResponse: """Update the table. 
@@ -588,6 +615,7 @@ def _commit_table(self, table_request: CommitTableRequest) -> CommitTableRespons ) return CommitTableResponse(**response.json()) + @retry(**_RETRY_ARGS) def create_namespace(self, namespace: Union[str, Identifier], properties: Properties = EMPTY_DICT) -> None: namespace_tuple = self._check_valid_namespace_identifier(namespace) payload = {"namespace": namespace_tuple, "properties": properties} @@ -597,6 +625,7 @@ def create_namespace(self, namespace: Union[str, Identifier], properties: Proper except HTTPError as exc: self._handle_non_200_response(exc, {404: NoSuchNamespaceError, 409: NamespaceAlreadyExistsError}) + @retry(**_RETRY_ARGS) def drop_namespace(self, namespace: Union[str, Identifier]) -> None: namespace_tuple = self._check_valid_namespace_identifier(namespace) namespace = NAMESPACE_SEPARATOR.join(namespace_tuple) @@ -606,6 +635,7 @@ def drop_namespace(self, namespace: Union[str, Identifier]) -> None: except HTTPError as exc: self._handle_non_200_response(exc, {404: NoSuchNamespaceError}) + @retry(**_RETRY_ARGS) def list_namespaces(self, namespace: Union[str, Identifier] = ()) -> List[Identifier]: namespace_tuple = self.identifier_to_tuple(namespace) response = self._session.get( @@ -623,6 +653,7 @@ def list_namespaces(self, namespace: Union[str, Identifier] = ()) -> List[Identi namespaces = ListNamespaceResponse(**response.json()) return [namespace_tuple + child_namespace for child_namespace in namespaces.namespaces] + @retry(**_RETRY_ARGS) def load_namespace_properties(self, namespace: Union[str, Identifier]) -> Properties: namespace_tuple = self._check_valid_namespace_identifier(namespace) namespace = NAMESPACE_SEPARATOR.join(namespace_tuple) @@ -634,6 +665,7 @@ def load_namespace_properties(self, namespace: Union[str, Identifier]) -> Proper return NamespaceResponse(**response.json()).properties + @retry(**_RETRY_ARGS) def update_namespace_properties( self, namespace: Union[str, Identifier], removals: Optional[Set[str]] = None, updates: Properties = EMPTY_DICT ) -> PropertiesUpdateSummary: diff --git a/pyproject.toml b/pyproject.toml index 787edec6ca..f3f6de7d22 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -57,6 +57,7 @@ sortedcontainers = "2.4.0" fsspec = ">=2023.1.0,<2024.1.0" pyparsing = ">=3.1.0,<4.0.0" zstandard = ">=0.13.0,<1.0.0" +tenacity = ">=8.2.3,<9.0.0" pyarrow = { version = ">=9.0.0,<16.0.0", optional = true } pandas = { version = ">=1.0.0,<3.0.0", optional = true } duckdb = { version = ">=0.5.0,<1.0.0", optional = true } @@ -301,6 +302,10 @@ ignore_missing_imports = true module = "setuptools.*" ignore_missing_imports = true +[[tool.mypy.overrides]] +module = "tenacity.*" +ignore_missing_imports = true + [tool.coverage.run] source = ['pyiceberg/'] diff --git a/tests/catalog/test_rest.py b/tests/catalog/test_rest.py index 248cc14d88..7ae0d19558 100644 --- a/tests/catalog/test_rest.py +++ b/tests/catalog/test_rest.py @@ -26,6 +26,7 @@ from pyiceberg.catalog import PropertiesUpdateSummary, Table, load_catalog from pyiceberg.catalog.rest import AUTH_URL, RestCatalog from pyiceberg.exceptions import ( + AuthorizationExpiredError, NamespaceAlreadyExistsError, NoSuchNamespaceError, NoSuchTableError, @@ -266,6 +267,48 @@ def test_list_namespace_with_parent_200(rest_mock: Mocker) -> None: ] +def test_list_namespaces_419(rest_mock: Mocker) -> None: + new_token = "new_jwt_token" + new_header = dict(TEST_HEADERS) + new_header["Authorization"] = f"Bearer {new_token}" + + rest_mock.post( + f"{TEST_URI}v1/namespaces", + json={ + "error": { + "message": 
"Authorization expired.", + "type": "AuthorizationExpiredError", + "code": 419, + } + }, + status_code=419, + request_headers=TEST_HEADERS, + ) + rest_mock.post( + f"{TEST_URI}v1/oauth/tokens", + json={ + "access_token": new_token, + "token_type": "Bearer", + "expires_in": 86400, + "issued_token_type": "urn:ietf:params:oauth:token-type:access_token", + }, + status_code=200, + ) + rest_mock.get( + f"{TEST_URI}v1/namespaces", + json={"namespaces": [["default"], ["examples"], ["fokko"], ["system"]]}, + status_code=200, + request_headers=new_header, + ) + catalog = RestCatalog("rest", uri=TEST_URI, token=TEST_TOKEN, credential=TEST_CREDENTIALS) + assert catalog.list_namespaces() == [ + ("default",), + ("examples",), + ("fokko",), + ("system",), + ] + + def test_create_namespace_200(rest_mock: Mocker) -> None: namespace = "leden" rest_mock.post( @@ -517,6 +560,35 @@ def test_create_table_409(rest_mock: Mocker, table_schema_simple: Schema) -> Non assert "Table already exists" in str(e.value) +def test_create_table_419(rest_mock: Mocker, table_schema_simple: Schema) -> None: + rest_mock.post( + f"{TEST_URI}v1/namespaces/fokko/tables", + json={ + "error": { + "message": "Authorization expired.", + "type": "AuthorizationExpiredError", + "code": 419, + } + }, + status_code=419, + request_headers=TEST_HEADERS, + ) + + with pytest.raises(AuthorizationExpiredError) as e: + RestCatalog("rest", uri=TEST_URI, token=TEST_TOKEN).create_table( + identifier=("fokko", "fokko2"), + schema=table_schema_simple, + location=None, + partition_spec=PartitionSpec( + PartitionField(source_id=1, field_id=1000, transform=TruncateTransform(width=3), name="id") + ), + sort_order=SortOrder(SortField(source_id=2, transform=IdentityTransform())), + properties={"owner": "fokko"}, + ) + assert "Authorization expired" in str(e.value) + assert rest_mock.call_count == 3 + + def test_register_table_200( rest_mock: Mocker, table_schema_simple: Schema, example_table_metadata_no_snapshot_v1_rest_json: Dict[str, Any] ) -> None: From b32d3a5159e1ca0cd0c4da11635b2c82f5281de0 Mon Sep 17 00:00:00 2001 From: Fokko Driesprong Date: Tue, 20 Feb 2024 22:52:22 +0100 Subject: [PATCH 125/169] Reuse commit-uuid as the write-uuid (#437) * Reuse commit-uuid as the write-uuid * Fix conflicts * Cleanup * cleanup --- pyiceberg/table/__init__.py | 29 +++++++++++++++++++---------- 1 file changed, 19 insertions(+), 10 deletions(-) diff --git a/pyiceberg/table/__init__.py b/pyiceberg/table/__init__.py index a939294f30..d70d4380f1 100644 --- a/pyiceberg/table/__init__.py +++ b/pyiceberg/table/__init__.py @@ -1022,7 +1022,7 @@ def append(self, df: pa.Table) -> None: with self.update_snapshot().fast_append() as update_snapshot: # skip writing data files if the dataframe is empty if df.shape[0] > 0: - data_files = _dataframe_to_data_files(self, df=df) + data_files = _dataframe_to_data_files(self, write_uuid=update_snapshot.commit_uuid, df=df) for data_file in data_files: update_snapshot.append_data_file(data_file) @@ -1052,7 +1052,7 @@ def overwrite(self, df: pa.Table, overwrite_filter: BooleanExpression = ALWAYS_T with self.update_snapshot().overwrite() as update_snapshot: # skip writing data files if the dataframe is empty if df.shape[0] > 0: - data_files = _dataframe_to_data_files(self, df=df) + data_files = _dataframe_to_data_files(self, write_uuid=update_snapshot.commit_uuid, df=df) for data_file in data_files: update_snapshot.append_data_file(data_file) @@ -2349,7 +2349,9 @@ def _generate_manifest_list_path(location: str, snapshot_id: int, attempt: int, 
return f'{location}/metadata/snap-{snapshot_id}-{attempt}-{commit_uuid}.avro' -def _dataframe_to_data_files(table: Table, df: pa.Table, file_schema: Optional[Schema] = None) -> Iterable[DataFile]: +def _dataframe_to_data_files( + table: Table, df: pa.Table, write_uuid: Optional[uuid.UUID] = None, file_schema: Optional[Schema] = None +) -> Iterable[DataFile]: """Convert a PyArrow table into a DataFile. Returns: @@ -2360,8 +2362,8 @@ def _dataframe_to_data_files(table: Table, df: pa.Table, file_schema: Optional[S if len(table.spec().fields) > 0: raise ValueError("Cannot write to partitioned tables") - write_uuid = uuid.uuid4() counter = itertools.count(0) + write_uuid = write_uuid or uuid.uuid4() # This is an iter, so we don't have to materialize everything every time # This will be more relevant when we start doing partitioned writes @@ -2369,22 +2371,28 @@ def _dataframe_to_data_files(table: Table, df: pa.Table, file_schema: Optional[S class _MergingSnapshotProducer: + commit_uuid: uuid.UUID _operation: Operation _table: Table _snapshot_id: int _parent_snapshot_id: Optional[int] _added_data_files: List[DataFile] - _commit_uuid: uuid.UUID _transaction: Optional[Transaction] - def __init__(self, operation: Operation, table: Table, transaction: Optional[Transaction] = None) -> None: + def __init__( + self, + operation: Operation, + table: Table, + commit_uuid: Optional[uuid.UUID] = None, + transaction: Optional[Transaction] = None, + ) -> None: + self.commit_uuid = commit_uuid or uuid.uuid4() self._operation = operation self._table = table self._snapshot_id = table.new_snapshot_id() # Since we only support the main branch for now self._parent_snapshot_id = snapshot.snapshot_id if (snapshot := self._table.current_snapshot()) else None self._added_data_files = [] - self._commit_uuid = uuid.uuid4() self._transaction = transaction def __enter__(self) -> _MergingSnapshotProducer: @@ -2408,7 +2416,7 @@ def _existing_manifests(self) -> List[ManifestFile]: ... 
def _manifests(self) -> List[ManifestFile]: def _write_added_manifest() -> List[ManifestFile]: if self._added_data_files: - output_file_location = _new_manifest_path(location=self._table.location(), num=0, commit_uuid=self._commit_uuid) + output_file_location = _new_manifest_path(location=self._table.location(), num=0, commit_uuid=self.commit_uuid) with write_manifest( format_version=self._table.format_version, spec=self._table.spec(), @@ -2434,7 +2442,8 @@ def _write_delete_manifest() -> List[ManifestFile]: # Check if we need to mark the files as deleted deleted_entries = self._deleted_entries() if len(deleted_entries) > 0: - output_file_location = _new_manifest_path(location=self._table.location(), num=1, commit_uuid=self._commit_uuid) + output_file_location = _new_manifest_path(location=self._table.location(), num=1, commit_uuid=self.commit_uuid) + with write_manifest( format_version=self._table.format_version, spec=self._table.spec(), @@ -2477,7 +2486,7 @@ def commit(self) -> Snapshot: summary = self._summary() manifest_list_file_path = _generate_manifest_list_path( - location=self._table.location(), snapshot_id=self._snapshot_id, attempt=0, commit_uuid=self._commit_uuid + location=self._table.location(), snapshot_id=self._snapshot_id, attempt=0, commit_uuid=self.commit_uuid ) with write_manifest_list( format_version=self._table.metadata.format_version, From fd9dc8837d4b9054de2e51ee360ead517f471271 Mon Sep 17 00:00:00 2001 From: Sung Yun <107272191+syun64@users.noreply.github.com> Date: Tue, 20 Feb 2024 14:55:20 -0700 Subject: [PATCH 126/169] Update NameMapping on `update_schema()` (#441) * update name-mapping * Update __init__.py Co-authored-by: Fokko Driesprong * Update pyiceberg/table/name_mapping.py Co-authored-by: Fokko Driesprong * validation mode after * type --------- Co-authored-by: Fokko Driesprong --- pyiceberg/table/__init__.py | 16 +++-- pyiceberg/table/name_mapping.py | 97 ++++++++++++++++++++++++--- tests/integration/test_rest_schema.py | 21 +++++- tests/table/test_name_mapping.py | 73 +++++++++++++++++++- 4 files changed, 188 insertions(+), 19 deletions(-) diff --git a/pyiceberg/table/__init__.py b/pyiceberg/table/__init__.py index d70d4380f1..2f891cd14a 100644 --- a/pyiceberg/table/__init__.py +++ b/pyiceberg/table/__init__.py @@ -84,11 +84,7 @@ TableMetadata, TableMetadataUtil, ) -from pyiceberg.table.name_mapping import ( - NameMapping, - create_mapping_from_schema, - parse_mapping_from_json, -) +from pyiceberg.table.name_mapping import NameMapping, parse_mapping_from_json, update_mapping from pyiceberg.table.refs import MAIN_BRANCH, SnapshotRef from pyiceberg.table.snapshots import ( Operation, @@ -994,12 +990,12 @@ def update_snapshot(self) -> UpdateSnapshot: """ return UpdateSnapshot(self) - def name_mapping(self) -> NameMapping: + def name_mapping(self) -> Optional[NameMapping]: """Return the table's field-id NameMapping.""" if name_mapping_json := self.properties.get(TableProperties.DEFAULT_NAME_MAPPING): return parse_mapping_from_json(name_mapping_json) else: - return create_mapping_from_schema(self.schema()) + return None def append(self, df: pa.Table) -> None: """ @@ -1950,6 +1946,12 @@ def commit(self) -> None: else: updates = (SetCurrentSchemaUpdate(schema_id=existing_schema_id),) # type: ignore + if name_mapping := self._table.name_mapping(): + updated_name_mapping = update_mapping(name_mapping, self._updates, self._adds) + updates += ( # type: ignore + SetPropertiesUpdate(updates={TableProperties.DEFAULT_NAME_MAPPING: 
updated_name_mapping.model_dump_json()}), + ) + if self._transaction is not None: self._transaction._append_updates(*updates) # pylint: disable=W0212 self._transaction._append_requirements(*requirements) # pylint: disable=W0212 diff --git a/pyiceberg/table/name_mapping.py b/pyiceberg/table/name_mapping.py index ffe96359a8..94974836cd 100644 --- a/pyiceberg/table/name_mapping.py +++ b/pyiceberg/table/name_mapping.py @@ -26,7 +26,7 @@ from abc import ABC, abstractmethod from collections import ChainMap from functools import cached_property, singledispatch -from typing import Any, Dict, Generic, List, TypeVar, Union +from typing import Any, Dict, Generic, List, Optional, TypeVar, Union from pydantic import Field, conlist, field_validator, model_serializer @@ -45,6 +45,18 @@ class MappedField(IcebergBaseModel): def convert_null_to_empty_List(cls, v: Any) -> Any: return v or [] + @field_validator('names', mode='after') + @classmethod + def check_at_least_one(cls, v: List[str]) -> Any: + """ + Conlist constraint does not seem to be validating the class on instantiation. + + Adding a custom validator to enforce min_length=1 constraint. + """ + if len(v) < 1: + raise ValueError("At least one mapped name must be provided for the field") + return v + @model_serializer def ser_model(self) -> Dict[str, Any]: """Set custom serializer to leave out the field when it is empty.""" @@ -93,24 +105,25 @@ def __str__(self) -> str: return "[\n " + "\n ".join([str(e) for e in self.root]) + "\n]" +S = TypeVar('S') T = TypeVar("T") -class NameMappingVisitor(Generic[T], ABC): +class NameMappingVisitor(Generic[S, T], ABC): @abstractmethod - def mapping(self, nm: NameMapping, field_results: T) -> T: + def mapping(self, nm: NameMapping, field_results: S) -> S: """Visit a NameMapping.""" @abstractmethod - def fields(self, struct: List[MappedField], field_results: List[T]) -> T: + def fields(self, struct: List[MappedField], field_results: List[T]) -> S: """Visit a List[MappedField].""" @abstractmethod - def field(self, field: MappedField, field_result: T) -> T: + def field(self, field: MappedField, field_result: S) -> T: """Visit a MappedField.""" -class _IndexByName(NameMappingVisitor[Dict[str, MappedField]]): +class _IndexByName(NameMappingVisitor[Dict[str, MappedField], Dict[str, MappedField]]): def mapping(self, nm: NameMapping, field_results: Dict[str, MappedField]) -> Dict[str, MappedField]: return field_results @@ -129,18 +142,18 @@ def field(self, field: MappedField, field_result: Dict[str, MappedField]) -> Dic @singledispatch -def visit_name_mapping(obj: Union[NameMapping, List[MappedField], MappedField], visitor: NameMappingVisitor[T]) -> T: +def visit_name_mapping(obj: Union[NameMapping, List[MappedField], MappedField], visitor: NameMappingVisitor[S, T]) -> S: """Traverse the name mapping in post-order traversal.""" raise NotImplementedError(f"Cannot visit non-type: {obj}") @visit_name_mapping.register(NameMapping) -def _(obj: NameMapping, visitor: NameMappingVisitor[T]) -> T: +def _(obj: NameMapping, visitor: NameMappingVisitor[S, T]) -> S: return visitor.mapping(obj, visit_name_mapping(obj.root, visitor)) @visit_name_mapping.register(list) -def _(fields: List[MappedField], visitor: NameMappingVisitor[T]) -> T: +def _(fields: List[MappedField], visitor: NameMappingVisitor[S, T]) -> S: results = [visitor.field(field, visit_name_mapping(field.fields, visitor)) for field in fields] return visitor.fields(fields, results) @@ -175,5 +188,71 @@ def primitive(self, primitive: PrimitiveType) -> List[MappedField]: 
return [] +class _UpdateMapping(NameMappingVisitor[List[MappedField], MappedField]): + _updates: Dict[int, NestedField] + _adds: Dict[int, List[NestedField]] + + def __init__(self, updates: Dict[int, NestedField], adds: Dict[int, List[NestedField]]): + self._updates = updates + self._adds = adds + + @staticmethod + def _remove_reassigned_names(field: MappedField, assignments: Dict[str, int]) -> Optional[MappedField]: + removed_names = set() + for name in field.names: + if (assigned_id := assignments.get(name)) and assigned_id != field.field_id: + removed_names.add(name) + + remaining_names = [f for f in field.names if f not in removed_names] + if remaining_names: + return MappedField(field_id=field.field_id, names=remaining_names, fields=field.fields) + else: + return None + + def _add_new_fields(self, mapped_fields: List[MappedField], parent_id: int) -> List[MappedField]: + if fields_to_add := self._adds.get(parent_id): + fields: List[MappedField] = [] + new_fields: List[MappedField] = [] + + for add in fields_to_add: + new_fields.append( + MappedField(field_id=add.field_id, names=[add.name], fields=visit(add.field_type, _CreateMapping())) + ) + + reassignments = {f.name: f.field_id for f in fields_to_add} + fields = [ + updated_field + for field in mapped_fields + if (updated_field := self._remove_reassigned_names(field, reassignments)) is not None + ] + new_fields + return fields + else: + return mapped_fields + + def mapping(self, nm: NameMapping, field_results: List[MappedField]) -> List[MappedField]: + return self._add_new_fields(field_results, -1) + + def fields(self, struct: List[MappedField], field_results: List[MappedField]) -> List[MappedField]: + reassignments: Dict[str, int] = { + update.name: update.field_id for f in field_results if (update := self._updates.get(f.field_id)) + } + return [ + updated_field + for field in field_results + if (updated_field := self._remove_reassigned_names(field, reassignments)) is not None + ] + + def field(self, field: MappedField, field_result: List[MappedField]) -> MappedField: + field_names = field.names + if (update := self._updates.get(field.field_id)) is not None and update.name not in field_names: + field_names.append(update.name) + + return MappedField(field_id=field.field_id, names=field_names, fields=self._add_new_fields(field_result, field.field_id)) + + def create_mapping_from_schema(schema: Schema) -> NameMapping: return NameMapping(visit(schema, _CreateMapping())) + + +def update_mapping(mapping: NameMapping, updates: Dict[int, NestedField], adds: Dict[int, List[NestedField]]) -> NameMapping: + return NameMapping(visit_name_mapping(mapping, _UpdateMapping(updates, adds))) diff --git a/tests/integration/test_rest_schema.py b/tests/integration/test_rest_schema.py index a3320e4d3a..aae07caba9 100644 --- a/tests/integration/test_rest_schema.py +++ b/tests/integration/test_rest_schema.py @@ -22,7 +22,8 @@ from pyiceberg.exceptions import CommitFailedException, NoSuchTableError, ValidationError from pyiceberg.partitioning import PartitionField, PartitionSpec from pyiceberg.schema import Schema, prune_columns -from pyiceberg.table import Table, UpdateSchema +from pyiceberg.table import Table, TableProperties, UpdateSchema +from pyiceberg.table.name_mapping import MappedField, NameMapping, create_mapping_from_schema from pyiceberg.table.sorting import SortField, SortOrder from pyiceberg.transforms import IdentityTransform from pyiceberg.types import ( @@ -73,7 +74,11 @@ def _create_table_with_schema(catalog: Catalog, schema: Schema) -> 
Table: catalog.drop_table(tbl_name) except NoSuchTableError: pass - return catalog.create_table(identifier=tbl_name, schema=schema) + return catalog.create_table( + identifier=tbl_name, + schema=schema, + properties={TableProperties.DEFAULT_NAME_MAPPING: create_mapping_from_schema(schema).model_dump_json()}, + ) @pytest.mark.integration @@ -674,6 +679,13 @@ def test_rename_simple(simple_table: Table) -> None: identifier_field_ids=[2], ) + # Check that the name mapping gets updated + assert simple_table.name_mapping() == NameMapping([ + MappedField(field_id=1, names=['foo', 'vo']), + MappedField(field_id=2, names=['bar']), + MappedField(field_id=3, names=['baz']), + ]) + @pytest.mark.integration def test_rename_simple_nested(catalog: Catalog) -> None: @@ -701,6 +713,11 @@ def test_rename_simple_nested(catalog: Catalog) -> None: ), ) + # Check that the name mapping gets updated + assert tbl.name_mapping() == NameMapping([ + MappedField(field_id=1, names=['foo'], fields=[MappedField(field_id=2, names=['bar', 'vo'])]), + ]) + @pytest.mark.integration def test_rename_simple_nested_with_dots(catalog: Catalog) -> None: diff --git a/tests/table/test_name_mapping.py b/tests/table/test_name_mapping.py index d74aa3234c..e039415ce3 100644 --- a/tests/table/test_name_mapping.py +++ b/tests/table/test_name_mapping.py @@ -17,7 +17,14 @@ import pytest from pyiceberg.schema import Schema -from pyiceberg.table.name_mapping import MappedField, NameMapping, create_mapping_from_schema, parse_mapping_from_json +from pyiceberg.table.name_mapping import ( + MappedField, + NameMapping, + create_mapping_from_schema, + parse_mapping_from_json, + update_mapping, +) +from pyiceberg.types import NestedField, StringType @pytest.fixture(scope="session") @@ -238,3 +245,67 @@ def test_mapping_lookup_by_name(table_name_mapping_nested: NameMapping) -> None: with pytest.raises(ValueError, match="Could not find field with name: boom"): table_name_mapping_nested.find("boom") + + +def test_invalid_mapped_field() -> None: + with pytest.raises(ValueError): + MappedField(field_id=1, names=[]) + + +def test_update_mapping_no_updates_or_adds(table_name_mapping_nested: NameMapping) -> None: + assert update_mapping(table_name_mapping_nested, {}, {}) == table_name_mapping_nested + + +def test_update_mapping(table_name_mapping_nested: NameMapping) -> None: + updates = {1: NestedField(1, "foo_update", StringType(), True)} + adds = { + -1: [NestedField(18, "add_18", StringType(), True)], + 15: [NestedField(19, "name", StringType(), True), NestedField(20, "add_20", StringType(), True)], + } + + expected = NameMapping([ + MappedField(field_id=1, names=['foo', 'foo_update']), + MappedField(field_id=2, names=['bar']), + MappedField(field_id=3, names=['baz']), + MappedField(field_id=4, names=['qux'], fields=[MappedField(field_id=5, names=['element'])]), + MappedField( + field_id=6, + names=['quux'], + fields=[ + MappedField(field_id=7, names=['key']), + MappedField( + field_id=8, + names=['value'], + fields=[ + MappedField(field_id=9, names=['key']), + MappedField(field_id=10, names=['value']), + ], + ), + ], + ), + MappedField( + field_id=11, + names=['location'], + fields=[ + MappedField( + field_id=12, + names=['element'], + fields=[ + MappedField(field_id=13, names=['latitude']), + MappedField(field_id=14, names=['longitude']), + ], + ) + ], + ), + MappedField( + field_id=15, + names=['person'], + fields=[ + MappedField(field_id=17, names=['age']), + MappedField(field_id=19, names=['name']), + MappedField(field_id=20, names=['add_20']), + 
], + ), + MappedField(field_id=18, names=['add_18']), + ]) + assert update_mapping(table_name_mapping_nested, updates, adds) == expected From 9b01248e3aab103b1bee87a8c5b1b94c536dd27c Mon Sep 17 00:00:00 2001 From: Hussein Awala Date: Tue, 20 Feb 2024 23:03:47 +0100 Subject: [PATCH 127/169] Implement `create_table_if_not_exists` (#415) * Feat: Add fail_if_exists param to create_table * create create_table_if_not_exists method * fix reset test * fix mypy check --- pyiceberg/catalog/__init__.py | 30 ++++++++++- tests/catalog/integration_test_dynamodb.py | 9 ++++ tests/catalog/integration_test_glue.py | 9 ++++ tests/catalog/test_dynamodb.py | 12 +++++ tests/catalog/test_rest.py | 60 +++++++++++++++++++++- tests/catalog/test_sql.py | 17 ++++++ 6 files changed, 135 insertions(+), 2 deletions(-) diff --git a/pyiceberg/catalog/__init__.py b/pyiceberg/catalog/__init__.py index 6e5dc2748f..f7e34ab23c 100644 --- a/pyiceberg/catalog/__init__.py +++ b/pyiceberg/catalog/__init__.py @@ -36,7 +36,7 @@ cast, ) -from pyiceberg.exceptions import NoSuchNamespaceError, NoSuchTableError, NotInstalledError +from pyiceberg.exceptions import NoSuchNamespaceError, NoSuchTableError, NotInstalledError, TableAlreadyExistsError from pyiceberg.io import FileIO, load_file_io from pyiceberg.manifest import ManifestFile from pyiceberg.partitioning import UNPARTITIONED_PARTITION_SPEC, PartitionSpec @@ -315,6 +315,34 @@ def create_table( TableAlreadyExistsError: If a table with the name already exists. """ + def create_table_if_not_exists( + self, + identifier: Union[str, Identifier], + schema: Union[Schema, "pa.Schema"], + location: Optional[str] = None, + partition_spec: PartitionSpec = UNPARTITIONED_PARTITION_SPEC, + sort_order: SortOrder = UNSORTED_SORT_ORDER, + properties: Properties = EMPTY_DICT, + ) -> Table: + """Create a table if it does not exist. + + Args: + identifier (str | Identifier): Table identifier. + schema (Schema): Table's schema. + location (str | None): Location for the table. Optional Argument. + partition_spec (PartitionSpec): PartitionSpec for the table. + sort_order (SortOrder): SortOrder for the table. + properties (Properties): Table properties that can be a string based dictionary. + + Returns: + Table: the created table instance if the table does not exist, else the existing + table instance. + """ + try: + return self.create_table(identifier, schema, location, partition_spec, sort_order, properties) + except TableAlreadyExistsError: + return self.load_table(identifier) + @abstractmethod def load_table(self, identifier: Union[str, Identifier]) -> Table: """Load the table's metadata and returns the table instance. 
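Alongside the integration tests that follow, a minimal sketch of the intended call pattern for the new helper; the catalog name, namespace, and schema below are illustrative stand-ins, not taken from this patch:

from pyiceberg.catalog import load_catalog
from pyiceberg.schema import Schema
from pyiceberg.types import LongType, NestedField, StringType

# Hypothetical catalog configuration and table identifier, for illustration only.
catalog = load_catalog("default")
schema = Schema(
    NestedField(field_id=1, name="id", field_type=LongType(), required=True),
    NestedField(field_id=2, name="name", field_type=StringType(), required=False),
)

# The first call creates the table; the second hits TableAlreadyExistsError
# internally and falls back to load_table, returning the existing instance.
table1 = catalog.create_table_if_not_exists("examples.users", schema)
table2 = catalog.create_table_if_not_exists("examples.users", schema)
assert table1.identifier == table2.identifier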
diff --git a/tests/catalog/integration_test_dynamodb.py b/tests/catalog/integration_test_dynamodb.py index 5ca8767d6d..591e489b83 100644 --- a/tests/catalog/integration_test_dynamodb.py +++ b/tests/catalog/integration_test_dynamodb.py @@ -96,6 +96,15 @@ def test_create_duplicated_table(test_catalog: Catalog, table_schema_nested: Sch test_catalog.create_table((database_name, table_name), table_schema_nested) +def test_create_table_if_not_exists_duplicated_table( + test_catalog: Catalog, table_schema_nested: Schema, database_name: str, table_name: str +) -> None: + test_catalog.create_namespace(database_name) + table1 = test_catalog.create_table((database_name, table_name), table_schema_nested) + table2 = test_catalog.create_table_if_not_exists((database_name, table_name), table_schema_nested) + assert table1.identifier == table2.identifier + + def test_load_table(test_catalog: Catalog, table_schema_nested: Schema, database_name: str, table_name: str) -> None: identifier = (database_name, table_name) test_catalog.create_namespace(database_name) diff --git a/tests/catalog/integration_test_glue.py b/tests/catalog/integration_test_glue.py index a56e4c6aaa..a685b7da7b 100644 --- a/tests/catalog/integration_test_glue.py +++ b/tests/catalog/integration_test_glue.py @@ -200,6 +200,15 @@ def test_create_duplicated_table(test_catalog: Catalog, table_schema_nested: Sch test_catalog.create_table((database_name, table_name), table_schema_nested) +def test_create_table_if_not_exists_duplicated_table( + test_catalog: Catalog, table_schema_nested: Schema, table_name: str, database_name: str +) -> None: + test_catalog.create_namespace(database_name) + table1 = test_catalog.create_table((database_name, table_name), table_schema_nested) + table2 = test_catalog.create_table_if_not_exists((database_name, table_name), table_schema_nested) + assert table1.identifier == table2.identifier + + def test_load_table(test_catalog: Catalog, table_schema_nested: Schema, table_name: str, database_name: str) -> None: identifier = (database_name, table_name) test_catalog.create_namespace(database_name) diff --git a/tests/catalog/test_dynamodb.py b/tests/catalog/test_dynamodb.py index 2cb7c46a15..218b0e8be7 100644 --- a/tests/catalog/test_dynamodb.py +++ b/tests/catalog/test_dynamodb.py @@ -176,6 +176,18 @@ def test_create_duplicated_table( test_catalog.create_table(identifier, table_schema_nested) +@mock_aws +def test_create_table_if_not_exists_duplicated_table( + _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str +) -> None: + identifier = (database_name, table_name) + test_catalog = DynamoDbCatalog("test_ddb_catalog", **{"warehouse": f"s3://{BUCKET_NAME}", "s3.endpoint": moto_endpoint_url}) + test_catalog.create_namespace(namespace=database_name) + table1 = test_catalog.create_table(identifier, table_schema_nested) + table2 = test_catalog.create_table_if_not_exists(identifier, table_schema_nested) + assert table1.identifier == table2.identifier + + @mock_aws def test_load_table( _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str diff --git a/tests/catalog/test_rest.py b/tests/catalog/test_rest.py index 7ae0d19558..2b698d9c1e 100644 --- a/tests/catalog/test_rest.py +++ b/tests/catalog/test_rest.py @@ -16,7 +16,7 @@ # under the License. 
# pylint: disable=redefined-outer-name,unused-argument import os -from typing import Any, Dict, cast +from typing import Any, Callable, Dict, cast from unittest import mock import pytest @@ -560,6 +560,64 @@ def test_create_table_409(rest_mock: Mocker, table_schema_simple: Schema) -> Non assert "Table already exists" in str(e.value) +def test_create_table_if_not_exists_200( + rest_mock: Mocker, table_schema_simple: Schema, example_table_metadata_no_snapshot_v1_rest_json: Dict[str, Any] +) -> None: + def json_callback() -> Callable[[Any, Any], Dict[str, Any]]: + call_count = 0 + + def callback(request: Any, context: Any) -> Dict[str, Any]: + nonlocal call_count + call_count += 1 + + if call_count == 1: + context.status_code = 200 + return example_table_metadata_no_snapshot_v1_rest_json + else: + context.status_code = 409 + return { + "error": { + "message": "Table already exists: fokko.already_exists in warehouse 8bcb0838-50fc-472d-9ddb-8feb89ef5f1e", + "type": "AlreadyExistsException", + "code": 409, + } + } + + return callback + + rest_mock.post( + f"{TEST_URI}v1/namespaces/fokko/tables", + json=json_callback(), + request_headers=TEST_HEADERS, + ) + rest_mock.get( + f"{TEST_URI}v1/namespaces/fokko/tables/fokko2", + json=example_table_metadata_no_snapshot_v1_rest_json, + status_code=200, + request_headers=TEST_HEADERS, + ) + catalog = RestCatalog("rest", uri=TEST_URI, token=TEST_TOKEN) + table1 = catalog.create_table( + identifier=("fokko", "fokko2"), + schema=table_schema_simple, + location=None, + partition_spec=PartitionSpec( + PartitionField(source_id=1, field_id=1000, transform=TruncateTransform(width=3), name="id"), spec_id=1 + ), + sort_order=SortOrder(SortField(source_id=2, transform=IdentityTransform())), + properties={"owner": "fokko"}, + ) + table2 = catalog.create_table_if_not_exists( + identifier=("fokko", "fokko2"), + schema=table_schema_simple, + location=None, + partition_spec=PartitionSpec(PartitionField(source_id=1, field_id=1000, transform=TruncateTransform(width=3), name="id")), + sort_order=SortOrder(SortField(source_id=2, transform=IdentityTransform())), + properties={"owner": "fokko"}, + ) + assert table1 == table2 + + def test_create_table_419(rest_mock: Mocker, table_schema_simple: Schema) -> None: rest_mock.post( f"{TEST_URI}v1/namespaces/fokko/tables", diff --git a/tests/catalog/test_sql.py b/tests/catalog/test_sql.py index f6ff78283b..bc75eb6300 100644 --- a/tests/catalog/test_sql.py +++ b/tests/catalog/test_sql.py @@ -247,6 +247,23 @@ def test_create_duplicated_table(catalog: SqlCatalog, table_schema_nested: Schem catalog.create_table(random_identifier, table_schema_nested) +@pytest.mark.parametrize( + 'catalog', + [ + lazy_fixture('catalog_memory'), + lazy_fixture('catalog_sqlite'), + ], +) +def test_create_table_if_not_exists_duplicated_table( + catalog: SqlCatalog, table_schema_nested: Schema, random_identifier: Identifier +) -> None: + database_name, _table_name = random_identifier + catalog.create_namespace(database_name) + table1 = catalog.create_table(random_identifier, table_schema_nested) + table2 = catalog.create_table_if_not_exists(random_identifier, table_schema_nested) + assert table1.identifier == table2.identifier + + @pytest.mark.parametrize( 'catalog', [ From 92e3a43a81ddbf01552d6148ae3e876eb6269a15 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 20 Feb 2024 17:32:45 -0800 Subject: [PATCH 128/169] Build: Bump coverage from 7.4.1 to 7.4.2 (#457) Bumps 
[coverage](https://github.com/nedbat/coveragepy) from 7.4.1 to 7.4.2. - [Release notes](https://github.com/nedbat/coveragepy/releases) - [Changelog](https://github.com/nedbat/coveragepy/blob/master/CHANGES.rst) - [Commits](https://github.com/nedbat/coveragepy/compare/7.4.1...7.4.2) --- updated-dependencies: - dependency-name: coverage dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 108 ++++++++++++++++++++++++------------------------- pyproject.toml | 2 +- 2 files changed, 55 insertions(+), 55 deletions(-) diff --git a/poetry.lock b/poetry.lock index a772f46877..14668f0340 100644 --- a/poetry.lock +++ b/poetry.lock @@ -641,63 +641,63 @@ files = [ [[package]] name = "coverage" -version = "7.4.1" +version = "7.4.2" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:077d366e724f24fc02dbfe9d946534357fda71af9764ff99d73c3c596001bbd7"}, - {file = "coverage-7.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0193657651f5399d433c92f8ae264aff31fc1d066deee4b831549526433f3f61"}, - {file = "coverage-7.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d17bbc946f52ca67adf72a5ee783cd7cd3477f8f8796f59b4974a9b59cacc9ee"}, - {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3277f5fa7483c927fe3a7b017b39351610265308f5267ac6d4c2b64cc1d8d25"}, - {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dceb61d40cbfcf45f51e59933c784a50846dc03211054bd76b421a713dcdf19"}, - {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6008adeca04a445ea6ef31b2cbaf1d01d02986047606f7da266629afee982630"}, - {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c61f66d93d712f6e03369b6a7769233bfda880b12f417eefdd4f16d1deb2fc4c"}, - {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9bb62fac84d5f2ff523304e59e5c439955fb3b7f44e3d7b2085184db74d733b"}, - {file = "coverage-7.4.1-cp310-cp310-win32.whl", hash = "sha256:f86f368e1c7ce897bf2457b9eb61169a44e2ef797099fb5728482b8d69f3f016"}, - {file = "coverage-7.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:869b5046d41abfea3e381dd143407b0d29b8282a904a19cb908fa24d090cc018"}, - {file = "coverage-7.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b8ffb498a83d7e0305968289441914154fb0ef5d8b3157df02a90c6695978295"}, - {file = "coverage-7.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3cacfaefe6089d477264001f90f55b7881ba615953414999c46cc9713ff93c8c"}, - {file = "coverage-7.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d6850e6e36e332d5511a48a251790ddc545e16e8beaf046c03985c69ccb2676"}, - {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18e961aa13b6d47f758cc5879383d27b5b3f3dcd9ce8cdbfdc2571fe86feb4dd"}, - {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfd1e1b9f0898817babf840b77ce9fe655ecbe8b1b327983df485b30df8cc011"}, - {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:6b00e21f86598b6330f0019b40fb397e705135040dbedc2ca9a93c7441178e74"}, - {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:536d609c6963c50055bab766d9951b6c394759190d03311f3e9fcf194ca909e1"}, - {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7ac8f8eb153724f84885a1374999b7e45734bf93a87d8df1e7ce2146860edef6"}, - {file = "coverage-7.4.1-cp311-cp311-win32.whl", hash = "sha256:f3771b23bb3675a06f5d885c3630b1d01ea6cac9e84a01aaf5508706dba546c5"}, - {file = "coverage-7.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:9d2f9d4cc2a53b38cabc2d6d80f7f9b7e3da26b2f53d48f05876fef7956b6968"}, - {file = "coverage-7.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f68ef3660677e6624c8cace943e4765545f8191313a07288a53d3da188bd8581"}, - {file = "coverage-7.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:23b27b8a698e749b61809fb637eb98ebf0e505710ec46a8aa6f1be7dc0dc43a6"}, - {file = "coverage-7.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e3424c554391dc9ef4a92ad28665756566a28fecf47308f91841f6c49288e66"}, - {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0860a348bf7004c812c8368d1fc7f77fe8e4c095d661a579196a9533778e156"}, - {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe558371c1bdf3b8fa03e097c523fb9645b8730399c14fe7721ee9c9e2a545d3"}, - {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3468cc8720402af37b6c6e7e2a9cdb9f6c16c728638a2ebc768ba1ef6f26c3a1"}, - {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:02f2edb575d62172aa28fe00efe821ae31f25dc3d589055b3fb64d51e52e4ab1"}, - {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ca6e61dc52f601d1d224526360cdeab0d0712ec104a2ce6cc5ccef6ed9a233bc"}, - {file = "coverage-7.4.1-cp312-cp312-win32.whl", hash = "sha256:ca7b26a5e456a843b9b6683eada193fc1f65c761b3a473941efe5a291f604c74"}, - {file = "coverage-7.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:85ccc5fa54c2ed64bd91ed3b4a627b9cce04646a659512a051fa82a92c04a448"}, - {file = "coverage-7.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8bdb0285a0202888d19ec6b6d23d5990410decb932b709f2b0dfe216d031d218"}, - {file = "coverage-7.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:918440dea04521f499721c039863ef95433314b1db00ff826a02580c1f503e45"}, - {file = "coverage-7.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:379d4c7abad5afbe9d88cc31ea8ca262296480a86af945b08214eb1a556a3e4d"}, - {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b094116f0b6155e36a304ff912f89bbb5067157aff5f94060ff20bbabdc8da06"}, - {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2f5968608b1fe2a1d00d01ad1017ee27efd99b3437e08b83ded9b7af3f6f766"}, - {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:10e88e7f41e6197ea0429ae18f21ff521d4f4490aa33048f6c6f94c6045a6a75"}, - {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a4a3907011d39dbc3e37bdc5df0a8c93853c369039b59efa33a7b6669de04c60"}, - {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6d224f0c4c9c98290a6990259073f496fcec1b5cc613eecbd22786d398ded3ad"}, - {file = 
"coverage-7.4.1-cp38-cp38-win32.whl", hash = "sha256:23f5881362dcb0e1a92b84b3c2809bdc90db892332daab81ad8f642d8ed55042"}, - {file = "coverage-7.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:a07f61fc452c43cd5328b392e52555f7d1952400a1ad09086c4a8addccbd138d"}, - {file = "coverage-7.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8e738a492b6221f8dcf281b67129510835461132b03024830ac0e554311a5c54"}, - {file = "coverage-7.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:46342fed0fff72efcda77040b14728049200cbba1279e0bf1188f1f2078c1d70"}, - {file = "coverage-7.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9641e21670c68c7e57d2053ddf6c443e4f0a6e18e547e86af3fad0795414a628"}, - {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aeb2c2688ed93b027eb0d26aa188ada34acb22dceea256d76390eea135083950"}, - {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d12c923757de24e4e2110cf8832d83a886a4cf215c6e61ed506006872b43a6d1"}, - {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0491275c3b9971cdbd28a4595c2cb5838f08036bca31765bad5e17edf900b2c7"}, - {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8dfc5e195bbef80aabd81596ef52a1277ee7143fe419efc3c4d8ba2754671756"}, - {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1a78b656a4d12b0490ca72651fe4d9f5e07e3c6461063a9b6265ee45eb2bdd35"}, - {file = "coverage-7.4.1-cp39-cp39-win32.whl", hash = "sha256:f90515974b39f4dea2f27c0959688621b46d96d5a626cf9c53dbc653a895c05c"}, - {file = "coverage-7.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:64e723ca82a84053dd7bfcc986bdb34af8d9da83c521c19d6b472bc6880e191a"}, - {file = "coverage-7.4.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:32a8d985462e37cfdab611a6f95b09d7c091d07668fdc26e47a725ee575fe166"}, - {file = "coverage-7.4.1.tar.gz", hash = "sha256:1ed4b95480952b1a26d863e546fa5094564aa0065e1e5f0d4d0041f293251d04"}, + {file = "coverage-7.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bf54c3e089179d9d23900e3efc86d46e4431188d9a657f345410eecdd0151f50"}, + {file = "coverage-7.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fe6e43c8b510719b48af7db9631b5fbac910ade4bd90e6378c85ac5ac706382c"}, + {file = "coverage-7.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b98c89db1b150d851a7840142d60d01d07677a18f0f46836e691c38134ed18b"}, + {file = "coverage-7.4.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c5f9683be6a5b19cd776ee4e2f2ffb411424819c69afab6b2db3a0a364ec6642"}, + {file = "coverage-7.4.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78cdcbf7b9cb83fe047ee09298e25b1cd1636824067166dc97ad0543b079d22f"}, + {file = "coverage-7.4.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:2599972b21911111114100d362aea9e70a88b258400672626efa2b9e2179609c"}, + {file = "coverage-7.4.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ef00d31b7569ed3cb2036f26565f1984b9fc08541731ce01012b02a4c238bf03"}, + {file = "coverage-7.4.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:20a875bfd8c282985c4720c32aa05056f77a68e6d8bbc5fe8632c5860ee0b49b"}, + {file = "coverage-7.4.2-cp310-cp310-win32.whl", hash = "sha256:b3f2b1eb229f23c82898eedfc3296137cf1f16bb145ceab3edfd17cbde273fb7"}, + {file = 
"coverage-7.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:7df95fdd1432a5d2675ce630fef5f239939e2b3610fe2f2b5bf21fa505256fa3"}, + {file = "coverage-7.4.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a8ddbd158e069dded57738ea69b9744525181e99974c899b39f75b2b29a624e2"}, + {file = "coverage-7.4.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:81a5fb41b0d24447a47543b749adc34d45a2cf77b48ca74e5bf3de60a7bd9edc"}, + {file = "coverage-7.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2412e98e70f16243be41d20836abd5f3f32edef07cbf8f407f1b6e1ceae783ac"}, + {file = "coverage-7.4.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ddb79414c15c6f03f56cc68fa06994f047cf20207c31b5dad3f6bab54a0f66ef"}, + {file = "coverage-7.4.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf89ab85027427d351f1de918aff4b43f4eb5f33aff6835ed30322a86ac29c9e"}, + {file = "coverage-7.4.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a178b7b1ac0f1530bb28d2e51f88c0bab3e5949835851a60dda80bff6052510c"}, + {file = "coverage-7.4.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:06fe398145a2e91edaf1ab4eee66149c6776c6b25b136f4a86fcbbb09512fd10"}, + {file = "coverage-7.4.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:18cac867950943fe93d6cd56a67eb7dcd2d4a781a40f4c1e25d6f1ed98721a55"}, + {file = "coverage-7.4.2-cp311-cp311-win32.whl", hash = "sha256:f72cdd2586f9a769570d4b5714a3837b3a59a53b096bb954f1811f6a0afad305"}, + {file = "coverage-7.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:d779a48fac416387dd5673fc5b2d6bd903ed903faaa3247dc1865c65eaa5a93e"}, + {file = "coverage-7.4.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:adbdfcda2469d188d79771d5696dc54fab98a16d2ef7e0875013b5f56a251047"}, + {file = "coverage-7.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ac4bab32f396b03ebecfcf2971668da9275b3bb5f81b3b6ba96622f4ef3f6e17"}, + {file = "coverage-7.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:006d220ba2e1a45f1de083d5022d4955abb0aedd78904cd5a779b955b019ec73"}, + {file = "coverage-7.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3733545eb294e5ad274abe131d1e7e7de4ba17a144505c12feca48803fea5f64"}, + {file = "coverage-7.4.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42a9e754aa250fe61f0f99986399cec086d7e7a01dd82fd863a20af34cbce962"}, + {file = "coverage-7.4.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:2ed37e16cf35c8d6e0b430254574b8edd242a367a1b1531bd1adc99c6a5e00fe"}, + {file = "coverage-7.4.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:b953275d4edfab6cc0ed7139fa773dfb89e81fee1569a932f6020ce7c6da0e8f"}, + {file = "coverage-7.4.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:32b4ab7e6c924f945cbae5392832e93e4ceb81483fd6dc4aa8fb1a97b9d3e0e1"}, + {file = "coverage-7.4.2-cp312-cp312-win32.whl", hash = "sha256:f5df76c58977bc35a49515b2fbba84a1d952ff0ec784a4070334dfbec28a2def"}, + {file = "coverage-7.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:34423abbaad70fea9d0164add189eabaea679068ebdf693baa5c02d03e7db244"}, + {file = "coverage-7.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5b11f9c6587668e495cc7365f85c93bed34c3a81f9f08b0920b87a89acc13469"}, + {file = "coverage-7.4.2-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:51593a1f05c39332f623d64d910445fdec3d2ac2d96b37ce7f331882d5678ddf"}, + {file = "coverage-7.4.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69f1665165ba2fe7614e2f0c1aed71e14d83510bf67e2ee13df467d1c08bf1e8"}, + {file = "coverage-7.4.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3c8bbb95a699c80a167478478efe5e09ad31680931ec280bf2087905e3b95ec"}, + {file = "coverage-7.4.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:175f56572f25e1e1201d2b3e07b71ca4d201bf0b9cb8fad3f1dfae6a4188de86"}, + {file = "coverage-7.4.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8562ca91e8c40864942615b1d0b12289d3e745e6b2da901d133f52f2d510a1e3"}, + {file = "coverage-7.4.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d9a1ef0f173e1a19738f154fb3644f90d0ada56fe6c9b422f992b04266c55d5a"}, + {file = "coverage-7.4.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f40ac873045db4fd98a6f40387d242bde2708a3f8167bd967ccd43ad46394ba2"}, + {file = "coverage-7.4.2-cp38-cp38-win32.whl", hash = "sha256:d1b750a8409bec61caa7824bfd64a8074b6d2d420433f64c161a8335796c7c6b"}, + {file = "coverage-7.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:b4ae777bebaed89e3a7e80c4a03fac434a98a8abb5251b2a957d38fe3fd30088"}, + {file = "coverage-7.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3ff7f92ae5a456101ca8f48387fd3c56eb96353588e686286f50633a611afc95"}, + {file = "coverage-7.4.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:861d75402269ffda0b33af94694b8e0703563116b04c681b1832903fac8fd647"}, + {file = "coverage-7.4.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3507427d83fa961cbd73f11140f4a5ce84208d31756f7238d6257b2d3d868405"}, + {file = "coverage-7.4.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bf711d517e21fb5bc429f5c4308fbc430a8585ff2a43e88540264ae87871e36a"}, + {file = "coverage-7.4.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c00e54f0bd258ab25e7f731ca1d5144b0bf7bec0051abccd2bdcff65fa3262c9"}, + {file = "coverage-7.4.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f8e845d894e39fb53834da826078f6dc1a933b32b1478cf437007367efaf6f6a"}, + {file = "coverage-7.4.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:840456cb1067dc350af9080298c7c2cfdddcedc1cb1e0b30dceecdaf7be1a2d3"}, + {file = "coverage-7.4.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c11ca2df2206a4e3e4c4567f52594637392ed05d7c7fb73b4ea1c658ba560265"}, + {file = "coverage-7.4.2-cp39-cp39-win32.whl", hash = "sha256:3ff5bdb08d8938d336ce4088ca1a1e4b6c8cd3bef8bb3a4c0eb2f37406e49643"}, + {file = "coverage-7.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:ac9e95cefcf044c98d4e2c829cd0669918585755dd9a92e28a1a7012322d0a95"}, + {file = "coverage-7.4.2-pp38.pp39.pp310-none-any.whl", hash = "sha256:f593a4a90118d99014517c2679e04a4ef5aee2d81aa05c26c734d271065efcb6"}, + {file = "coverage-7.4.2.tar.gz", hash = "sha256:1a5ee18e3a8d766075ce9314ed1cb695414bae67df6a4b0805f5137d93d6f1cb"}, ] [package.dependencies] @@ -4298,4 +4298,4 @@ zstandard = ["zstandard"] [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "0a531611ad417be25c145f961e4d808b01a977dd323510e2fcccc262191b7caf" +content-hash = "5553acdf7ad32ec9dbc74523b9f7ff241907b0d8129a40e3052750c3182e7539" diff --git a/pyproject.toml b/pyproject.toml index f3f6de7d22..71c7317dd2 
100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -79,7 +79,7 @@ pytest-checkdocs = "2.10.1" pytest-lazy-fixture = "0.6.3" pre-commit = "3.5.0" fastavro = "1.9.4" -coverage = { version = "^7.4.1", extras = ["toml"] } +coverage = { version = "^7.4.2", extras = ["toml"] } requests-mock = "1.11.0" moto = { version = "^5.0.2", extras = ["server"] } typing-extensions = "4.9.0" From 0c7c21c79e5def27e70bf3a258de05c32fa60454 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 20 Feb 2024 18:28:26 -0800 Subject: [PATCH 129/169] Build: Bump getdaft from 0.2.15 to 0.2.16 (#456) Bumps [getdaft](https://github.com/Eventual-Inc/Daft) from 0.2.15 to 0.2.16. - [Release notes](https://github.com/Eventual-Inc/Daft/releases) - [Commits](https://github.com/Eventual-Inc/Daft/compare/v0.2.15...v0.2.16) --- updated-dependencies: - dependency-name: getdaft dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/poetry.lock b/poetry.lock index 14668f0340..6cd3829a78 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1201,17 +1201,17 @@ gcsfuse = ["fusepy"] [[package]] name = "getdaft" -version = "0.2.15" +version = "0.2.16" description = "Distributed Dataframes for Multimodal Data" optional = true python-versions = ">=3.7" files = [ - {file = "getdaft-0.2.15-cp37-abi3-macosx_10_7_x86_64.whl", hash = "sha256:95c16b0f25a78a13cab21128bcd0b5e7af65a14b12fd037ef8ec8aee5b7ac911"}, - {file = "getdaft-0.2.15-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:6afb66507ae899fb32adc8532b25ddd245a14d695a099ceb594afcb24848adb0"}, - {file = "getdaft-0.2.15-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:580a9971807e30a21136ae10eeb39cb2c880ab6eb87a464447206e4d36d52e2b"}, - {file = "getdaft-0.2.15-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44424e2adc80f12e3a404cc389a1b37eabbd1c8a3f0345d218852a65a7c3593d"}, - {file = "getdaft-0.2.15-cp37-abi3-win_amd64.whl", hash = "sha256:10103355c8a48455a1b2262bc1b7eca6b495059da7f2d220758bc273b734b898"}, - {file = "getdaft-0.2.15.tar.gz", hash = "sha256:5729915db8e15b6d42568cceef4a588ecdc5ce1a29f52c362e41d789bffb32e7"}, + {file = "getdaft-0.2.16-cp37-abi3-macosx_10_7_x86_64.whl", hash = "sha256:0a355301e79e00ab639150b84d380465f5f69ef9e6f36f1b5cf376e3d24229f6"}, + {file = "getdaft-0.2.16-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:317a8dff8169638cea40efbc01193d51f31c4ab441fc39f01f163f197fc264a2"}, + {file = "getdaft-0.2.16-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cb1e5a62fcbb4a909532bb64dc7af56e7ac3fef1b8220448fcae1a8af0c6bc4"}, + {file = "getdaft-0.2.16-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b932510fd24b4f1f021abd67016bbcdacda3315d0e3ee2a8e339d82719adbd51"}, + {file = "getdaft-0.2.16-cp37-abi3-win_amd64.whl", hash = "sha256:c3e74f56b211f88e5c335276fe4670a0dfac8dc8b5c684b22fc570b1350cc40d"}, + {file = "getdaft-0.2.16.tar.gz", hash = "sha256:3fc7b2c3373bc374a90ecc566c6f0d830b9ce751d6c930c96b70b2c4c2afa0c4"}, ] [package.dependencies] From 5ea0617c2e34e596db296778d1f00ec3fad59d59 Mon Sep 17 00:00:00 2001 From: Hussein Awala Date: Wed, 21 Feb 2024 14:16:51 +0100 Subject: [PATCH 130/169] Accept pyarrow `LargeListType` and `FixedSizeListType` (#458) * Accept pyarrow LargeListType and 
FixedSizeListType * Combine the 3 register in one --- pyiceberg/io/pyarrow.py | 4 +++- tests/io/test_pyarrow_visitor.py | 20 ++++++++++++++++++++ 2 files changed, 23 insertions(+), 1 deletion(-) diff --git a/pyiceberg/io/pyarrow.py b/pyiceberg/io/pyarrow.py index f391abfea2..76bb041195 100644 --- a/pyiceberg/io/pyarrow.py +++ b/pyiceberg/io/pyarrow.py @@ -690,7 +690,9 @@ def _(obj: pa.StructType, visitor: PyArrowSchemaVisitor[T]) -> T: @visit_pyarrow.register(pa.ListType) -def _(obj: pa.ListType, visitor: PyArrowSchemaVisitor[T]) -> T: +@visit_pyarrow.register(pa.FixedSizeListType) +@visit_pyarrow.register(pa.LargeListType) +def _(obj: Union[pa.ListType, pa.LargeListType, pa.FixedSizeListType], visitor: PyArrowSchemaVisitor[T]) -> T: visitor.before_list_element(obj.value_field) result = visit_pyarrow(obj.value_type, visitor) visitor.after_list_element(obj.value_field) diff --git a/tests/io/test_pyarrow_visitor.py b/tests/io/test_pyarrow_visitor.py index 7d35cae424..5b55bd61b6 100644 --- a/tests/io/test_pyarrow_visitor.py +++ b/tests/io/test_pyarrow_visitor.py @@ -245,6 +245,26 @@ def test_pyarrow_list_to_iceberg() -> None: assert visit_pyarrow(pyarrow_list, _ConvertToIceberg()) == expected +def test_pyarrow_large_list_to_iceberg() -> None: + pyarrow_list = pa.large_list(pa.field("element", pa.int32(), nullable=False, metadata={"PARQUET:field_id": "1"})) + expected = ListType( + element_id=1, + element_type=IntegerType(), + element_required=True, + ) + assert visit_pyarrow(pyarrow_list, _ConvertToIceberg()) == expected + + +def test_pyarrow_fixed_size_list_to_iceberg() -> None: + pyarrow_list = pa.list_(pa.field("element", pa.int32(), nullable=False, metadata={"PARQUET:field_id": "1"}), 1) + expected = ListType( + element_id=1, + element_type=IntegerType(), + element_required=True, + ) + assert visit_pyarrow(pyarrow_list, _ConvertToIceberg()) == expected + + def test_pyarrow_map_to_iceberg() -> None: pyarrow_map = pa.map_( pa.field("key", pa.int32(), nullable=False, metadata={"PARQUET:field_id": "1"}), From b829737940c5cf35d169a784ef45a6bddbc4a645 Mon Sep 17 00:00:00 2001 From: Fokko Driesprong Date: Wed, 21 Feb 2024 18:17:53 +0100 Subject: [PATCH 131/169] Bump pre-commit and such (#442) --- .pre-commit-config.yaml | 4 ++-- pyiceberg/avro/decoder_fast.pyi | 17 +++++++++++++++++ pyiceberg/avro/decoder_fast.pyx | 4 +--- pyiceberg/catalog/rest.py | 2 +- pyiceberg/conversions.py | 2 +- pyiceberg/table/__init__.py | 2 +- pyproject.toml | 16 ++++++++-------- tests/avro/test_decoder.py | 10 +++++----- 8 files changed, 36 insertions(+), 21 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index db6742a8cd..afdabf6fce 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -36,13 +36,13 @@ repos: - id: ruff-format args: [ --preview ] - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.6.1 + rev: v1.8.0 hooks: - id: mypy args: [--install-types, --non-interactive, --config=pyproject.toml] - repo: https://github.com/hadialqattan/pycln - rev: v2.3.0 + rev: v2.4.0 hooks: - id: pycln args: [--config=pyproject.toml] diff --git a/pyiceberg/avro/decoder_fast.pyi b/pyiceberg/avro/decoder_fast.pyi index cf45ce5066..cc367c4764 100644 --- a/pyiceberg/avro/decoder_fast.pyi +++ b/pyiceberg/avro/decoder_fast.pyi @@ -20,37 +20,54 @@ from pyiceberg.avro.decoder import BinaryDecoder class CythonBinaryDecoder(BinaryDecoder): def __init__(self, input_contents: bytes) -> None: pass + def tell(self) -> int: pass + def read(self, n: int) -> bytes: pass + def 
read_boolean(self) -> bool: pass + def read_int(self) -> int: pass + def read_ints(self, count: int) -> tuple[int, ...]: pass + def read_int_bytes_dict(self, count: int, dest: dict[int, bytes]) -> None: pass + def read_bytes(self) -> bytes: pass + def read_float(self) -> float: pass + def read_double(self) -> float: pass + def read_utf8(self) -> str: pass + def skip(self, n: int) -> None: pass + def skip_int(self) -> None: pass + def skip_boolean(self) -> None: pass + def skip_float(self) -> None: pass + def skip_double(self) -> None: pass + def skip_bytes(self) -> None: pass + def skip_utf8(self) -> None: pass diff --git a/pyiceberg/avro/decoder_fast.pyx b/pyiceberg/avro/decoder_fast.pyx index 182fd0e92e..52caec3308 100644 --- a/pyiceberg/avro/decoder_fast.pyx +++ b/pyiceberg/avro/decoder_fast.pyx @@ -32,9 +32,7 @@ unsigned_long_long_array_template = cython.declare(array.array, array.array('Q', @cython.final cdef class CythonBinaryDecoder: - """Implement a BinaryDecoder that reads from an in-memory buffer. - - """ + """Implement a BinaryDecoder that reads from an in-memory buffer.""" # This the data that is duplicated when the decoder is created. cdef unsigned char *_data diff --git a/pyiceberg/catalog/rest.py b/pyiceberg/catalog/rest.py index 9ca8aa641f..e597b910f7 100644 --- a/pyiceberg/catalog/rest.py +++ b/pyiceberg/catalog/rest.py @@ -116,7 +116,7 @@ class Endpoints: SIGV4_SERVICE = "rest.signing-name" AUTH_URL = "rest.authorization-url" -NAMESPACE_SEPARATOR = b"\x1F".decode(UTF8) +NAMESPACE_SEPARATOR = b"\x1f".decode(UTF8) def _retry_hook(retry_state: RetryCallState) -> None: diff --git a/pyiceberg/conversions.py b/pyiceberg/conversions.py index b523deff48..2a03a4de35 100644 --- a/pyiceberg/conversions.py +++ b/pyiceberg/conversions.py @@ -267,7 +267,7 @@ def _(primitive_type: DecimalType, value: Decimal) -> bytes: return decimal_to_bytes(value) -@singledispatch +@singledispatch # type: ignore def from_bytes(primitive_type: PrimitiveType, b: bytes) -> L: # type: ignore """Convert bytes to a built-in python value. diff --git a/pyiceberg/table/__init__.py b/pyiceberg/table/__init__.py index 2f891cd14a..b07da7e0fc 100644 --- a/pyiceberg/table/__init__.py +++ b/pyiceberg/table/__init__.py @@ -1695,7 +1695,7 @@ def rename_column(self, path_from: Union[str, Tuple[str, ...]], new_name: str) - from_field_correct_casing = self._schema.find_column_name(field_from.field_id) if from_field_correct_casing in self._identifier_field_names: self._identifier_field_names.remove(from_field_correct_casing) - new_identifier_path = f"{from_field_correct_casing[:-len(field_from.name)]}{new_name}" + new_identifier_path = f"{from_field_correct_casing[: -len(field_from.name)]}{new_name}" self._identifier_field_names.add(new_identifier_path) return self diff --git a/pyproject.toml b/pyproject.toml index 71c7317dd2..52c60d9482 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -312,7 +312,7 @@ source = ['pyiceberg/'] [tool.ruff] src = ['pyiceberg','tests'] extend-exclude = ["dev/provision.py"] -select = [ +lint.select = [ "E", # pycodestyle "W", # pycodestyle "F", # Pyflakes @@ -322,11 +322,11 @@ select = [ "I", # isort "UP", # pyupgrade ] -ignore = ["E501","E203","B024","B028","UP037"] +lint.ignore = ["E501","E203","B024","B028","UP037"] # Allow autofix for all enabled rules (when `--fix`) is provided. -fixable = ["ALL"] -unfixable = [] +lint.fixable = ["ALL"] +lint.unfixable = [] # Exclude a variety of commonly ignored directories. 
exclude = [ @@ -352,19 +352,19 @@ exclude = [ "node_modules", "venv", ] -per-file-ignores = {} +lint.per-file-ignores = {} # Ignore _all_ violations. # Same as Black. line-length = 130 # Allow unused variables when underscore-prefixed. -dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" +lint.dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" -[tool.ruff.pyupgrade] +[tool.ruff.lint.pyupgrade] # Preserve types, even if a file imports `from __future__ import annotations`. keep-runtime-typing = true -[tool.ruff.isort] +[tool.ruff.lint.isort] detect-same-package = true lines-between-types = 0 known-first-party = ["pyiceberg", "tests"] diff --git a/tests/avro/test_decoder.py b/tests/avro/test_decoder.py index bbcc7394f4..608e6ae2d5 100644 --- a/tests/avro/test_decoder.py +++ b/tests/avro/test_decoder.py @@ -144,13 +144,13 @@ def test_read_single_byte_at_the_time(decoder_class: Callable[[bytes], BinaryDec @pytest.mark.parametrize("decoder_class", AVAILABLE_DECODERS) def test_read_float(decoder_class: Callable[[bytes], BinaryDecoder]) -> None: - decoder = decoder_class(b"\x00\x00\x9A\x41") + decoder = decoder_class(b"\x00\x00\x9a\x41") assert decoder.read_float() == 19.25 @pytest.mark.parametrize("decoder_class", AVAILABLE_DECODERS) def test_skip_float(decoder_class: Callable[[bytes], BinaryDecoder]) -> None: - decoder = decoder_class(b"\x00\x00\x9A\x41") + decoder = decoder_class(b"\x00\x00\x9a\x41") assert decoder.tell() == 0 decoder.skip_float() assert decoder.tell() == 4 @@ -179,13 +179,13 @@ def test_read_bytes(decoder_class: Callable[[bytes], BinaryDecoder]) -> None: @pytest.mark.parametrize("decoder_class", AVAILABLE_DECODERS) def test_read_utf8(decoder_class: Callable[[bytes], BinaryDecoder]) -> None: - decoder = decoder_class(b"\x04\x76\x6F") + decoder = decoder_class(b"\x04\x76\x6f") assert decoder.read_utf8() == "vo" @pytest.mark.parametrize("decoder_class", AVAILABLE_DECODERS) def test_skip_utf8(decoder_class: Callable[[bytes], BinaryDecoder]) -> None: - decoder = decoder_class(b"\x04\x76\x6F") + decoder = decoder_class(b"\x04\x76\x6f") assert decoder.tell() == 0 decoder.skip_utf8() assert decoder.tell() == 3 @@ -193,7 +193,7 @@ def test_skip_utf8(decoder_class: Callable[[bytes], BinaryDecoder]) -> None: @pytest.mark.parametrize("decoder_class", AVAILABLE_DECODERS) def test_read_int_as_float(decoder_class: Callable[[bytes], BinaryDecoder]) -> None: - decoder = decoder_class(b"\x00\x00\x9A\x41") + decoder = decoder_class(b"\x00\x00\x9a\x41") reader = resolve_reader(FloatType(), DoubleType()) assert reader.read(decoder) == 19.25 From 44948cd9bab78b509b6befc960b5f248b37f53fe Mon Sep 17 00:00:00 2001 From: Fokko Driesprong Date: Wed, 21 Feb 2024 18:22:07 +0100 Subject: [PATCH 132/169] docstring: Fix missing commit (#432) --- pyiceberg/catalog/__init__.py | 2 ++ pyiceberg/catalog/dynamodb.py | 1 + pyiceberg/catalog/glue.py | 2 +- pyiceberg/catalog/hive.py | 1 + pyiceberg/catalog/rest.py | 2 ++ pyiceberg/catalog/sql.py | 2 +- 6 files changed, 8 insertions(+), 2 deletions(-) diff --git a/pyiceberg/catalog/__init__.py b/pyiceberg/catalog/__init__.py index f7e34ab23c..db83658f1f 100644 --- a/pyiceberg/catalog/__init__.py +++ b/pyiceberg/catalog/__init__.py @@ -413,6 +413,8 @@ def _commit_table(self, table_request: CommitTableRequest) -> CommitTableRespons Raises: NoSuchTableError: If a table with the given identifier does not exist. + CommitFailedException: Requirement not met, or a conflict with a concurrent commit. 
+ CommitStateUnknownException: Failed due to an internal exception on the side of the catalog. """ @abstractmethod diff --git a/pyiceberg/catalog/dynamodb.py b/pyiceberg/catalog/dynamodb.py index d5f3b5e14c..b7b0f3ddb1 100644 --- a/pyiceberg/catalog/dynamodb.py +++ b/pyiceberg/catalog/dynamodb.py @@ -208,6 +208,7 @@ def _commit_table(self, table_request: CommitTableRequest) -> CommitTableRespons Raises: NoSuchTableError: If a table with the given identifier does not exist. + CommitFailedException: Requirement not met, or a conflict with a concurrent commit. """ raise NotImplementedError diff --git a/pyiceberg/catalog/glue.py b/pyiceberg/catalog/glue.py index 06484cb0e4..089a30ba61 100644 --- a/pyiceberg/catalog/glue.py +++ b/pyiceberg/catalog/glue.py @@ -404,7 +404,7 @@ def _commit_table(self, table_request: CommitTableRequest) -> CommitTableRespons Raises: NoSuchTableError: If a table with the given identifier does not exist. - CommitFailedException: If the commit failed. + CommitFailedException: Requirement not met, or a conflict with a concurrent commit. """ identifier_tuple = self.identifier_to_tuple_without_catalog( tuple(table_request.identifier.namespace.root + [table_request.identifier.name]) diff --git a/pyiceberg/catalog/hive.py b/pyiceberg/catalog/hive.py index aba3c173e6..4d4370fc46 100644 --- a/pyiceberg/catalog/hive.py +++ b/pyiceberg/catalog/hive.py @@ -360,6 +360,7 @@ def _commit_table(self, table_request: CommitTableRequest) -> CommitTableRespons Raises: NoSuchTableError: If a table with the given identifier does not exist. + CommitFailedException: Requirement not met, or a conflict with a concurrent commit. """ identifier_tuple = self.identifier_to_tuple_without_catalog( tuple(table_request.identifier.namespace.root + [table_request.identifier.name]) diff --git a/pyiceberg/catalog/rest.py b/pyiceberg/catalog/rest.py index e597b910f7..c824e69133 100644 --- a/pyiceberg/catalog/rest.py +++ b/pyiceberg/catalog/rest.py @@ -596,6 +596,8 @@ def _commit_table(self, table_request: CommitTableRequest) -> CommitTableRespons Raises: NoSuchTableError: If a table with the given identifier does not exist. + CommitFailedException: Requirement not met, or a conflict with a concurrent commit. + CommitStateUnknownException: Failed due to an internal exception on the side of the catalog. """ response = self._session.post( self.url(Endpoints.update_table, prefixed=True, **self._split_identifier_for_path(table_request.identifier)), diff --git a/pyiceberg/catalog/sql.py b/pyiceberg/catalog/sql.py index 62a2dac54a..0059da6676 100644 --- a/pyiceberg/catalog/sql.py +++ b/pyiceberg/catalog/sql.py @@ -373,7 +373,7 @@ def _commit_table(self, table_request: CommitTableRequest) -> CommitTableRespons Raises: NoSuchTableError: If a table with the given identifier does not exist. - CommitFailedException: If the commit failed. + CommitFailedException: Requirement not met, or a conflict with a concurrent commit. """ identifier_tuple = self.identifier_to_tuple_without_catalog( tuple(table_request.identifier.namespace.root + [table_request.identifier.name]) From 339ba53fe2f1563c2ed2a99d09c0986b2e7e8caf Mon Sep 17 00:00:00 2001 From: Fokko Driesprong Date: Sun, 25 Feb 2024 23:33:53 +0100 Subject: [PATCH 133/169] Improve error message in case of a mismatch (#352) * Nice error * Simplify a bit * Property Co-authored-by: Honah J. * Property Co-authored-by: Honah J. * Whitespace --------- Co-authored-by: Honah J. 
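Before the diff itself, a condensed illustration of the behavior this patch adds, lifted from the unit tests at the bottom of the change: validating a simple table schema against a PyArrow schema with one mismatched type now raises a ValueError whose message embeds a per-field comparison rendered with rich, rather than an opaque error. The schema below mirrors the `table_schema_simple` fixture used in those tests:

import pyarrow as pa

from pyiceberg.schema import Schema
from pyiceberg.table import _check_schema
from pyiceberg.types import BooleanType, IntegerType, NestedField, StringType

table_schema = Schema(
    NestedField(field_id=1, name="foo", field_type=StringType(), required=False),
    NestedField(field_id=2, name="bar", field_type=IntegerType(), required=True),
    NestedField(field_id=3, name="baz", field_type=BooleanType(), required=False),
)
df_schema = pa.schema((
    pa.field("foo", pa.string(), nullable=True),
    pa.field("bar", pa.decimal128(18, 6), nullable=False),  # int vs. decimal mismatch
    pa.field("baz", pa.bool_(), nullable=True),
))

# Raises ValueError; the message marks foo and baz as matching and flags
# bar as a type mismatch in the rendered comparison table.
_check_schema(table_schema, other_schema=df_schema)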
--- pyiceberg/io/pyarrow.py | 4 ++ pyiceberg/schema.py | 12 +++++ pyiceberg/table/__init__.py | 45 ++++++++++++++++++- pyiceberg/table/name_mapping.py | 6 ++- tests/table/test_init.py | 78 +++++++++++++++++++++++++++++++++ 5 files changed, 143 insertions(+), 2 deletions(-) diff --git a/pyiceberg/io/pyarrow.py b/pyiceberg/io/pyarrow.py index 76bb041195..d41f7a07a5 100644 --- a/pyiceberg/io/pyarrow.py +++ b/pyiceberg/io/pyarrow.py @@ -655,6 +655,10 @@ def pyarrow_to_schema(schema: pa.Schema, name_mapping: Optional[NameMapping] = N return visit_pyarrow(schema, visitor) +def _pyarrow_to_schema_without_ids(schema: pa.Schema) -> Schema: + return visit_pyarrow(schema, _ConvertToIcebergWithoutIDs()) + + @singledispatch def visit_pyarrow(obj: Union[pa.DataType, pa.Schema], visitor: PyArrowSchemaVisitor[T]) -> T: """Apply a pyarrow schema visitor to any point within a schema. diff --git a/pyiceberg/schema.py b/pyiceberg/schema.py index 6dd174f325..e805895a7b 100644 --- a/pyiceberg/schema.py +++ b/pyiceberg/schema.py @@ -22,6 +22,7 @@ from dataclasses import dataclass from functools import cached_property, partial, singledispatch from typing import ( + TYPE_CHECKING, Any, Callable, Dict, @@ -62,6 +63,11 @@ UUIDType, ) +if TYPE_CHECKING: + from pyiceberg.table.name_mapping import ( + NameMapping, + ) + T = TypeVar("T") P = TypeVar("P") @@ -221,6 +227,12 @@ def find_type(self, name_or_id: Union[str, int], case_sensitive: bool = True) -> def highest_field_id(self) -> int: return max(self._lazy_id_to_name.keys(), default=0) + @cached_property + def name_mapping(self) -> NameMapping: + from pyiceberg.table.name_mapping import create_mapping_from_schema + + return create_mapping_from_schema(self) + def find_column_name(self, column_id: int) -> Optional[str]: """Find a column name given a column ID. diff --git a/pyiceberg/table/__init__.py b/pyiceberg/table/__init__.py index b07da7e0fc..060f13772b 100644 --- a/pyiceberg/table/__init__.py +++ b/pyiceberg/table/__init__.py @@ -84,7 +84,11 @@ TableMetadata, TableMetadataUtil, ) -from pyiceberg.table.name_mapping import NameMapping, parse_mapping_from_json, update_mapping +from pyiceberg.table.name_mapping import ( + NameMapping, + parse_mapping_from_json, + update_mapping, +) from pyiceberg.table.refs import MAIN_BRANCH, SnapshotRef from pyiceberg.table.snapshots import ( Operation, @@ -129,6 +133,41 @@ _JAVA_LONG_MAX = 9223372036854775807 +def _check_schema(table_schema: Schema, other_schema: "pa.Schema") -> None: + from pyiceberg.io.pyarrow import _pyarrow_to_schema_without_ids, pyarrow_to_schema + + name_mapping = table_schema.name_mapping + try: + task_schema = pyarrow_to_schema(other_schema, name_mapping=name_mapping) + except ValueError as e: + other_schema = _pyarrow_to_schema_without_ids(other_schema) + additional_names = set(other_schema.column_names) - set(table_schema.column_names) + raise ValueError( + f"PyArrow table contains more columns: {', '.join(sorted(additional_names))}. Update the schema first (hint, use union_by_name)." 
+ ) from e + + if table_schema.as_struct() != task_schema.as_struct(): + from rich.console import Console + from rich.table import Table as RichTable + + console = Console(record=True) + + rich_table = RichTable(show_header=True, header_style="bold") + rich_table.add_column("") + rich_table.add_column("Table field") + rich_table.add_column("Dataframe field") + + for lhs in table_schema.fields: + try: + rhs = task_schema.find_field(lhs.field_id) + rich_table.add_row("✅" if lhs == rhs else "❌", str(lhs), str(rhs)) + except ValueError: + rich_table.add_row("❌", str(lhs), "Missing") + + console.print(rich_table) + raise ValueError(f"Mismatch in fields:\n{console.export_text()}") + + class TableProperties: PARQUET_ROW_GROUP_SIZE_BYTES = "write.parquet.row-group-size-bytes" PARQUET_ROW_GROUP_SIZE_BYTES_DEFAULT = 128 * 1024 * 1024 # 128 MB @@ -1015,6 +1054,8 @@ def append(self, df: pa.Table) -> None: if len(self.spec().fields) > 0: raise ValueError("Cannot write to partitioned tables") + _check_schema(self.schema(), other_schema=df.schema) + with self.update_snapshot().fast_append() as update_snapshot: # skip writing data files if the dataframe is empty if df.shape[0] > 0: @@ -1045,6 +1086,8 @@ def overwrite(self, df: pa.Table, overwrite_filter: BooleanExpression = ALWAYS_T if len(self.spec().fields) > 0: raise ValueError("Cannot write to partitioned tables") + _check_schema(self.schema(), other_schema=df.schema) + with self.update_snapshot().overwrite() as update_snapshot: # skip writing data files if the dataframe is empty if df.shape[0] > 0: diff --git a/pyiceberg/table/name_mapping.py b/pyiceberg/table/name_mapping.py index 94974836cd..baa15f168d 100644 --- a/pyiceberg/table/name_mapping.py +++ b/pyiceberg/table/name_mapping.py @@ -26,7 +26,7 @@ from abc import ABC, abstractmethod from collections import ChainMap from functools import cached_property, singledispatch -from typing import Any, Dict, Generic, List, Optional, TypeVar, Union +from typing import Any, Dict, Generic, Iterator, List, Optional, TypeVar, Union from pydantic import Field, conlist, field_validator, model_serializer @@ -97,6 +97,10 @@ def __len__(self) -> int: """Return the number of mappings.""" return len(self.root) + def __iter__(self) -> Iterator[MappedField]: + """Iterate over the mapped fields.""" + return iter(self.root) + def __str__(self) -> str: """Convert the name-mapping into a nicely formatted string.""" if len(self.root) == 0: diff --git a/tests/table/test_init.py b/tests/table/test_init.py index efee43b192..7bf65a67da 100644 --- a/tests/table/test_init.py +++ b/tests/table/test_init.py @@ -19,6 +19,7 @@ from copy import copy from typing import Dict +import pyarrow as pa import pytest from sortedcontainers import SortedList @@ -58,6 +59,7 @@ Table, UpdateSchema, _apply_table_update, + _check_schema, _generate_snapshot_id, _match_deletes_to_data_file, _TableMetadataUpdateContext, @@ -982,3 +984,79 @@ def test_correct_schema() -> None: _ = t.scan(snapshot_id=-1).projection() assert "Snapshot not found: -1" in str(exc_info.value) + + +def test_schema_mismatch_type(table_schema_simple: Schema) -> None: + other_schema = pa.schema(( + pa.field("foo", pa.string(), nullable=True), + pa.field("bar", pa.decimal128(18, 6), nullable=False), + pa.field("baz", pa.bool_(), nullable=True), + )) + + expected = r"""Mismatch in fields: +┏━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓ +┃ ┃ Table field ┃ Dataframe field ┃ +┡━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┩ +│ ✅ │ 1: foo: 
optional string │ 1: foo: optional string │ +│ ❌ │ 2: bar: required int │ 2: bar: required decimal\(18, 6\) │ +│ ✅ │ 3: baz: optional boolean │ 3: baz: optional boolean │ +└────┴──────────────────────────┴─────────────────────────────────┘ +""" + + with pytest.raises(ValueError, match=expected): + _check_schema(table_schema_simple, other_schema) + + +def test_schema_mismatch_nullability(table_schema_simple: Schema) -> None: + other_schema = pa.schema(( + pa.field("foo", pa.string(), nullable=True), + pa.field("bar", pa.int32(), nullable=True), + pa.field("baz", pa.bool_(), nullable=True), + )) + + expected = """Mismatch in fields: +┏━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━┓ +┃ ┃ Table field ┃ Dataframe field ┃ +┡━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━┩ +│ ✅ │ 1: foo: optional string │ 1: foo: optional string │ +│ ❌ │ 2: bar: required int │ 2: bar: optional int │ +│ ✅ │ 3: baz: optional boolean │ 3: baz: optional boolean │ +└────┴──────────────────────────┴──────────────────────────┘ +""" + + with pytest.raises(ValueError, match=expected): + _check_schema(table_schema_simple, other_schema) + + +def test_schema_mismatch_missing_field(table_schema_simple: Schema) -> None: + other_schema = pa.schema(( + pa.field("foo", pa.string(), nullable=True), + pa.field("baz", pa.bool_(), nullable=True), + )) + + expected = """Mismatch in fields: +┏━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━┓ +┃ ┃ Table field ┃ Dataframe field ┃ +┡━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━┩ +│ ✅ │ 1: foo: optional string │ 1: foo: optional string │ +│ ❌ │ 2: bar: required int │ Missing │ +│ ✅ │ 3: baz: optional boolean │ 3: baz: optional boolean │ +└────┴──────────────────────────┴──────────────────────────┘ +""" + + with pytest.raises(ValueError, match=expected): + _check_schema(table_schema_simple, other_schema) + + +def test_schema_mismatch_additional_field(table_schema_simple: Schema) -> None: + other_schema = pa.schema(( + pa.field("foo", pa.string(), nullable=True), + pa.field("bar", pa.int32(), nullable=True), + pa.field("baz", pa.bool_(), nullable=True), + pa.field("new_field", pa.date32(), nullable=True), + )) + + expected = r"PyArrow table contains more columns: new_field. Update the schema first \(hint, use union_by_name\)." 
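+    # pytest.raises(match=...) treats the expected string as a regular expression, hence the escaped parentheses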
+ + with pytest.raises(ValueError, match=expected): + _check_schema(table_schema_simple, other_schema) From b31922fd6b61c335930a5a563c08f827f83b8e4d Mon Sep 17 00:00:00 2001 From: Kevin Liu Date: Sun, 25 Feb 2024 18:26:19 -0800 Subject: [PATCH 134/169] Cleanup conftest, remove LocalOutputFile (#468) --- tests/conftest.py | 39 +-------------------------------------- 1 file changed, 1 insertion(+), 38 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 0e0fbd6836..aa66f36046 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -42,7 +42,6 @@ List, Optional, ) -from urllib.parse import urlparse import boto3 import pytest @@ -57,8 +56,6 @@ GCS_PROJECT_ID, GCS_TOKEN, GCS_TOKEN_EXPIRES_AT_MS, - OutputFile, - OutputStream, fsspec, load_file_io, ) @@ -88,7 +85,7 @@ import pyarrow as pa from moto.server import ThreadedMotoServer # type: ignore - from pyiceberg.io.pyarrow import PyArrowFile, PyArrowFileIO + from pyiceberg.io.pyarrow import PyArrowFileIO def pytest_collection_modifyitems(items: List[pytest.Item]) -> None: @@ -1456,40 +1453,6 @@ def simple_map() -> MapType: return MapType(key_id=19, key_type=StringType(), value_id=25, value_type=DoubleType(), value_required=False) -class LocalOutputFile(OutputFile): - """An OutputFile implementation for local files (for test use only).""" - - def __init__(self, location: str) -> None: - parsed_location = urlparse(location) # Create a ParseResult from the uri - if ( - parsed_location.scheme and parsed_location.scheme != "file" - ): # Validate that an uri is provided with a scheme of `file` - raise ValueError("LocalOutputFile location must have a scheme of `file`") - elif parsed_location.netloc: - raise ValueError(f"Network location is not allowed for LocalOutputFile: {parsed_location.netloc}") - - super().__init__(location=location) - self._path = parsed_location.path - - def __len__(self) -> int: - """Return the length of an instance of the LocalOutputFile class.""" - return os.path.getsize(self._path) - - def exists(self) -> bool: - return os.path.exists(self._path) - - def to_input_file(self) -> "PyArrowFile": - from pyiceberg.io.pyarrow import PyArrowFileIO - - return PyArrowFileIO().new_input(location=self.location) - - def create(self, overwrite: bool = False) -> OutputStream: - output_file = open(self._path, "wb" if overwrite else "xb") - if not issubclass(type(output_file), OutputStream): - raise TypeError("Object returned from LocalOutputFile.create(...) 
does not match the OutputStream protocol.") - return output_file - - @pytest.fixture(scope="session") def generated_manifest_entry_file(avro_schema_manifest_entry: Dict[str, Any]) -> Generator[str, None, None]: from fastavro import parse_schema, writer From 5c6010b3ad16472f7ef02ce93d3d1524f602bffb Mon Sep 17 00:00:00 2001 From: Anupam Saini <4581797+anupam-saini@users.noreply.github.com> Date: Mon, 26 Feb 2024 09:37:05 -0500 Subject: [PATCH 135/169] Use `update_table_metadata` in InMemory Catalog (#470) --- tests/catalog/test_base.py | 41 ++++++++++---------------------------- 1 file changed, 10 insertions(+), 31 deletions(-) diff --git a/tests/catalog/test_base.py b/tests/catalog/test_base.py index d15c90fee3..21c7ec1521 100644 --- a/tests/catalog/test_base.py +++ b/tests/catalog/test_base.py @@ -52,8 +52,9 @@ SetCurrentSchemaUpdate, Table, TableIdentifier, + update_table_metadata, ) -from pyiceberg.table.metadata import TableMetadata, TableMetadataV1, new_table_metadata +from pyiceberg.table.metadata import TableMetadataV1 from pyiceberg.table.sorting import UNSORTED_SORT_ORDER, SortOrder from pyiceberg.transforms import IdentityTransform from pyiceberg.typedef import EMPTY_DICT @@ -118,36 +119,13 @@ def register_table(self, identifier: Union[str, Identifier], metadata_location: raise NotImplementedError def _commit_table(self, table_request: CommitTableRequest) -> CommitTableResponse: - new_metadata: Optional[TableMetadata] = None - metadata_location = "" - for update in table_request.updates: - if isinstance(update, AddSchemaUpdate): - add_schema_update: AddSchemaUpdate = update - identifier = tuple(table_request.identifier.namespace.root) + (table_request.identifier.name,) - table = self.__tables[identifier] - new_metadata = new_table_metadata( - add_schema_update.schema_, - table.metadata.partition_specs[0], - table.sort_order(), - table.location(), - table.properties, - table.metadata.table_uuid, - ) - - table = Table( - identifier=identifier, - metadata=new_metadata, - metadata_location=f's3://warehouse/{"/".join(identifier)}/metadata/metadata.json', - io=load_file_io(), - catalog=self, - ) - - self.__tables[identifier] = table - metadata_location = f's3://warehouse/{"/".join(identifier)}/metadata/metadata.json' + identifier = tuple(table_request.identifier.namespace.root) + (table_request.identifier.name,) + table = self.__tables[identifier] + table.metadata = update_table_metadata(base_metadata=table.metadata, updates=table_request.updates) return CommitTableResponse( - metadata=new_metadata.model_dump() if new_metadata else {}, - metadata_location=metadata_location if metadata_location else "", + metadata=table.metadata.model_dump(), + metadata_location=table.location(), ) def load_table(self, identifier: Union[str, Identifier]) -> Table: @@ -617,8 +595,9 @@ def test_commit_table(catalog: InMemoryCatalog) -> None: # Then assert response.metadata.table_uuid == given_table.metadata.table_uuid - assert len(response.metadata.schemas) == 1 - assert response.metadata.schemas[0] == new_schema + assert len(response.metadata.schemas) == 2 + assert response.metadata.schemas[1] == new_schema + assert response.metadata.current_schema_id == new_schema.schema_id def test_add_column(catalog: InMemoryCatalog) -> None: From 5209874cd4685427520b67b64188f5e9919f2816 Mon Sep 17 00:00:00 2001 From: j7nhai <146867566+j7nhai@users.noreply.github.com> Date: Tue, 27 Feb 2024 17:11:29 +0800 Subject: [PATCH 136/169] enable set hadoop ugi for hive catalog (#472) * enable set ugi for hive catalog. 
* fix * Suggested changes * fix ruff-format Failed * fix failed and pass tests. --- mkdocs/docs/cli.md | 1 + mkdocs/docs/configuration.md | 1 + pyiceberg/catalog/hive.py | 8 ++++++-- pyiceberg/cli/console.py | 13 ++++++++++++- 4 files changed, 20 insertions(+), 3 deletions(-) diff --git a/mkdocs/docs/cli.md b/mkdocs/docs/cli.md index 695011a6ef..28e44955d7 100644 --- a/mkdocs/docs/cli.md +++ b/mkdocs/docs/cli.md @@ -36,6 +36,7 @@ Options: --catalog TEXT --verbose BOOLEAN --output [text|json] +--ugi TEXT --uri TEXT --credential TEXT --help Show this message and exit. diff --git a/mkdocs/docs/configuration.md b/mkdocs/docs/configuration.md index 8acc0a98cb..d69b9851d3 100644 --- a/mkdocs/docs/configuration.md +++ b/mkdocs/docs/configuration.md @@ -148,6 +148,7 @@ catalog: | Key | Example | Description | | ---------------------- | ----------------------- | -------------------------------------------------------------------------------------------------- | | uri | https://rest-catalog/ws | URI identifying the REST Server | +| ugi | t-1234:secret | Hadoop UGI for Hive client. | | credential | t-1234:secret | Credential to use for OAuth2 credential flow when initializing the catalog | | token | FEW23.DFSDF.FSDF | Bearer token value to use for `Authorization` header | | rest.sigv4-enabled | true | Sign requests to the REST Server using AWS SigV4 protocol | diff --git a/pyiceberg/catalog/hive.py b/pyiceberg/catalog/hive.py index 4d4370fc46..c24355f6fb 100644 --- a/pyiceberg/catalog/hive.py +++ b/pyiceberg/catalog/hive.py @@ -130,17 +130,21 @@ class _HiveClient: _transport: TTransport _client: Client + _ugi: Optional[List[str]] - def __init__(self, uri: str): + def __init__(self, uri: str, ugi: Optional[str] = None): url_parts = urlparse(uri) transport = TSocket.TSocket(url_parts.hostname, url_parts.port) self._transport = TTransport.TBufferedTransport(transport) protocol = TBinaryProtocol.TBinaryProtocol(transport) self._client = Client(protocol) + self._ugi = ugi.split(':') if ugi else None def __enter__(self) -> Client: self._transport.open() + if self._ugi: + self._client.set_ugi(*self._ugi) return self._client def __exit__( @@ -233,7 +237,7 @@ class HiveCatalog(Catalog): def __init__(self, name: str, **properties: str): super().__init__(name, **properties) - self._client = _HiveClient(properties["uri"]) + self._client = _HiveClient(properties["uri"], properties.get("ugi")) def _convert_hive_into_iceberg(self, table: HiveTable, io: FileIO) -> Table: properties: Dict[str, str] = table.parameters diff --git a/pyiceberg/cli/console.py b/pyiceberg/cli/console.py index 095c1ef6dc..0fbda10960 100644 --- a/pyiceberg/cli/console.py +++ b/pyiceberg/cli/console.py @@ -59,11 +59,22 @@ def wrapper(*args: Any, **kwargs: Any): # type: ignore @click.option("--catalog") @click.option("--verbose", type=click.BOOL) @click.option("--output", type=click.Choice(["text", "json"]), default="text") +@click.option("--ugi") @click.option("--uri") @click.option("--credential") @click.pass_context -def run(ctx: Context, catalog: Optional[str], verbose: bool, output: str, uri: Optional[str], credential: Optional[str]) -> None: +def run( + ctx: Context, + catalog: Optional[str], + verbose: bool, + output: str, + ugi: Optional[str], + uri: Optional[str], + credential: Optional[str], +) -> None: properties = {} + if ugi: + properties["ugi"] = ugi if uri: properties["uri"] = uri if credential: From c5735dc6cffb133abb1465f12a1d37097c277431 Mon Sep 17 00:00:00 2001 From: Rushil Shah 
<41440312+rushilshah1@users.noreply.github.com> Date: Tue, 27 Feb 2024 12:56:43 -0500 Subject: [PATCH 137/169] raise exception if namespace does not exist in load_namespace_properties (#477) --- pyiceberg/catalog/sql.py | 4 +++- tests/catalog/test_sql.py | 12 ++++++++++++ 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/pyiceberg/catalog/sql.py b/pyiceberg/catalog/sql.py index 0059da6676..b6b2feeeb0 100644 --- a/pyiceberg/catalog/sql.py +++ b/pyiceberg/catalog/sql.py @@ -567,7 +567,9 @@ def load_namespace_properties(self, namespace: Union[str, Identifier]) -> Proper Raises: NoSuchNamespaceError: If a namespace with the given name does not exist. """ - database_name = self.identifier_to_database(namespace, NoSuchNamespaceError) + database_name = self.identifier_to_database(namespace) + if not self._namespace_exists(database_name): + raise NoSuchNamespaceError(f"Database {database_name} does not exist") stmt = select(IcebergNamespaceProperties).where( IcebergNamespaceProperties.catalog_name == self.name, IcebergNamespaceProperties.namespace == database_name diff --git a/tests/catalog/test_sql.py b/tests/catalog/test_sql.py index bc75eb6300..2b1167f1b6 100644 --- a/tests/catalog/test_sql.py +++ b/tests/catalog/test_sql.py @@ -726,6 +726,18 @@ def test_load_empty_namespace_properties(catalog: SqlCatalog, database_name: str assert listed_properties == {"exists": "true"} +@pytest.mark.parametrize( 'catalog', [ lazy_fixture('catalog_memory'), lazy_fixture('catalog_sqlite'), ], ) +def test_load_namespace_properties_non_existing_namespace(catalog: SqlCatalog) -> None: + with pytest.raises(NoSuchNamespaceError): + catalog.load_namespace_properties("does_not_exist") + + @pytest.mark.parametrize( 'catalog', [ From a1099d81f202a4ed0bdbf69674844868761d5c89 Mon Sep 17 00:00:00 2001 From: Drew Gallardo Date: Tue, 27 Feb 2024 09:59:04 -0800 Subject: [PATCH 138/169] Add Support for Custom Header Configurations in RESTCatalog (#467) --- mkdocs/docs/configuration.md | 13 ++++++++++++ pyiceberg/catalog/rest.py | 17 ++++++++++++---- tests/catalog/test_rest.py | 39 ++++++++++++++++++++++++++++++++++++ 3 files changed, 65 insertions(+), 4 deletions(-) diff --git a/mkdocs/docs/configuration.md b/mkdocs/docs/configuration.md index d69b9851d3..f0eb56ebd5 100644 --- a/mkdocs/docs/configuration.md +++ b/mkdocs/docs/configuration.md @@ -156,6 +156,19 @@ catalog: | rest.signing-name | execute-api | The service signing name to use when SigV4 signing a request | | rest.authorization-url | https://auth-service/cc | Authentication URL to use for client credentials authentication (default: uri + 'v1/oauth/tokens') | + +### Headers in RESTCatalog + +To configure custom headers in RESTCatalog, include them in the catalog properties with the prefix `header.`. This +ensures that all HTTP requests to the REST service include the specified headers. + +```yaml +catalog: + default: + uri: http://rest-catalog/ws/ + credential: t-1234:secret + header.content-type: application/vnd.api+json +``` + ## SQL Catalog The SQL catalog requires a database for its backend. PyIceberg supports PostgreSQL and SQLite through psycopg2. The database connection has to be configured using the `uri` property.
See SQLAlchemy's [documentation for URL format](https://docs.sqlalchemy.org/en/20/core/engines.html#backend-specific-urls): diff --git a/pyiceberg/catalog/rest.py b/pyiceberg/catalog/rest.py index c824e69133..ab3d22efba 100644 --- a/pyiceberg/catalog/rest.py +++ b/pyiceberg/catalog/rest.py @@ -115,6 +115,7 @@ class Endpoints: SIGV4_REGION = "rest.signing-region" SIGV4_SERVICE = "rest.signing-name" AUTH_URL = "rest.authorization-url" +HEADER_PREFIX = "header." NAMESPACE_SEPARATOR = b"\x1f".decode(UTF8) @@ -242,10 +243,7 @@ def _create_session(self) -> Session: self._refresh_token(session, self.properties.get(TOKEN)) # Set HTTP headers - session.headers["Content-type"] = "application/json" - session.headers["X-Client-Version"] = ICEBERG_REST_SPEC_VERSION - session.headers["User-Agent"] = f"PyIceberg/{__version__}" - session.headers["X-Iceberg-Access-Delegation"] = "vended-credentials" + self._config_headers(session) # Configure SigV4 Request Signing if str(self.properties.get(SIGV4, False)).lower() == "true": @@ -458,6 +456,17 @@ def _refresh_token(self, session: Optional[Session] = None, new_token: Optional[ if token := self.properties.get(TOKEN): session.headers[AUTHORIZATION_HEADER] = f"{BEARER_PREFIX} {token}" + def _config_headers(self, session: Session) -> None: + session.headers["Content-type"] = "application/json" + session.headers["X-Client-Version"] = ICEBERG_REST_SPEC_VERSION + session.headers["User-Agent"] = f"PyIceberg/{__version__}" + session.headers["X-Iceberg-Access-Delegation"] = "vended-credentials" + header_properties = self._extract_headers_from_properties() + session.headers.update(header_properties) + + def _extract_headers_from_properties(self) -> Dict[str, str]: + return {key[len(HEADER_PREFIX) :]: value for key, value in self.properties.items() if key.startswith(HEADER_PREFIX)} + @retry(**_RETRY_ARGS) def create_table( self, diff --git a/tests/catalog/test_rest.py b/tests/catalog/test_rest.py index 2b698d9c1e..fa35ec4548 100644 --- a/tests/catalog/test_rest.py +++ b/tests/catalog/test_rest.py @@ -167,6 +167,45 @@ def test_config_200(requests_mock: Mocker) -> None: assert history[1].url == "https://iceberg-test-catalog/v1/config?warehouse=s3%3A%2F%2Fsome-bucket" +def test_properties_sets_headers(requests_mock: Mocker) -> None: + requests_mock.get( + f"{TEST_URI}v1/config", + json={"defaults": {}, "overrides": {}}, + status_code=200, + ) + + catalog = RestCatalog( + "rest", uri=TEST_URI, warehouse="s3://some-bucket", **{"header.Content-Type": "application/vnd.api+json"} + ) + + assert ( + catalog._session.headers.get("Content-type") == "application/vnd.api+json" + ), "Expected 'Content-Type' header to be 'application/vnd.api+json'" + + assert ( + requests_mock.last_request.headers["Content-type"] == "application/vnd.api+json" + ), "Config request did not include expected 'Content-Type' header" + + +def test_config_sets_headers(requests_mock: Mocker) -> None: + namespace = "leden" + requests_mock.get( + f"{TEST_URI}v1/config", + json={"defaults": {"header.Content-Type": "application/vnd.api+json"}, "overrides": {}}, + status_code=200, + ) + requests_mock.post(f"{TEST_URI}v1/namespaces", json={"namespace": [namespace], "properties": {}}, status_code=200) + catalog = RestCatalog("rest", uri=TEST_URI, warehouse="s3://some-bucket") + catalog.create_namespace(namespace) + + assert ( + catalog._session.headers.get("Content-type") == "application/vnd.api+json" + ), "Expected 'Content-Type' header to be 'application/vnd.api+json'" + assert ( + 
requests_mock.last_request.headers["Content-type"] == "application/vnd.api+json" + ), "Create namespace request did not include expected 'Content-Type' header" + + def test_token_400(rest_mock: Mocker) -> None: rest_mock.post( f"{TEST_URI}v1/oauth/tokens", From 27b6fe777b4913d58923c5fdad51dd8974e86e39 Mon Sep 17 00:00:00 2001 From: Fokko Driesprong Date: Wed, 28 Feb 2024 12:00:13 +0100 Subject: [PATCH 139/169] rest: Set OAuth Content-Type header explicitly (#478) --- pyiceberg/catalog/rest.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/pyiceberg/catalog/rest.py b/pyiceberg/catalog/rest.py index ab3d22efba..6b952ef06d 100644 --- a/pyiceberg/catalog/rest.py +++ b/pyiceberg/catalog/rest.py @@ -290,8 +290,9 @@ def _fetch_access_token(self, session: Session, credential: str) -> str: else: client_id, client_secret = None, credential data = {GRANT_TYPE: CLIENT_CREDENTIALS, CLIENT_ID: client_id, CLIENT_SECRET: client_secret, SCOPE: CATALOG_SCOPE} - # Uses application/x-www-form-urlencoded by default - response = session.post(url=self.auth_url, data=data) + response = session.post( + url=self.auth_url, data=data, headers={**session.headers, "Content-type": "application/x-www-form-urlencoded"} + ) try: response.raise_for_status() except HTTPError as exc: From 6a34421763b26229b16d6c2d4f7b331f01ff4c3b Mon Sep 17 00:00:00 2001 From: Amogh Jahagirdar Date: Wed, 28 Feb 2024 06:54:33 -0800 Subject: [PATCH 140/169] Partition Evolution Support (#245) --- mkdocs/docs/api.md | 57 ++ pyiceberg/partitioning.py | 131 ++++- pyiceberg/table/__init__.py | 295 ++++++++++- pyiceberg/table/metadata.py | 3 +- tests/catalog/test_hive.py | 4 +- tests/integration/test_partition_evolution.py | 490 ++++++++++++++++++ 6 files changed, 966 insertions(+), 14 deletions(-) create mode 100644 tests/integration/test_partition_evolution.py diff --git a/mkdocs/docs/api.md b/mkdocs/docs/api.md index 53801922fc..724a45c52f 100644 --- a/mkdocs/docs/api.md +++ b/mkdocs/docs/api.md @@ -418,6 +418,63 @@ with table.update_schema(allow_incompatible_changes=True) as update: update.delete_column("some_field") ``` +## Partition evolution + +PyIceberg supports partition evolution. See the [partition evolution](https://iceberg.apache.org/spec/#partition-evolution) +for more details. + +The API to use when evolving partitions is the `update_spec` API on the table. + +```python +with table.update_spec() as update: + update.add_field("id", BucketTransform(16), "bucketed_id") + update.add_field("event_ts", DayTransform(), "day_ts") +``` + +Updating the partition spec can also be done as part of a transaction with other operations. + +```python +with table.transaction() as transaction: + with transaction.update_spec() as update_spec: + update_spec.add_field("id", BucketTransform(16), "bucketed_id") + update_spec.add_field("event_ts", DayTransform(), "day_ts") + # ... Update properties etc +``` + +### Add fields + +New partition fields can be added via the `add_field` API which takes in the field name to partition on, +the partition transform, and an optional partition name. If the partition name is not specified, +one will be created. 
+ +```python +with table.update_spec() as update: + update.add_field("id", BucketTransform(16), "bucketed_id") + update.add_field("event_ts", DayTransform(), "day_ts") + # add_identity is a shortcut API for adding an IdentityTransform + update.add_identity("some_field") +``` + +### Remove fields + +Partition fields can also be removed via the `remove_field` API if it no longer makes sense to partition on those fields. + +```python +with table.update_spec() as update: + # Remove the partition field with the name some_partition_name + update.remove_field("some_partition_name") +``` + +### Rename fields + +Partition fields can also be renamed via the `rename_field` API. + +```python +with table.update_spec() as update: + # Rename the partition field with the name bucketed_id to sharded_id + update.rename_field("bucketed_id", "sharded_id") +``` + ## Table properties Set and remove properties through the `Transaction` API: diff --git a/pyiceberg/partitioning.py b/pyiceberg/partitioning.py index f6307f0f8c..cd5a957b22 100644 --- a/pyiceberg/partitioning.py +++ b/pyiceberg/partitioning.py @@ -16,14 +16,9 @@ # under the License. from __future__ import annotations -from functools import cached_property -from typing import ( - Any, - Dict, - List, - Optional, - Tuple, -) +from abc import ABC, abstractmethod +from functools import cached_property, singledispatch +from typing import Any, Dict, Generic, List, Optional, Tuple, TypeVar from pydantic import ( BeforeValidator, @@ -34,7 +29,18 @@ from typing_extensions import Annotated from pyiceberg.schema import Schema -from pyiceberg.transforms import Transform, parse_transform +from pyiceberg.transforms import ( + BucketTransform, + DayTransform, + HourTransform, + IdentityTransform, + Transform, + TruncateTransform, + UnknownTransform, + VoidTransform, + YearTransform, + parse_transform, +) from pyiceberg.typedef import IcebergBaseModel from pyiceberg.types import NestedField, StructType @@ -143,7 +149,7 @@ def is_unpartitioned(self) -> bool: def last_assigned_field_id(self) -> int: if self.fields: return max(pf.field_id for pf in self.fields) - return PARTITION_FIELD_ID_START + return PARTITION_FIELD_ID_START - 1 @cached_property def source_id_to_fields_map(self) -> Dict[int, List[PartitionField]]: @@ -215,3 +221,108 @@ def assign_fresh_partition_spec_ids(spec: PartitionSpec, old_schema: Schema, fre ) ) return PartitionSpec(*partition_fields, spec_id=INITIAL_PARTITION_SPEC_ID) + + +T = TypeVar("T") + + +class PartitionSpecVisitor(Generic[T], ABC): + @abstractmethod + def identity(self, field_id: int, source_name: str, source_id: int) -> T: + """Visit identity partition field.""" + + @abstractmethod + def bucket(self, field_id: int, source_name: str, source_id: int, num_buckets: int) -> T: + """Visit bucket partition field.""" + + @abstractmethod + def truncate(self, field_id: int, source_name: str, source_id: int, width: int) -> T: + """Visit truncate partition field.""" + + @abstractmethod + def year(self, field_id: int, source_name: str, source_id: int) -> T: + """Visit year partition field.""" + + @abstractmethod + def month(self, field_id: int, source_name: str, source_id: int) -> T: + """Visit month partition field.""" + + @abstractmethod + def day(self, field_id: int, source_name: str, source_id: int) -> T: + """Visit day partition field.""" + + @abstractmethod + def hour(self, field_id: int, source_name: str, source_id: int) -> T: + """Visit hour partition field.""" + + @abstractmethod + def always_null(self, field_id: int, source_name: str, source_id:
int) -> T: + """Visit void partition field.""" + + @abstractmethod + def unknown(self, field_id: int, source_name: str, source_id: int, transform: str) -> T: + """Visit unknown partition field.""" + raise ValueError(f"Unknown transform is not supported: {transform}") + + +class _PartitionNameGenerator(PartitionSpecVisitor[str]): + def identity(self, field_id: int, source_name: str, source_id: int) -> str: + return source_name + + def bucket(self, field_id: int, source_name: str, source_id: int, num_buckets: int) -> str: + return f"{source_name}_bucket_{num_buckets}" + + def truncate(self, field_id: int, source_name: str, source_id: int, width: int) -> str: + return source_name + "_trunc_" + str(width) + + def year(self, field_id: int, source_name: str, source_id: int) -> str: + return source_name + "_year" + + def month(self, field_id: int, source_name: str, source_id: int) -> str: + return source_name + "_month" + + def day(self, field_id: int, source_name: str, source_id: int) -> str: + return source_name + "_day" + + def hour(self, field_id: int, source_name: str, source_id: int) -> str: + return source_name + "_hour" + + def always_null(self, field_id: int, source_name: str, source_id: int) -> str: + return source_name + "_null" + + def unknown(self, field_id: int, source_name: str, source_id: int, transform: str) -> str: + return super().unknown(field_id, source_name, source_id, transform) + + +R = TypeVar("R") + + +@singledispatch +def _visit(spec: PartitionSpec, schema: Schema, visitor: PartitionSpecVisitor[R]) -> List[R]: + return [_visit_partition_field(schema, field, visitor) for field in spec.fields] + + +def _visit_partition_field(schema: Schema, field: PartitionField, visitor: PartitionSpecVisitor[R]) -> R: + source_name = schema.find_column_name(field.source_id) + if not source_name: + raise ValueError(f"Could not find field with id {field.source_id}") + + transform = field.transform + if isinstance(transform, IdentityTransform): + return visitor.identity(field.field_id, source_name, field.source_id) + elif isinstance(transform, BucketTransform): + return visitor.bucket(field.field_id, source_name, field.source_id, transform.num_buckets) + elif isinstance(transform, TruncateTransform): + return visitor.truncate(field.field_id, source_name, field.source_id, transform.width) + elif isinstance(transform, DayTransform): + return visitor.day(field.field_id, source_name, field.source_id) + elif isinstance(transform, HourTransform): + return visitor.hour(field.field_id, source_name, field.source_id) + elif isinstance(transform, YearTransform): + return visitor.year(field.field_id, source_name, field.source_id) + elif isinstance(transform, VoidTransform): + return visitor.always_null(field.field_id, source_name, field.source_id) + elif isinstance(transform, UnknownTransform): + return visitor.unknown(field.field_id, source_name, field.source_id, repr(transform)) + else: + raise ValueError(f"Unknown transform {transform}") diff --git a/pyiceberg/table/__init__.py b/pyiceberg/table/__init__.py index 060f13772b..e49f9400fe 100644 --- a/pyiceberg/table/__init__.py +++ b/pyiceberg/table/__init__.py @@ -51,6 +51,7 @@ And, BooleanExpression, EqualTo, + Reference, parser, visitors, ) @@ -67,7 +68,15 @@ write_manifest, write_manifest_list, ) -from pyiceberg.partitioning import PartitionSpec +from pyiceberg.partitioning import ( + INITIAL_PARTITION_SPEC_ID, + PARTITION_FIELD_ID_START, + IdentityTransform, + PartitionField, + PartitionSpec, + _PartitionNameGenerator, + _visit_partition_field, 
+) from pyiceberg.schema import ( PartnerAccessor, Schema, @@ -99,6 +108,7 @@ update_snapshot_summaries, ) from pyiceberg.table.sorting import SortOrder +from pyiceberg.transforms import TimeTransform, Transform, VoidTransform from pyiceberg.typedef import ( EMPTY_DICT, IcebergBaseModel, @@ -372,6 +382,14 @@ def update_snapshot(self) -> UpdateSnapshot: """ return UpdateSnapshot(self._table, self) + def update_spec(self) -> UpdateSpec: + """Create a new UpdateSpec to update the partitioning of the table. + + Returns: + A new UpdateSpec. + """ + return UpdateSpec(self._table, self) + def remove_properties(self, *removals: str) -> Transaction: """Remove properties. @@ -634,6 +652,43 @@ def _(update: SetCurrentSchemaUpdate, base_metadata: TableMetadata, context: _Ta return base_metadata.model_copy(update={"current_schema_id": new_schema_id}) +@_apply_table_update.register(AddPartitionSpecUpdate) +def _(update: AddPartitionSpecUpdate, base_metadata: TableMetadata, context: _TableMetadataUpdateContext) -> TableMetadata: + for spec in base_metadata.partition_specs: + if spec.spec_id == update.spec.spec_id: + raise ValueError(f"Partition spec with id {spec.spec_id} already exists: {spec}") + context.add_update(update) + return base_metadata.model_copy( + update={ + "partition_specs": base_metadata.partition_specs + [update.spec], + "last_partition_id": max( + max(field.field_id for field in update.spec.fields), + base_metadata.last_partition_id or PARTITION_FIELD_ID_START - 1, + ), + } + ) + + +@_apply_table_update.register(SetDefaultSpecUpdate) +def _(update: SetDefaultSpecUpdate, base_metadata: TableMetadata, context: _TableMetadataUpdateContext) -> TableMetadata: + new_spec_id = update.spec_id + if new_spec_id == -1: + new_spec_id = max(spec.spec_id for spec in base_metadata.partition_specs) + if new_spec_id == base_metadata.default_spec_id: + return base_metadata + found_spec_id = False + for spec in base_metadata.partition_specs: + found_spec_id = spec.spec_id == new_spec_id + if found_spec_id: + break + + if not found_spec_id: + raise ValueError(f"Failed to find spec with id {new_spec_id}") + + context.add_update(update) + return base_metadata.model_copy(update={"default_spec_id": new_spec_id}) + + @_apply_table_update.register(AddSnapshotUpdate) def _(update: AddSnapshotUpdate, base_metadata: TableMetadata, context: _TableMetadataUpdateContext) -> TableMetadata: if len(base_metadata.schemas) == 0: @@ -969,6 +1024,12 @@ def sort_orders(self) -> Dict[int, SortOrder]: """Return a dict of the sort orders of this table.""" return {sort_order.order_id: sort_order for sort_order in self.metadata.sort_orders} + def last_partition_id(self) -> int: + """Return the highest assigned partition field ID across all specs or 999 if only the unpartitioned spec exists.""" + if self.metadata.last_partition_id: + return self.metadata.last_partition_id + return PARTITION_FIELD_ID_START - 1 + @property def properties(self) -> Dict[str, str]: """Properties of the table.""" @@ -1095,6 +1156,9 @@ def overwrite(self, df: pa.Table, overwrite_filter: BooleanExpression = ALWAYS_T for data_file in data_files: update_snapshot.append_data_file(data_file) + def update_spec(self, case_sensitive: bool = True) -> UpdateSpec: + return UpdateSpec(self, case_sensitive=case_sensitive) + def refs(self) -> Dict[str, SnapshotRef]: """Return the snapshot references in the table.""" return self.metadata.refs @@ -2655,3 +2719,232 @@ def overwrite(self) -> OverwriteFiles: operation=Operation.OVERWRITE if self._table.current_snapshot() 
is not None else Operation.APPEND, transaction=self._transaction, ) + + +class UpdateSpec: + _table: Table + _name_to_field: Dict[str, PartitionField] = {} + _name_to_added_field: Dict[str, PartitionField] = {} + _transform_to_field: Dict[Tuple[int, str], PartitionField] = {} + _transform_to_added_field: Dict[Tuple[int, str], PartitionField] = {} + _renames: Dict[str, str] = {} + _added_time_fields: Dict[int, PartitionField] = {} + _case_sensitive: bool + _adds: List[PartitionField] + _deletes: Set[int] + _last_assigned_partition_id: int + _transaction: Optional[Transaction] + + def __init__(self, table: Table, transaction: Optional[Transaction] = None, case_sensitive: bool = True) -> None: + self._table = table + self._name_to_field = {field.name: field for field in table.spec().fields} + self._name_to_added_field = {} + self._transform_to_field = {(field.source_id, repr(field.transform)): field for field in table.spec().fields} + self._transform_to_added_field = {} + self._adds = [] + self._deletes = set() + self._last_assigned_partition_id = table.last_partition_id() + self._renames = {} + self._transaction = transaction + self._case_sensitive = case_sensitive + self._added_time_fields = {} + + def add_field( + self, + source_column_name: str, + transform: Transform[Any, Any], + partition_field_name: Optional[str] = None, + ) -> UpdateSpec: + ref = Reference(source_column_name) + bound_ref = ref.bind(self._table.schema(), self._case_sensitive) + # verify transform can actually bind it + output_type = bound_ref.field.field_type + if not transform.can_transform(output_type): + raise ValueError(f"{transform} cannot transform {output_type} values from {bound_ref.field.name}") + + transform_key = (bound_ref.field.field_id, repr(transform)) + existing_partition_field = self._transform_to_field.get(transform_key) + if existing_partition_field and self._is_duplicate_partition(transform, existing_partition_field): + raise ValueError(f"Duplicate partition field for {ref.name}={ref}, {existing_partition_field} already exists") + + added = self._transform_to_added_field.get(transform_key) + if added: + raise ValueError(f"Already added partition: {added.name}") + + new_field = self._partition_field((bound_ref.field.field_id, transform), partition_field_name) + if new_field.name in self._name_to_added_field: + raise ValueError(f"Already added partition field with name: {new_field.name}") + + if isinstance(new_field.transform, TimeTransform): + existing_time_field = self._added_time_fields.get(new_field.source_id) + if existing_time_field: + raise ValueError(f"Cannot add time partition field: {new_field.name} conflicts with {existing_time_field.name}") + self._added_time_fields[new_field.source_id] = new_field + self._transform_to_added_field[transform_key] = new_field + + existing_partition_field = self._name_to_field.get(new_field.name) + if existing_partition_field and new_field.field_id not in self._deletes: + if isinstance(existing_partition_field.transform, VoidTransform): + self.rename_field( + existing_partition_field.name, existing_partition_field.name + "_" + str(existing_partition_field.field_id) + ) + else: + raise ValueError(f"Cannot add duplicate partition field name: {existing_partition_field.name}") + + self._name_to_added_field[new_field.name] = new_field + self._adds.append(new_field) + return self + + def add_identity(self, source_column_name: str) -> UpdateSpec: + return self.add_field(source_column_name, IdentityTransform(), None) + + def remove_field(self, name: str) ->
UpdateSpec: + added = self._name_to_added_field.get(name) + if added: + raise ValueError(f"Cannot delete newly added field {name}") + renamed = self._renames.get(name) + if renamed: + raise ValueError(f"Cannot rename and delete field {name}") + field = self._name_to_field.get(name) + if not field: + raise ValueError(f"No such partition field: {name}") + + self._deletes.add(field.field_id) + return self + + def rename_field(self, name: str, new_name: str) -> UpdateSpec: + existing_field = self._name_to_field.get(new_name) + if existing_field and isinstance(existing_field.transform, VoidTransform): + return self.rename_field(name, name + "_" + str(existing_field.field_id)) + added = self._name_to_added_field.get(name) + if added: + raise ValueError("Cannot rename recently added partitions") + field = self._name_to_field.get(name) + if not field: + raise ValueError(f"Cannot find partition field {name}") + if field.field_id in self._deletes: + raise ValueError(f"Cannot delete and rename partition field {name}") + self._renames[name] = new_name + return self + + def commit(self) -> None: + new_spec = self._apply() + if self._table.metadata.default_spec_id != new_spec.spec_id: + if new_spec.spec_id not in self._table.specs(): + updates = [AddPartitionSpecUpdate(spec=new_spec), SetDefaultSpecUpdate(spec_id=-1)] + else: + updates = [SetDefaultSpecUpdate(spec_id=new_spec.spec_id)] + + required_last_assigned_partitioned_id = self._table.last_partition_id() + requirements = [AssertLastAssignedPartitionId(last_assigned_partition_id=required_last_assigned_partitioned_id)] + + if self._transaction is not None: + self._transaction._append_updates(*updates) # pylint: disable=W0212 + self._transaction._append_requirements(*requirements) # pylint: disable=W0212 + else: + requirements.append(AssertDefaultSpecId(default_spec_id=self._table.spec().spec_id)) + self._table._do_commit(updates=tuple(updates), requirements=tuple(requirements)) # pylint: disable=W0212 + + def __exit__(self, _: Any, value: Any, traceback: Any) -> None: + """Close and commit the change.""" + return self.commit() + + def __enter__(self) -> UpdateSpec: + """Update the table.""" + return self + + def _apply(self) -> PartitionSpec: + def _check_and_add_partition_name(schema: Schema, name: str, source_id: int, partition_names: Set[str]) -> None: + try: + field = schema.find_field(name) + except ValueError: + field = None + + if source_id is not None and field is not None and field.field_id != source_id: + raise ValueError(f"Cannot create identity partition from a different field in the schema {name}") + elif field is not None and source_id != field.field_id: + raise ValueError(f"Cannot create partition from name that exists in schema {name}") + if not name: + raise ValueError("Undefined name") + if name in partition_names: + raise ValueError(f"Partition name has to be unique: {name}") + partition_names.add(name) + + def _add_new_field( + schema: Schema, source_id: int, field_id: int, name: str, transform: Transform[Any, Any], partition_names: Set[str] + ) -> PartitionField: + _check_and_add_partition_name(schema, name, source_id, partition_names) + return PartitionField(source_id, field_id, transform, name) + + partition_fields = [] + partition_names: Set[str] = set() + for field in self._table.spec().fields: + if field.field_id not in self._deletes: + renamed = self._renames.get(field.name) + if renamed: + new_field = _add_new_field( + self._table.schema(), field.source_id, field.field_id, renamed, field.transform, partition_names + ) + 
else: + new_field = _add_new_field( + self._table.schema(), field.source_id, field.field_id, field.name, field.transform, partition_names + ) + partition_fields.append(new_field) + elif self._table.format_version == 1: + renamed = self._renames.get(field.name) + if renamed: + new_field = _add_new_field( + self._table.schema(), field.source_id, field.field_id, renamed, VoidTransform(), partition_names + ) + else: + new_field = _add_new_field( + self._table.schema(), field.source_id, field.field_id, field.name, VoidTransform(), partition_names + ) + + partition_fields.append(new_field) + + for added_field in self._adds: + new_field = PartitionField( + source_id=added_field.source_id, + field_id=added_field.field_id, + transform=added_field.transform, + name=added_field.name, + ) + partition_fields.append(new_field) + + # Reuse spec id or create a new one. + new_spec = PartitionSpec(*partition_fields) + new_spec_id = INITIAL_PARTITION_SPEC_ID + for spec in self._table.specs().values(): + if new_spec.compatible_with(spec): + new_spec_id = spec.spec_id + break + elif new_spec_id <= spec.spec_id: + new_spec_id = spec.spec_id + 1 + return PartitionSpec(*partition_fields, spec_id=new_spec_id) + + def _partition_field(self, transform_key: Tuple[int, Transform[Any, Any]], name: Optional[str]) -> PartitionField: + if self._table.metadata.format_version == 2: + source_id, transform = transform_key + historical_fields = [] + for spec in self._table.specs().values(): + for field in spec.fields: + historical_fields.append((field.source_id, field.field_id, repr(field.transform), field.name)) + + for field_key in historical_fields: + if field_key[0] == source_id and field_key[2] == repr(transform): + if name is None or field_key[3] == name: + return PartitionField(source_id, field_key[1], transform, name) + + new_field_id = self._new_field_id() + if name is None: + tmp_field = PartitionField(transform_key[0], new_field_id, transform_key[1], 'unassigned_field_name') + name = _visit_partition_field(self._table.schema(), tmp_field, _PartitionNameGenerator()) + return PartitionField(transform_key[0], new_field_id, transform_key[1], name) + + def _new_field_id(self) -> int: + self._last_assigned_partition_id += 1 + return self._last_assigned_partition_id + + def _is_duplicate_partition(self, transform: Transform[Any, Any], partition_field: PartitionField) -> bool: + return partition_field.field_id not in self._deletes and partition_field.transform == transform diff --git a/pyiceberg/table/metadata.py b/pyiceberg/table/metadata.py index a5dfb6ce4c..ea7a02f715 100644 --- a/pyiceberg/table/metadata.py +++ b/pyiceberg/table/metadata.py @@ -310,7 +310,8 @@ def construct_partition_specs(cls, data: Dict[str, Any]) -> Dict[str, Any]: data[PARTITION_SPECS] = [{"field-id": 0, "fields": ()}] data[LAST_PARTITION_ID] = max( - [field.get(FIELD_ID) for spec in data[PARTITION_SPECS] for field in spec[FIELDS]], default=PARTITION_FIELD_ID_START + [field.get(FIELD_ID) for spec in data[PARTITION_SPECS] for field in spec[FIELDS]], + default=PARTITION_FIELD_ID_START - 1, ) return data diff --git a/tests/catalog/test_hive.py b/tests/catalog/test_hive.py index dc2689e0d8..e59b7599bc 100644 --- a/tests/catalog/test_hive.py +++ b/tests/catalog/test_hive.py @@ -277,7 +277,7 @@ def test_create_table(table_schema_simple: Schema, hive_database: HiveDatabase, ) ], current_schema_id=0, - last_partition_id=1000, + last_partition_id=999, properties={"owner": "javaberg", 'write.parquet.compression-codec': 'zstd'}, 
partition_specs=[PartitionSpec()], default_spec_id=0, @@ -330,7 +330,7 @@ def test_create_v1_table(table_schema_simple: Schema, hive_database: HiveDatabas schema=expected_schema, schemas=[expected_schema], current_schema_id=0, - last_partition_id=1000, + last_partition_id=999, properties={"owner": "javaberg", "write.parquet.compression-codec": "zstd"}, partition_spec=[], partition_specs=[expected_spec], diff --git a/tests/integration/test_partition_evolution.py b/tests/integration/test_partition_evolution.py new file mode 100644 index 0000000000..16feef565d --- /dev/null +++ b/tests/integration/test_partition_evolution.py @@ -0,0 +1,490 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# pylint:disable=redefined-outer-name + +import pytest + +from pyiceberg.catalog import Catalog, load_catalog +from pyiceberg.exceptions import NoSuchTableError +from pyiceberg.partitioning import PartitionField, PartitionSpec +from pyiceberg.schema import Schema +from pyiceberg.table import Table +from pyiceberg.transforms import ( + BucketTransform, + DayTransform, + HourTransform, + IdentityTransform, + MonthTransform, + TruncateTransform, + VoidTransform, + YearTransform, +) +from pyiceberg.types import ( + LongType, + NestedField, + StringType, + TimestampType, +) + + +@pytest.fixture() +def catalog_rest() -> Catalog: + return load_catalog( + "local", + **{ + "type": "rest", + "uri": "http://localhost:8181", + "s3.endpoint": "http://localhost:9000", + "s3.access-key-id": "admin", + "s3.secret-access-key": "password", + }, + ) + + +@pytest.fixture() +def catalog_hive() -> Catalog: + return load_catalog( + "local", + **{ + "type": "hive", + "uri": "http://localhost:9083", + "s3.endpoint": "http://localhost:9000", + "s3.access-key-id": "admin", + "s3.secret-access-key": "password", + }, + ) + + +def _simple_table(catalog: Catalog, table_schema_simple: Schema) -> Table: + return _create_table_with_schema(catalog, table_schema_simple, "1") + + +def _table(catalog: Catalog) -> Table: + schema_with_timestamp = Schema( + NestedField(1, "id", LongType(), required=False), + NestedField(2, "event_ts", TimestampType(), required=False), + NestedField(3, "str", StringType(), required=False), + ) + return _create_table_with_schema(catalog, schema_with_timestamp, "1") + + +def _table_v2(catalog: Catalog) -> Table: + schema_with_timestamp = Schema( + NestedField(1, "id", LongType(), required=False), + NestedField(2, "event_ts", TimestampType(), required=False), + NestedField(3, "str", StringType(), required=False), + ) + return _create_table_with_schema(catalog, schema_with_timestamp, "2") + + +def _create_table_with_schema(catalog: Catalog, schema: Schema, format_version: str) -> Table: + tbl_name = "default.test_schema_evolution" + try: + catalog.drop_table(tbl_name) + except 
NoSuchTableError: + pass + return catalog.create_table(identifier=tbl_name, schema=schema, properties={"format-version": format_version}) + + +@pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_add_identity_partition(catalog: Catalog, table_schema_simple: Schema) -> None: + simple_table = _simple_table(catalog, table_schema_simple) + simple_table.update_spec().add_identity("foo").commit() + specs = simple_table.specs() + assert len(specs) == 2 + spec = simple_table.spec() + assert spec.spec_id == 1 + assert spec.last_assigned_field_id == 1000 + + +@pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_add_year(catalog: Catalog) -> None: + table = _table(catalog) + table.update_spec().add_field("event_ts", YearTransform(), "year_transform").commit() + _validate_new_partition_fields(table, 1000, 1, 1000, PartitionField(2, 1000, YearTransform(), "year_transform")) + + +@pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_add_month(catalog: Catalog) -> None: + table = _table(catalog) + table.update_spec().add_field("event_ts", MonthTransform(), "month_transform").commit() + _validate_new_partition_fields(table, 1000, 1, 1000, PartitionField(2, 1000, MonthTransform(), "month_transform")) + + +@pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_add_day(catalog: Catalog) -> None: + table = _table(catalog) + table.update_spec().add_field("event_ts", DayTransform(), "day_transform").commit() + _validate_new_partition_fields(table, 1000, 1, 1000, PartitionField(2, 1000, DayTransform(), "day_transform")) + + +@pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_add_hour(catalog: Catalog) -> None: + table = _table(catalog) + table.update_spec().add_field("event_ts", HourTransform(), "hour_transform").commit() + _validate_new_partition_fields(table, 1000, 1, 1000, PartitionField(2, 1000, HourTransform(), "hour_transform")) + + +@pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_add_bucket(catalog: Catalog, table_schema_simple: Schema) -> None: + simple_table = _create_table_with_schema(catalog, table_schema_simple, "1") + simple_table.update_spec().add_field("foo", BucketTransform(12), "bucket_transform").commit() + _validate_new_partition_fields(simple_table, 1000, 1, 1000, PartitionField(1, 1000, BucketTransform(12), "bucket_transform")) + + +@pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_add_truncate(catalog: Catalog, table_schema_simple: Schema) -> None: + simple_table = _create_table_with_schema(catalog, table_schema_simple, "1") + simple_table.update_spec().add_field("foo", TruncateTransform(1), "truncate_transform").commit() + _validate_new_partition_fields( + simple_table, 1000, 1, 1000, PartitionField(1, 1000, TruncateTransform(1), "truncate_transform") + ) + + +@pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_multiple_adds(catalog: 
Catalog) -> None: + table = _table(catalog) + table.update_spec().add_identity("id").add_field("event_ts", HourTransform(), "hourly_partitioned").add_field( + "str", TruncateTransform(2), "truncate_str" + ).commit() + _validate_new_partition_fields( + table, + 1002, + 1, + 1002, + PartitionField(1, 1000, IdentityTransform(), "id"), + PartitionField(2, 1001, HourTransform(), "hourly_partitioned"), + PartitionField(3, 1002, TruncateTransform(2), "truncate_str"), + ) + + +@pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_add_hour_to_day(catalog: Catalog) -> None: + table = _table(catalog) + table.update_spec().add_field("event_ts", DayTransform(), "daily_partitioned").commit() + table.update_spec().add_field("event_ts", HourTransform(), "hourly_partitioned").commit() + _validate_new_partition_fields( + table, + 1001, + 2, + 1001, + PartitionField(2, 1000, DayTransform(), "daily_partitioned"), + PartitionField(2, 1001, HourTransform(), "hourly_partitioned"), + ) + + +@pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_add_multiple_buckets(catalog: Catalog) -> None: + table = _table(catalog) + table.update_spec().add_field("id", BucketTransform(16)).add_field("id", BucketTransform(4)).commit() + _validate_new_partition_fields( + table, + 1001, + 1, + 1001, + PartitionField(1, 1000, BucketTransform(16), "id_bucket_16"), + PartitionField(1, 1001, BucketTransform(4), "id_bucket_4"), + ) + + +@pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_remove_identity(catalog: Catalog) -> None: + table = _table(catalog) + table.update_spec().add_identity("id").commit() + table.update_spec().remove_field("id").commit() + assert len(table.specs()) == 3 + assert table.spec().spec_id == 2 + assert table.spec() == PartitionSpec( + PartitionField(source_id=1, field_id=1000, transform=VoidTransform(), name='id'), spec_id=2 + ) + + +@pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_remove_identity_v2(catalog: Catalog) -> None: + table_v2 = _table_v2(catalog) + table_v2.update_spec().add_identity("id").commit() + table_v2.update_spec().remove_field("id").commit() + assert len(table_v2.specs()) == 2 + assert table_v2.spec().spec_id == 0 + assert table_v2.spec() == PartitionSpec(spec_id=0) + + +@pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_remove_bucket(catalog: Catalog) -> None: + table = _table(catalog) + with table.update_spec() as update: + update.add_field("id", BucketTransform(16), "bucketed_id") + update.add_field("event_ts", DayTransform(), "day_ts") + with table.update_spec() as remove: + remove.remove_field("bucketed_id") + + assert len(table.specs()) == 3 + _validate_new_partition_fields( + table, + 1001, + 2, + 1001, + PartitionField(source_id=1, field_id=1000, transform=VoidTransform(), name='bucketed_id'), + PartitionField(source_id=2, field_id=1001, transform=DayTransform(), name='day_ts'), + ) + + +@pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_remove_bucket_v2(catalog: Catalog) -> None: + table_v2 = _table_v2(catalog) + 
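+    # On a format-version 2 table, a removed partition field is dropped from the spec entirely;
+    # v1 tables (see test_remove_bucket above) must keep it as a void transform instead.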
with table_v2.update_spec() as update: + update.add_field("id", BucketTransform(16), "bucketed_id") + update.add_field("event_ts", DayTransform(), "day_ts") + with table_v2.update_spec() as remove: + remove.remove_field("bucketed_id") + assert len(table_v2.specs()) == 3 + _validate_new_partition_fields( + table_v2, 1001, 2, 1001, PartitionField(source_id=2, field_id=1001, transform=DayTransform(), name='day_ts') + ) + + +@pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_remove_day(catalog: Catalog) -> None: + table = _table(catalog) + with table.update_spec() as update: + update.add_field("id", BucketTransform(16), "bucketed_id") + update.add_field("event_ts", DayTransform(), "day_ts") + with table.update_spec() as remove: + remove.remove_field("day_ts") + + assert len(table.specs()) == 3 + _validate_new_partition_fields( + table, + 1001, + 2, + 1001, + PartitionField(source_id=1, field_id=1000, transform=BucketTransform(16), name='bucketed_id'), + PartitionField(source_id=2, field_id=1001, transform=VoidTransform(), name='day_ts'), + ) + + +@pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_remove_day_v2(catalog: Catalog) -> None: + table_v2 = _table_v2(catalog) + with table_v2.update_spec() as update: + update.add_field("id", BucketTransform(16), "bucketed_id") + update.add_field("event_ts", DayTransform(), "day_ts") + with table_v2.update_spec() as remove: + remove.remove_field("day_ts") + assert len(table_v2.specs()) == 3 + _validate_new_partition_fields( + table_v2, 1000, 2, 1001, PartitionField(source_id=1, field_id=1000, transform=BucketTransform(16), name='bucketed_id') + ) + + +@pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_rename(catalog: Catalog) -> None: + table = _table(catalog) + table.update_spec().add_identity("id").commit() + table.update_spec().rename_field("id", "sharded_id").commit() + assert len(table.specs()) == 3 + assert table.spec().spec_id == 2 + _validate_new_partition_fields(table, 1000, 2, 1000, PartitionField(1, 1000, IdentityTransform(), "sharded_id")) + + +@pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_cannot_add_and_remove(catalog: Catalog) -> None: + table = _table(catalog) + with pytest.raises(ValueError) as exc_info: + table.update_spec().add_identity("id").remove_field("id").commit() + assert "Cannot delete newly added field id" in str(exc_info.value) + + +@pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_cannot_add_redundant_time_partition(catalog: Catalog) -> None: + table = _table(catalog) + with pytest.raises(ValueError) as exc_info: + table.update_spec().add_field("event_ts", YearTransform(), "year_transform").add_field( + "event_ts", HourTransform(), "hour_transform" + ).commit() + assert "Cannot add time partition field: hour_transform conflicts with year_transform" in str(exc_info.value) + + +@pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_cannot_delete_and_rename(catalog: Catalog) -> None: + table = _table(catalog) + with pytest.raises(ValueError) as exc_info: 
+ table.update_spec().add_identity("id").commit() + table.update_spec().remove_field("id").rename_field("id", "sharded_id").commit() + assert "Cannot delete and rename partition field id" in str(exc_info.value) + + +@pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_cannot_rename_and_delete(catalog: Catalog) -> None: + table = _table(catalog) + with pytest.raises(ValueError) as exc_info: + table.update_spec().add_identity("id").commit() + table.update_spec().rename_field("id", "sharded_id").remove_field("id").commit() + assert "Cannot rename and delete field id" in str(exc_info.value) + + +@pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_cannot_add_same_transform_for_same_field(catalog: Catalog) -> None: + table = _table(catalog) + with pytest.raises(ValueError) as exc_info: + table.update_spec().add_field("str", TruncateTransform(4), "truncated_str").add_field( + "str", TruncateTransform(4) + ).commit() + assert "Already added partition" in str(exc_info.value) + + +@pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_cannot_add_same_field_multiple_times(catalog: Catalog) -> None: + table = _table(catalog) + with pytest.raises(ValueError) as exc_info: + table.update_spec().add_field("id", IdentityTransform(), "duplicate").add_field( + "id", IdentityTransform(), "duplicate" + ).commit() + assert "Already added partition" in str(exc_info.value) + + +@pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_cannot_add_multiple_specs_same_name(catalog: Catalog) -> None: + table = _table(catalog) + with pytest.raises(ValueError) as exc_info: + table.update_spec().add_field("id", IdentityTransform(), "duplicate").add_field( + "event_ts", IdentityTransform(), "duplicate" + ).commit() + assert "Already added partition" in str(exc_info.value) + + +@pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_change_specs_and_schema_transaction(catalog: Catalog) -> None: + table = _table(catalog) + with table.transaction() as transaction: + with transaction.update_spec() as update_spec: + update_spec.add_identity("id").add_field("event_ts", HourTransform(), "hourly_partitioned").add_field( + "str", TruncateTransform(2), "truncate_str" + ) + + with transaction.update_schema() as update_schema: + update_schema.add_column("col_string", StringType()) + + _validate_new_partition_fields( + table, + 1002, + 1, + 1002, + PartitionField(1, 1000, IdentityTransform(), "id"), + PartitionField(2, 1001, HourTransform(), "hourly_partitioned"), + PartitionField(3, 1002, TruncateTransform(2), "truncate_str"), + ) + + assert table.schema() == Schema( + NestedField(field_id=1, name='id', field_type=LongType(), required=False), + NestedField(field_id=2, name='event_ts', field_type=TimestampType(), required=False), + NestedField(field_id=3, name='str', field_type=StringType(), required=False), + NestedField(field_id=4, name='col_string', field_type=StringType(), required=False), + schema_id=1, + identifier_field_ids=[], + ) + + +@pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), 
pytest.lazy_fixture('catalog_rest')]) +def test_multiple_adds_and_remove_v1(catalog: Catalog) -> None: + table = _table(catalog) + with table.update_spec() as update: + update.add_field("id", BucketTransform(16), "bucketed_id") + update.add_field("event_ts", DayTransform(), "day_ts") + with table.update_spec() as update: + update.remove_field("day_ts").remove_field("bucketed_id") + with table.update_spec() as update: + update.add_field("str", TruncateTransform(2), "truncated_str") + _validate_new_partition_fields( + table, + 1002, + 3, + 1002, + PartitionField(1, 1000, VoidTransform(), "bucketed_id"), + PartitionField(2, 1001, VoidTransform(), "day_ts"), + PartitionField(3, 1002, TruncateTransform(2), "truncated_str"), + ) + + +@pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_multiple_adds_and_remove_v2(catalog: Catalog) -> None: + table_v2 = _table_v2(catalog) + with table_v2.update_spec() as update: + update.add_field("id", BucketTransform(16), "bucketed_id") + update.add_field("event_ts", DayTransform(), "day_ts") + with table_v2.update_spec() as update: + update.remove_field("day_ts").remove_field("bucketed_id") + with table_v2.update_spec() as update: + update.add_field("str", TruncateTransform(2), "truncated_str") + _validate_new_partition_fields(table_v2, 1002, 2, 1002, PartitionField(3, 1002, TruncateTransform(2), "truncated_str")) + + +@pytest.mark.integration +@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_multiple_remove_and_add_reuses_v2(catalog: Catalog) -> None: + table_v2 = _table_v2(catalog) + with table_v2.update_spec() as update: + update.add_field("id", BucketTransform(16), "bucketed_id") + update.add_field("event_ts", DayTransform(), "day_ts") + with table_v2.update_spec() as update: + update.remove_field("day_ts").remove_field("bucketed_id") + with table_v2.update_spec() as update: + update.add_field("id", BucketTransform(16), "bucketed_id") + _validate_new_partition_fields(table_v2, 1000, 2, 1001, PartitionField(1, 1000, BucketTransform(16), "bucketed_id")) + + +def _validate_new_partition_fields( + table: Table, + expected_spec_last_assigned_field_id: int, + expected_spec_id: int, + expected_metadata_last_assigned_field_id: int, + *expected_partition_fields: PartitionField, +) -> None: + spec = table.spec() + assert spec.spec_id == expected_spec_id + assert spec.last_assigned_field_id == expected_spec_last_assigned_field_id + assert table.last_partition_id() == expected_metadata_last_assigned_field_id + assert len(spec.fields) == len(expected_partition_fields) + for i in range(len(spec.fields)): + assert spec.fields[i] == expected_partition_fields[i] From 1f433a46ca59a896014369acfd8187bed4ca4fa2 Mon Sep 17 00:00:00 2001 From: Fokko Driesprong Date: Thu, 29 Feb 2024 09:31:29 +0100 Subject: [PATCH 141/169] Fix retrying logic (#480) --- pyiceberg/catalog/rest.py | 8 +++--- tests/catalog/test_rest.py | 58 +++++++++++++++++++++++++------------- 2 files changed, 43 insertions(+), 23 deletions(-) diff --git a/pyiceberg/catalog/rest.py b/pyiceberg/catalog/rest.py index 6b952ef06d..3132af9fcd 100644 --- a/pyiceberg/catalog/rest.py +++ b/pyiceberg/catalog/rest.py @@ -128,7 +128,7 @@ def _retry_hook(retry_state: RetryCallState) -> None: _RETRY_ARGS = { "retry": retry_if_exception_type(AuthorizationExpiredError), "stop": stop_after_attempt(2), - "before": _retry_hook, + "before_sleep": _retry_hook, 
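+    # tenacity runs `before` callbacks ahead of every attempt, including the
+    # first one; `before_sleep` fires only after a failed attempt, so the
+    # token refresh in _retry_hook now happens exactly when a retry is made.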
"reraise": True, } @@ -446,10 +446,10 @@ def _response_to_table(self, identifier_tuple: Tuple[str, ...], table_response: catalog=self, ) - def _refresh_token(self, session: Optional[Session] = None, new_token: Optional[str] = None) -> None: + def _refresh_token(self, session: Optional[Session] = None, initial_token: Optional[str] = None) -> None: session = session or self._session - if new_token is not None: - self.properties[TOKEN] = new_token + if initial_token is not None: + self.properties[TOKEN] = initial_token elif CREDENTIAL in self.properties: self.properties[TOKEN] = self._fetch_access_token(session, self.properties[CREDENTIAL]) diff --git a/tests/catalog/test_rest.py b/tests/catalog/test_rest.py index fa35ec4548..21b4d95556 100644 --- a/tests/catalog/test_rest.py +++ b/tests/catalog/test_rest.py @@ -306,24 +306,39 @@ def test_list_namespace_with_parent_200(rest_mock: Mocker) -> None: ] -def test_list_namespaces_419(rest_mock: Mocker) -> None: +def test_list_namespaces_token_expired(rest_mock: Mocker) -> None: new_token = "new_jwt_token" new_header = dict(TEST_HEADERS) new_header["Authorization"] = f"Bearer {new_token}" - rest_mock.post( + namespaces = rest_mock.register_uri( + "GET", f"{TEST_URI}v1/namespaces", - json={ - "error": { - "message": "Authorization expired.", - "type": "AuthorizationExpiredError", - "code": 419, - } - }, - status_code=419, - request_headers=TEST_HEADERS, - ) - rest_mock.post( + [ + { + "status_code": 419, + "json": { + "error": { + "message": "Authorization expired.", + "type": "AuthorizationExpiredError", + "code": 419, + } + }, + "headers": TEST_HEADERS, + }, + { + "status_code": 200, + "json": {"namespaces": [["default"], ["examples"], ["fokko"], ["system"]]}, + "headers": new_header, + }, + { + "status_code": 200, + "json": {"namespaces": [["default"], ["examples"], ["fokko"], ["system"]]}, + "headers": new_header, + }, + ], + ) + tokens = rest_mock.post( f"{TEST_URI}v1/oauth/tokens", json={ "access_token": new_token, @@ -333,12 +348,6 @@ def test_list_namespaces_419(rest_mock: Mocker) -> None: }, status_code=200, ) - rest_mock.get( - f"{TEST_URI}v1/namespaces", - json={"namespaces": [["default"], ["examples"], ["fokko"], ["system"]]}, - status_code=200, - request_headers=new_header, - ) catalog = RestCatalog("rest", uri=TEST_URI, token=TEST_TOKEN, credential=TEST_CREDENTIALS) assert catalog.list_namespaces() == [ ("default",), @@ -346,6 +355,17 @@ def test_list_namespaces_419(rest_mock: Mocker) -> None: ("fokko",), ("system",), ] + assert namespaces.call_count == 2 + assert tokens.call_count == 1 + + assert catalog.list_namespaces() == [ + ("default",), + ("examples",), + ("fokko",), + ("system",), + ] + assert namespaces.call_count == 3 + assert tokens.call_count == 1 def test_create_namespace_200(rest_mock: Mocker) -> None: From afd7bf2abccb8d09e5dfa4b1e64663e3dd5ab762 Mon Sep 17 00:00:00 2001 From: Kevin Liu Date: Thu, 29 Feb 2024 00:43:04 -0800 Subject: [PATCH 142/169] Remove unused catalog from integration test (#481) --- tests/integration/test_writes.py | 24 +----------------------- 1 file changed, 1 insertion(+), 23 deletions(-) diff --git a/tests/integration/test_writes.py b/tests/integration/test_writes.py index fa5e93d925..a16ba48a27 100644 --- a/tests/integration/test_writes.py +++ b/tests/integration/test_writes.py @@ -33,7 +33,7 @@ from pyiceberg.catalog import Catalog, Properties, Table, load_catalog from pyiceberg.catalog.sql import SqlCatalog -from pyiceberg.exceptions import NamespaceAlreadyExistsError, NoSuchTableError +from 
pyiceberg.exceptions import NoSuchTableError from pyiceberg.schema import Schema from pyiceberg.table import _dataframe_to_data_files from pyiceberg.types import ( @@ -51,28 +51,6 @@ TimestamptzType, ) - -@pytest.fixture() -def catalog() -> Catalog: - catalog = load_catalog( - "local", - **{ - "type": "rest", - "uri": "http://localhost:8181", - "s3.endpoint": "http://localhost:9000", - "s3.access-key-id": "admin", - "s3.secret-access-key": "password", - }, - ) - - try: - catalog.create_namespace("default") - except NamespaceAlreadyExistsError: - pass - - return catalog - - TEST_DATA_WITH_NULL = { 'bool': [False, None, True], 'string': ['a', None, 'z'], From a1c6a5a9c8015983064519697fd7763d22805c4f Mon Sep 17 00:00:00 2001 From: Kevin Liu Date: Thu, 29 Feb 2024 02:50:53 -0800 Subject: [PATCH 143/169] Github Action to check links in documentation (#324) * add github add to check md link * Only run under `mkdocs/**` * ws * make lint --------- Co-authored-by: Fokko Driesprong --- .github/workflows/check-md-link.yml | 13 +++++++++++++ 1 file changed, 13 insertions(+) create mode 100644 .github/workflows/check-md-link.yml diff --git a/.github/workflows/check-md-link.yml b/.github/workflows/check-md-link.yml new file mode 100644 index 0000000000..b91195e98a --- /dev/null +++ b/.github/workflows/check-md-link.yml @@ -0,0 +1,13 @@ +name: Check Markdown links + +on: + push: + paths: + - mkdocs/** + +jobs: + markdown-link-check: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@master + - uses: gaurav-nelson/github-action-markdown-link-check@v1 From 0d22add2f81b8236aa031615d2df7395a63187b0 Mon Sep 17 00:00:00 2001 From: Anupam Saini <4581797+anupam-saini@users.noreply.github.com> Date: Thu, 29 Feb 2024 06:13:36 -0500 Subject: [PATCH 144/169] Add logic for Sort Order updates (#476) * Implement sort order update * Cleanup * Add test * Add test * Update pyiceberg/table/metadata.py Co-authored-by: Fokko Driesprong * Lint * Nits * Nits --------- Co-authored-by: Fokko Driesprong --- pyiceberg/table/__init__.py | 37 +++++++++++++++++++++++++++++++++++++ pyiceberg/table/metadata.py | 4 ++++ pyiceberg/table/sorting.py | 6 ++++-- tests/table/test_init.py | 22 ++++++++++++++++++++++ 4 files changed, 67 insertions(+), 2 deletions(-) diff --git a/pyiceberg/table/__init__.py b/pyiceberg/table/__init__.py index e49f9400fe..e29369a26c 100644 --- a/pyiceberg/table/__init__.py +++ b/pyiceberg/table/__init__.py @@ -560,6 +560,13 @@ def is_added_schema(self, schema_id: int) -> bool: update.schema_.schema_id == schema_id for update in self._updates if update.action == TableUpdateAction.add_schema ) + def is_added_sort_order(self, sort_order_id: int) -> bool: + return any( + update.sort_order.order_id == sort_order_id + for update in self._updates + if update.action == TableUpdateAction.add_sort_order + ) + @singledispatch def _apply_table_update(update: TableUpdate, base_metadata: TableMetadata, context: _TableMetadataUpdateContext) -> TableMetadata: @@ -759,6 +766,36 @@ def _(update: SetSnapshotRefUpdate, base_metadata: TableMetadata, context: _Tabl return base_metadata.model_copy(update=metadata_updates) +@_apply_table_update.register(AddSortOrderUpdate) +def _(update: AddSortOrderUpdate, base_metadata: TableMetadata, context: _TableMetadataUpdateContext) -> TableMetadata: + context.add_update(update) + return base_metadata.model_copy( + update={ + "sort_orders": base_metadata.sort_orders + [update.sort_order], + } + ) + + +@_apply_table_update.register(SetDefaultSortOrderUpdate) +def _(update: 
SetDefaultSortOrderUpdate, base_metadata: TableMetadata, context: _TableMetadataUpdateContext) -> TableMetadata: + new_sort_order_id = update.sort_order_id + if new_sort_order_id == -1: + # The last added sort order should be in base_metadata.sort_orders at this point + new_sort_order_id = max(sort_order.order_id for sort_order in base_metadata.sort_orders) + if not context.is_added_sort_order(new_sort_order_id): + raise ValueError("Cannot set current sort order to the last added one when no sort order has been added") + + if new_sort_order_id == base_metadata.default_sort_order_id: + return base_metadata + + sort_order = base_metadata.sort_order_by_id(new_sort_order_id) + if sort_order is None: + raise ValueError(f"Sort order with id {new_sort_order_id} does not exist") + + context.add_update(update) + return base_metadata.model_copy(update={"default_sort_order_id": new_sort_order_id}) + + def update_table_metadata(base_metadata: TableMetadata, updates: Tuple[TableUpdate, ...]) -> TableMetadata: """Update the table metadata with the given updates in one transaction. diff --git a/pyiceberg/table/metadata.py b/pyiceberg/table/metadata.py index ea7a02f715..2fd8b13af4 100644 --- a/pyiceberg/table/metadata.py +++ b/pyiceberg/table/metadata.py @@ -226,6 +226,10 @@ def schema_by_id(self, schema_id: int) -> Optional[Schema]: """Get the schema by schema_id.""" return next((schema for schema in self.schemas if schema.schema_id == schema_id), None) + def sort_order_by_id(self, sort_order_id: int) -> Optional[SortOrder]: + """Get the sort order by sort_order_id.""" + return next((sort_order for sort_order in self.sort_orders if sort_order.order_id == sort_order_id), None) + class TableMetadataV1(TableMetadataCommonFields, IcebergBaseModel): """Represents version 1 of the Table Metadata. 
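A note on the `-1` sentinel handled above: `SetDefaultSortOrderUpdate(sort_order_id=-1)` means "promote the sort order that was added earlier in the same set of updates". A minimal, self-contained sketch of that resolution rule; the dataclass and function names here are illustrative stand-ins, not pyiceberg APIs:

from dataclasses import dataclass
from typing import List, Set


@dataclass
class StubSortOrder:  # stand-in for pyiceberg.table.sorting.SortOrder
    order_id: int


def resolve_default_sort_order_id(requested_id: int, sort_orders: List[StubSortOrder], added_ids: Set[int]) -> int:
    if requested_id == -1:
        # -1 resolves to the highest order id, which must have been added in this commit
        requested_id = max(order.order_id for order in sort_orders)
        if requested_id not in added_ids:
            raise ValueError("Cannot set current sort order to the last added one when no sort order has been added")
    if all(order.order_id != requested_id for order in sort_orders):
        raise ValueError(f"Sort order with id {requested_id} does not exist")
    return requested_id


# e.g. AddSortOrderUpdate(sort_order=...) followed by SetDefaultSortOrderUpdate(sort_order_id=-1):
assert resolve_default_sort_order_id(-1, [StubSortOrder(0), StubSortOrder(1)], added_ids={1}) == 1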
diff --git a/pyiceberg/table/sorting.py b/pyiceberg/table/sorting.py index f970d687fa..f74d5bc701 100644 --- a/pyiceberg/table/sorting.py +++ b/pyiceberg/table/sorting.py @@ -168,7 +168,9 @@ def __repr__(self) -> str: UNSORTED_SORT_ORDER = SortOrder(order_id=UNSORTED_SORT_ORDER_ID) -def assign_fresh_sort_order_ids(sort_order: SortOrder, old_schema: Schema, fresh_schema: Schema) -> SortOrder: +def assign_fresh_sort_order_ids( + sort_order: SortOrder, old_schema: Schema, fresh_schema: Schema, sort_order_id: int = INITIAL_SORT_ORDER_ID +) -> SortOrder: if sort_order.is_unsorted: return UNSORTED_SORT_ORDER @@ -189,4 +191,4 @@ def assign_fresh_sort_order_ids(sort_order: SortOrder, old_schema: Schema, fresh ) ) - return SortOrder(*fresh_fields, order_id=INITIAL_SORT_ORDER_ID) + return SortOrder(*fresh_fields, order_id=sort_order_id) diff --git a/tests/table/test_init.py b/tests/table/test_init.py index 7bf65a67da..39aa72f8db 100644 --- a/tests/table/test_init.py +++ b/tests/table/test_init.py @@ -43,6 +43,7 @@ from pyiceberg.schema import Schema from pyiceberg.table import ( AddSnapshotUpdate, + AddSortOrderUpdate, AssertCreate, AssertCurrentSchemaId, AssertDefaultSortOrderId, @@ -52,6 +53,7 @@ AssertRefSnapshotId, AssertTableUUID, RemovePropertiesUpdate, + SetDefaultSortOrderUpdate, SetPropertiesUpdate, SetSnapshotRefUpdate, SnapshotRef, @@ -664,6 +666,26 @@ def test_update_metadata_set_snapshot_ref(table_v2: Table) -> None: ) +def test_update_metadata_add_update_sort_order(table_v2: Table) -> None: + new_sort_order = SortOrder(order_id=table_v2.sort_order().order_id + 1) + new_metadata = update_table_metadata( + table_v2.metadata, + (AddSortOrderUpdate(sort_order=new_sort_order), SetDefaultSortOrderUpdate(sort_order_id=-1)), + ) + assert len(new_metadata.sort_orders) == 2 + assert new_metadata.sort_orders[-1] == new_sort_order + assert new_metadata.default_sort_order_id == new_sort_order.order_id + + +def test_update_metadata_update_sort_order_invalid(table_v2: Table) -> None: + with pytest.raises(ValueError, match="Cannot set current sort order to the last added one when no sort order has been added"): + update_table_metadata(table_v2.metadata, (SetDefaultSortOrderUpdate(sort_order_id=-1),)) + + invalid_order_id = 10 + with pytest.raises(ValueError, match=f"Sort order with id {invalid_order_id} does not exist"): + update_table_metadata(table_v2.metadata, (SetDefaultSortOrderUpdate(sort_order_id=invalid_order_id),)) + + def test_update_metadata_with_multiple_updates(table_v1: Table) -> None: base_metadata = table_v1.metadata transaction = table_v1.transaction() From 9a5bb07ca1a2ddda04310a23a2136fc6ae9a7515 Mon Sep 17 00:00:00 2001 From: Yufei Gu Date: Thu, 29 Feb 2024 03:39:27 -0800 Subject: [PATCH 145/169] Make `issued_token_type` optional to support OAuth2 Client Credential Flow (#466) * Make issued_token_type optional to support OAuth2 Client Credential Flow * Fix the style issue * Resolve comments * Resolve comments --------- Co-authored-by: yufei --- pyiceberg/catalog/rest.py | 6 ++++-- tests/catalog/test_rest.py | 18 ++++++++++++++++++ 2 files changed, 22 insertions(+), 2 deletions(-) diff --git a/pyiceberg/catalog/rest.py b/pyiceberg/catalog/rest.py index 3132af9fcd..e0e88ec450 100644 --- a/pyiceberg/catalog/rest.py +++ b/pyiceberg/catalog/rest.py @@ -157,8 +157,10 @@ class RegisterTableRequest(IcebergBaseModel): class TokenResponse(IcebergBaseModel): access_token: str = Field() token_type: str = Field() - expires_in: int = Field() - issued_token_type: str = Field() + expires_in: 
Optional[int] = Field(default=None) + issued_token_type: Optional[str] = Field(default=None) + refresh_token: Optional[str] = Field(default=None) + scope: Optional[str] = Field(default=None) class ConfigResponse(IcebergBaseModel): diff --git a/tests/catalog/test_rest.py b/tests/catalog/test_rest.py index 21b4d95556..c4668a71ec 100644 --- a/tests/catalog/test_rest.py +++ b/tests/catalog/test_rest.py @@ -108,6 +108,24 @@ def test_token_200(rest_mock: Mocker) -> None: "token_type": "Bearer", "expires_in": 86400, "issued_token_type": "urn:ietf:params:oauth:token-type:access_token", + "scope": "openid offline", + "refresh_token": "refresh_token", }, status_code=200, request_headers=OAUTH_TEST_HEADERS, ) assert ( RestCatalog("rest", uri=TEST_URI, credential=TEST_CREDENTIALS)._session.headers["Authorization"] # pylint: disable=W0212 == f"Bearer {TEST_TOKEN}" ) + + +def test_token_200_without_optional_fields(rest_mock: Mocker) -> None: + rest_mock.post( f"{TEST_URI}v1/oauth/tokens", json={ "access_token": TEST_TOKEN, + "token_type": "Bearer", }, status_code=200, request_headers=OAUTH_TEST_HEADERS, From 6708a6eaa76a2b4aab58601f10210f778b3f03a4 Mon Sep 17 00:00:00 2001 From: Fokko Driesprong Date: Thu, 29 Feb 2024 13:10:42 +0100 Subject: [PATCH 146/169] Update table metadata throughout transaction (#471) * Update table metadata throughout transaction This PR adds support for updating the table metadata throughout the transaction. This way, if a schema is first evolved, and then a snapshot is created based on the latest schema, it will be able to find the schema. * Fix integration tests * Thanks Honah! * Include the partition evolution * Cleanup --- pyiceberg/io/pyarrow.py | 21 +- pyiceberg/table/__init__.py | 537 ++++++++++++-------------- pyiceberg/table/metadata.py | 43 +++ tests/catalog/test_sql.py | 52 +++ tests/integration/test_rest_schema.py | 23 +- tests/integration/test_writes.py | 2 +- tests/table/test_init.py | 15 +- tests/test_schema.py | 42 +- 8 files changed, 396 insertions(+), 339 deletions(-) diff --git a/pyiceberg/io/pyarrow.py b/pyiceberg/io/pyarrow.py index d41f7a07a5..be944ffb36 100644 --- a/pyiceberg/io/pyarrow.py +++ b/pyiceberg/io/pyarrow.py @@ -125,6 +125,7 @@ visit_with_partner, ) from pyiceberg.table import PropertyUtil, TableProperties, WriteTask +from pyiceberg.table.metadata import TableMetadata from pyiceberg.table.name_mapping import NameMapping from pyiceberg.transforms import TruncateTransform from pyiceberg.typedef import EMPTY_DICT, Properties, Record @@ -1720,7 +1721,7 @@ def fill_parquet_file_metadata( data_file.split_offsets = split_offsets -def write_file(table: Table, tasks: Iterator[WriteTask], file_schema: Optional[Schema] = None) -> Iterator[DataFile]: +def write_file(io: FileIO, table_metadata: TableMetadata, tasks: Iterator[WriteTask]) -> Iterator[DataFile]: task = next(tasks) try: @@ -1730,15 +1731,15 @@ def write_file(table: Table, tasks: Iterator[WriteTask], file_schema: Optional[S except StopIteration: pass - parquet_writer_kwargs = _get_parquet_writer_kwargs(table.properties) + parquet_writer_kwargs = _get_parquet_writer_kwargs(table_metadata.properties) - file_path = f'{table.location()}/data/{task.generate_data_file_filename("parquet")}' - file_schema = file_schema or table.schema() - arrow_file_schema = schema_to_pyarrow(file_schema) + file_path = f'{table_metadata.location}/data/{task.generate_data_file_filename("parquet")}' + schema = table_metadata.schema() + arrow_file_schema = schema_to_pyarrow(schema) - fo = 
table.io.new_output(file_path) + fo = io.new_output(file_path) row_group_size = PropertyUtil.property_as_int( - properties=table.properties, + properties=table_metadata.properties, property_name=TableProperties.PARQUET_ROW_GROUP_SIZE_BYTES, default=TableProperties.PARQUET_ROW_GROUP_SIZE_BYTES_DEFAULT, ) @@ -1757,7 +1758,7 @@ def write_file(table: Table, tasks: Iterator[WriteTask], file_schema: Optional[S # sort_order_id=task.sort_order_id, sort_order_id=None, # Just copy these from the table for now - spec_id=table.spec().spec_id, + spec_id=table_metadata.default_spec_id, equality_ids=None, key_metadata=None, ) @@ -1765,8 +1766,8 @@ def write_file(table: Table, tasks: Iterator[WriteTask], file_schema: Optional[S fill_parquet_file_metadata( data_file=data_file, parquet_metadata=writer.writer.metadata, - stats_columns=compute_statistics_plan(file_schema, table.properties), - parquet_column_mapping=parquet_path_to_id_mapping(file_schema), + stats_columns=compute_statistics_plan(schema, table_metadata.properties), + parquet_column_mapping=parquet_path_to_id_mapping(schema), ) return iter([data_file]) diff --git a/pyiceberg/table/__init__.py b/pyiceberg/table/__init__.py index e29369a26c..1a4183c914 100644 --- a/pyiceberg/table/__init__.py +++ b/pyiceberg/table/__init__.py @@ -31,6 +31,7 @@ Any, Callable, Dict, + Generic, Iterable, List, Literal, @@ -137,6 +138,7 @@ from pyiceberg.catalog import Catalog + ALWAYS_TRUE = AlwaysTrue() TABLE_ROOT_ID = -1 @@ -229,18 +231,23 @@ def property_as_int(properties: Dict[str, str], property_name: str, default: Opt class Transaction: _table: Table + table_metadata: TableMetadata + _autocommit: bool _updates: Tuple[TableUpdate, ...] _requirements: Tuple[TableRequirement, ...] - def __init__( - self, - table: Table, - actions: Optional[Tuple[TableUpdate, ...]] = None, - requirements: Optional[Tuple[TableRequirement, ...]] = None, - ): + def __init__(self, table: Table, autocommit: bool = False): + """Open a transaction to stage and commit changes to a table. + + Args: + table: The table that will be altered. + autocommit: Option to automatically commit the changes when they are staged. + """ + self.table_metadata = table.metadata self._table = table - self._updates = actions or () - self._requirements = requirements or () + self._autocommit = autocommit + self._updates = () + self._requirements = () def __enter__(self) -> Transaction: """Start a transaction to update the table.""" @@ -248,49 +255,23 @@ def __enter__(self) -> Transaction: def __exit__(self, _: Any, value: Any, traceback: Any) -> None: """Close and commit the transaction.""" - fresh_table = self.commit_transaction() - # Update the new data in place - self._table.metadata = fresh_table.metadata - self._table.metadata_location = fresh_table.metadata_location + self.commit_transaction() - def _append_updates(self, *new_updates: TableUpdate) -> Transaction: - """Append updates to the set of staged updates. + def _apply(self, updates: Tuple[TableUpdate, ...], requirements: Tuple[TableRequirement, ...] = ()) -> Transaction: + """Check if the requirements are met, and applies the updates to the metadata.""" + for requirement in requirements: + requirement.validate(self.table_metadata) - Args: - *new_updates: Any new updates. + self._updates += updates + self._requirements += requirements - Raises: - ValueError: When the type of update is not unique. + self.table_metadata = update_table_metadata(self.table_metadata, updates) - Returns: - Transaction object with the new updates appended. 
- """ - for new_update in new_updates: - # explicitly get type of new_update as new_update is an instantiated class - type_new_update = type(new_update) - if any(isinstance(update, type_new_update) for update in self._updates): - raise ValueError(f"Updates in a single commit need to be unique, duplicate: {type_new_update}") - self._updates = self._updates + new_updates - return self + if self._autocommit: + self.commit_transaction() + self._updates = () + self._requirements = () - def _append_requirements(self, *new_requirements: TableRequirement) -> Transaction: - """Append requirements to the set of staged requirements. - - Args: - *new_requirements: Any new requirements. - - Raises: - ValueError: When the type of requirement is not unique. - - Returns: - Transaction object with the new requirements appended. - """ - for new_requirement in new_requirements: - # explicitly get type of new_update as requirement is an instantiated class - type_new_requirement = type(new_requirement) - if any(isinstance(requirement, type_new_requirement) for requirement in self._requirements): - raise ValueError(f"Requirements in a single commit need to be unique, duplicate: {type_new_requirement}") - self._requirements = self._requirements + new_requirements return self def upgrade_table_version(self, format_version: Literal[1, 2]) -> Transaction: @@ -307,10 +288,11 @@ def upgrade_table_version(self, format_version: Literal[1, 2]) -> Transaction: if format_version < self._table.metadata.format_version: raise ValueError(f"Cannot downgrade v{self._table.metadata.format_version} table to v{format_version}") + if format_version > self._table.metadata.format_version: - return self._append_updates(UpgradeFormatVersionUpdate(format_version=format_version)) - else: - return self + return self._apply((UpgradeFormatVersionUpdate(format_version=format_version),)) + + return self def set_properties(self, **updates: str) -> Transaction: """Set properties. @@ -323,56 +305,19 @@ def set_properties(self, **updates: str) -> Transaction: Returns: The alter table builder. """ - return self._append_updates(SetPropertiesUpdate(updates=updates)) - - def add_snapshot(self, snapshot: Snapshot) -> Transaction: - """Add a new snapshot to the table. - - Returns: - The transaction with the add-snapshot staged. - """ - self._append_updates(AddSnapshotUpdate(snapshot=snapshot)) - self._append_requirements(AssertTableUUID(uuid=self._table.metadata.table_uuid)) - - return self - - def set_ref_snapshot( - self, - snapshot_id: int, - parent_snapshot_id: Optional[int], - ref_name: str, - type: str, - max_age_ref_ms: Optional[int] = None, - max_snapshot_age_ms: Optional[int] = None, - min_snapshots_to_keep: Optional[int] = None, - ) -> Transaction: - """Update a ref to a snapshot. + return self._apply((SetPropertiesUpdate(updates=updates),)) - Returns: - The transaction with the set-snapshot-ref staged - """ - self._append_updates( - SetSnapshotRefUpdate( - snapshot_id=snapshot_id, - parent_snapshot_id=parent_snapshot_id, - ref_name=ref_name, - type=type, - max_age_ref_ms=max_age_ref_ms, - max_snapshot_age_ms=max_snapshot_age_ms, - min_snapshots_to_keep=min_snapshots_to_keep, - ) - ) - - self._append_requirements(AssertRefSnapshotId(snapshot_id=parent_snapshot_id, ref="main")) - return self - - def update_schema(self) -> UpdateSchema: + def update_schema(self, allow_incompatible_changes: bool = False, case_sensitive: bool = True) -> UpdateSchema: """Create a new UpdateSchema to alter the columns of this table. 
+ Args: + allow_incompatible_changes: If changes are allowed that might break downstream consumers. + case_sensitive: If field names are case-sensitive. + Returns: A new UpdateSchema. """ - return UpdateSchema(self._table, self) + return UpdateSchema(self, allow_incompatible_changes=allow_incompatible_changes, case_sensitive=case_sensitive) def update_snapshot(self) -> UpdateSnapshot: """Create a new UpdateSnapshot to produce a new snapshot for the table. @@ -380,7 +325,7 @@ def update_snapshot(self) -> UpdateSnapshot: Returns: A new UpdateSnapshot """ - return UpdateSnapshot(self._table, self) + return UpdateSnapshot(self, io=self._table.io) def update_spec(self) -> UpdateSpec: """Create a new UpdateSpec to update the partitioning of the table. @@ -388,7 +333,7 @@ def update_spec(self) -> UpdateSpec: Returns: A new UpdateSpec. """ - return UpdateSpec(self._table, self) + return UpdateSpec(self) def remove_properties(self, *removals: str) -> Transaction: """Remove properties. @@ -399,7 +344,7 @@ def remove_properties(self, *removals: str) -> Transaction: Returns: The alter table builder. """ - return self._append_updates(RemovePropertiesUpdate(removals=removals)) + return self._apply((RemovePropertiesUpdate(removals=removals),)) def update_location(self, location: str) -> Transaction: """Set the new table location. @@ -412,19 +357,12 @@ def update_location(self, location: str) -> Transaction: """ raise NotImplementedError("Not yet implemented") - def schema(self) -> Schema: - try: - return next(update for update in self._updates if isinstance(update, AddSchemaUpdate)).schema_ - except StopIteration: - return self._table.schema() - def commit_transaction(self) -> Table: """Commit the changes to the catalog. Returns: The table with the updates applied. """ - # Strip the catalog name if len(self._updates) > 0: self._table._do_commit( # pylint: disable=W0212 updates=self._updates, @@ -913,7 +851,7 @@ class AssertLastAssignedPartitionId(TableRequirement): """The table's last assigned partition id must match the requirement's `last-assigned-partition-id`.""" type: Literal["assert-last-assigned-partition-id"] = Field(default="assert-last-assigned-partition-id") - last_assigned_partition_id: int = Field(..., alias="last-assigned-partition-id") + last_assigned_partition_id: Optional[int] = Field(..., alias="last-assigned-partition-id") def validate(self, base_metadata: Optional[TableMetadata]) -> None: if base_metadata is None: @@ -954,6 +892,9 @@ def validate(self, base_metadata: Optional[TableMetadata]) -> None: ) +UpdatesAndRequirements = Tuple[Tuple[TableUpdate, ...], Tuple[TableRequirement, ...]] + + class Namespace(IcebergRootModel[List[str]]): """Reference to one or more levels of a namespace.""" @@ -998,6 +939,11 @@ def __init__( self.catalog = catalog def transaction(self) -> Transaction: + """Create a new transaction object to first stage the changes, and then commit them to the catalog. 
+ + Returns: + The transaction object + """ return Transaction(self) def refresh(self) -> Table: @@ -1080,17 +1026,6 @@ def location(self) -> str: def last_sequence_number(self) -> int: return self.metadata.last_sequence_number - def next_sequence_number(self) -> int: - return self.last_sequence_number + 1 if self.metadata.format_version > 1 else INITIAL_SEQUENCE_NUMBER - - def new_snapshot_id(self) -> int: - """Generate a new snapshot-id that's not in use.""" - snapshot_id = _generate_snapshot_id() - while self.snapshot_by_id(snapshot_id) is not None: - snapshot_id = _generate_snapshot_id() - - return snapshot_id - def current_snapshot(self) -> Optional[Snapshot]: """Get the current snapshot for this table, or None if there is no current snapshot.""" if self.metadata.current_snapshot_id is not None: @@ -1114,18 +1049,19 @@ def history(self) -> List[SnapshotLogEntry]: def update_schema(self, allow_incompatible_changes: bool = False, case_sensitive: bool = True) -> UpdateSchema: """Create a new UpdateSchema to alter the columns of this table. - Returns: - A new UpdateSchema. - """ - return UpdateSchema(self, allow_incompatible_changes=allow_incompatible_changes, case_sensitive=case_sensitive) - - def update_snapshot(self) -> UpdateSnapshot: - """Create a new UpdateSnapshot to produce a new snapshot for the table. + Args: + allow_incompatible_changes: If changes are allowed that might break downstream consumers. + case_sensitive: If field names are case-sensitive. Returns: - A new UpdateSnapshot + A new UpdateSchema. """ - return UpdateSnapshot(self) + return UpdateSchema( + transaction=Transaction(self, autocommit=True), + allow_incompatible_changes=allow_incompatible_changes, + case_sensitive=case_sensitive, + name_mapping=self.name_mapping(), + ) def name_mapping(self) -> Optional[NameMapping]: """Return the table's field-id NameMapping.""" @@ -1154,12 +1090,15 @@ def append(self, df: pa.Table) -> None: _check_schema(self.schema(), other_schema=df.schema) - with self.update_snapshot().fast_append() as update_snapshot: - # skip writing data files if the dataframe is empty - if df.shape[0] > 0: - data_files = _dataframe_to_data_files(self, write_uuid=update_snapshot.commit_uuid, df=df) - for data_file in data_files: - update_snapshot.append_data_file(data_file) + with self.transaction() as txn: + with txn.update_snapshot().fast_append() as update_snapshot: + # skip writing data files if the dataframe is empty + if df.shape[0] > 0: + data_files = _dataframe_to_data_files( + table_metadata=self.metadata, write_uuid=update_snapshot.commit_uuid, df=df, io=self.io + ) + for data_file in data_files: + update_snapshot.append_data_file(data_file) def overwrite(self, df: pa.Table, overwrite_filter: BooleanExpression = ALWAYS_TRUE) -> None: """ @@ -1186,15 +1125,18 @@ def overwrite(self, df: pa.Table, overwrite_filter: BooleanExpression = ALWAYS_T _check_schema(self.schema(), other_schema=df.schema) - with self.update_snapshot().overwrite() as update_snapshot: - # skip writing data files if the dataframe is empty - if df.shape[0] > 0: - data_files = _dataframe_to_data_files(self, write_uuid=update_snapshot.commit_uuid, df=df) - for data_file in data_files: - update_snapshot.append_data_file(data_file) + with self.transaction() as txn: + with txn.update_snapshot().overwrite() as update_snapshot: + # skip writing data files if the dataframe is empty + if df.shape[0] > 0: + data_files = _dataframe_to_data_files( + table_metadata=self.metadata, write_uuid=update_snapshot.commit_uuid, df=df, io=self.io + 
) + for data_file in data_files: + update_snapshot.append_data_file(data_file) def update_spec(self, case_sensitive: bool = True) -> UpdateSpec: - return UpdateSpec(self, case_sensitive=case_sensitive) + return UpdateSpec(Transaction(self, autocommit=True), case_sensitive=case_sensitive) def refs(self) -> Dict[str, SnapshotRef]: """Return the snapshot references in the table.""" @@ -1613,8 +1555,31 @@ class Move: other_field_id: Optional[int] = None -class UpdateSchema: - _table: Optional[Table] +U = TypeVar('U') + + +class UpdateTableMetadata(ABC, Generic[U]): + _transaction: Transaction + + def __init__(self, transaction: Transaction) -> None: + self._transaction = transaction + + @abstractmethod + def _commit(self) -> UpdatesAndRequirements: ... + + def commit(self) -> None: + self._transaction._apply(*self._commit()) + + def __exit__(self, _: Any, value: Any, traceback: Any) -> None: + """Close and commit the change.""" + self.commit() + + def __enter__(self) -> U: + """Update the table.""" + return self # type: ignore + + +class UpdateSchema(UpdateTableMetadata["UpdateSchema"]): _schema: Schema _last_column_id: itertools.count[int] _identifier_field_names: Set[str] @@ -1629,27 +1594,25 @@ class UpdateSchema: _id_to_parent: Dict[int, str] = {} _allow_incompatible_changes: bool _case_sensitive: bool - _transaction: Optional[Transaction] def __init__( self, - table: Optional[Table], - transaction: Optional[Transaction] = None, + transaction: Transaction, allow_incompatible_changes: bool = False, case_sensitive: bool = True, schema: Optional[Schema] = None, + name_mapping: Optional[NameMapping] = None, ) -> None: - self._table = table + super().__init__(transaction) if isinstance(schema, Schema): self._schema = schema self._last_column_id = itertools.count(1 + schema.highest_field_id) - elif table is not None: - self._schema = table.schema() - self._last_column_id = itertools.count(1 + table.metadata.last_column_id) else: - raise ValueError("Either provide a table or a schema") + self._schema = self._transaction.table_metadata.schema() + self._last_column_id = itertools.count(1 + self._transaction.table_metadata.last_column_id) + self._name_mapping = name_mapping self._identifier_field_names = self._schema.identifier_field_names() self._adds = {} @@ -1673,14 +1636,6 @@ def get_column_name(field_id: int) -> str: self._case_sensitive = case_sensitive self._transaction = transaction - def __exit__(self, _: Any, value: Any, traceback: Any) -> None: - """Close and commit the change.""" - return self.commit() - - def __enter__(self) -> UpdateSchema: - """Update the table.""" - return self - def case_sensitive(self, case_sensitive: bool) -> UpdateSchema: """Determine if the case of schema needs to be considered when comparing column names. @@ -2069,38 +2024,36 @@ def move_after(self, path: Union[str, Tuple[str, ...]], after_name: Union[str, T return self - def commit(self) -> None: + def _commit(self) -> UpdatesAndRequirements: """Apply the pending changes and commit.""" - if self._table is None: - raise ValueError("Requires a table to commit to") - new_schema = self._apply() - existing_schema_id = next((schema.schema_id for schema in self._table.metadata.schemas if schema == new_schema), None) + existing_schema_id = next( + (schema.schema_id for schema in self._transaction.table_metadata.schemas if schema == new_schema), None + ) + + requirements: Tuple[TableRequirement, ...] = () + updates: Tuple[TableUpdate, ...] 
= () # Check if it is different current schema ID - if existing_schema_id != self._table.schema().schema_id: - requirements = (AssertCurrentSchemaId(current_schema_id=self._schema.schema_id),) + if existing_schema_id != self._schema.schema_id: + requirements += (AssertCurrentSchemaId(current_schema_id=self._schema.schema_id),) if existing_schema_id is None: - last_column_id = max(self._table.metadata.last_column_id, new_schema.highest_field_id) - updates = ( + last_column_id = max(self._transaction.table_metadata.last_column_id, new_schema.highest_field_id) + updates += ( AddSchemaUpdate(schema=new_schema, last_column_id=last_column_id), SetCurrentSchemaUpdate(schema_id=-1), ) else: - updates = (SetCurrentSchemaUpdate(schema_id=existing_schema_id),) # type: ignore + updates += (SetCurrentSchemaUpdate(schema_id=existing_schema_id),) - if name_mapping := self._table.name_mapping(): + if name_mapping := self._name_mapping: updated_name_mapping = update_mapping(name_mapping, self._updates, self._adds) - updates += ( # type: ignore + updates += ( SetPropertiesUpdate(updates={TableProperties.DEFAULT_NAME_MAPPING: updated_name_mapping.model_dump_json()}), ) - if self._transaction is not None: - self._transaction._append_updates(*updates) # pylint: disable=W0212 - self._transaction._append_requirements(*requirements) # pylint: disable=W0212 - else: - self._table._do_commit(updates=updates, requirements=requirements) # pylint: disable=W0212 + return updates, requirements def _apply(self) -> Schema: """Apply the pending changes to the original schema and returns the result. @@ -2126,7 +2079,13 @@ def _apply(self) -> Schema: field_ids.add(field.field_id) - next_schema_id = 1 + (max(self._table.schemas().keys()) if self._table is not None else self._schema.schema_id) + if txn := self._transaction: + next_schema_id = 1 + ( + max(schema.schema_id for schema in txn.table_metadata.schemas) if txn.table_metadata is not None else 0 + ) + else: + next_schema_id = 0 + return Schema(*struct.fields, schema_id=next_schema_id, identifier_field_ids=field_ids) def assign_new_column_id(self) -> int: @@ -2456,20 +2415,6 @@ def _add_and_move_fields( return None if len(adds) == 0 else tuple(*fields, *adds) -def _generate_snapshot_id() -> int: - """Generate a new Snapshot ID from a UUID. - - Returns: An 64 bit long - """ - rnd_uuid = uuid.uuid4() - snapshot_id = int.from_bytes( - bytes(lhs ^ rhs for lhs, rhs in zip(rnd_uuid.bytes[0:8], rnd_uuid.bytes[8:16])), byteorder='little', signed=True - ) - snapshot_id = snapshot_id if snapshot_id >= 0 else snapshot_id * -1 - - return snapshot_id - - @dataclass(frozen=True) class WriteTask: write_uuid: uuid.UUID @@ -2496,7 +2441,7 @@ def _generate_manifest_list_path(location: str, snapshot_id: int, attempt: int, def _dataframe_to_data_files( - table: Table, df: pa.Table, write_uuid: Optional[uuid.UUID] = None, file_schema: Optional[Schema] = None + table_metadata: TableMetadata, df: pa.Table, io: FileIO, write_uuid: Optional[uuid.UUID] = None ) -> Iterable[DataFile]: """Convert a PyArrow table into a DataFile. 
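Because `Transaction._apply` above folds every staged update straight into `Transaction.table_metadata`, later operations in the same transaction observe earlier changes while still landing as a single commit. A short usage sketch of what this enables, mirroring the `test_change_specs_and_schema_transaction` integration test; the catalog configuration and the `default.events` table name are assumed for illustration, not part of this patch:

from pyiceberg.catalog import load_catalog
from pyiceberg.types import StringType

catalog = load_catalog("default")  # assumes a configured catalog named "default"
table = catalog.load_table("default.events")  # assumes this table exists

with table.transaction() as transaction:
    # Evolve the partition spec first ...
    with transaction.update_spec() as update_spec:
        update_spec.add_identity("id")
    # ... then the schema; the spec change is already visible through
    # transaction.table_metadata, and both updates commit together.
    with transaction.update_schema() as update_schema:
        update_schema.add_column("col_string", StringType())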
@@ -2505,7 +2450,7 @@ def _dataframe_to_data_files( """ from pyiceberg.io.pyarrow import write_file - if len(table.spec().fields) > 0: + if len([spec for spec in table_metadata.partition_specs if spec.spec_id != 0]) > 0: raise ValueError("Cannot write to partitioned tables") counter = itertools.count(0) @@ -2513,41 +2458,33 @@ def _dataframe_to_data_files( # This is an iter, so we don't have to materialize everything every time # This will be more relevant when we start doing partitioned writes - yield from write_file(table, iter([WriteTask(write_uuid, next(counter), df)]), file_schema=file_schema) + yield from write_file(io=io, table_metadata=table_metadata, tasks=iter([WriteTask(write_uuid, next(counter), df)])) -class _MergingSnapshotProducer: +class _MergingSnapshotProducer(UpdateTableMetadata["_MergingSnapshotProducer"]): commit_uuid: uuid.UUID _operation: Operation - _table: Table _snapshot_id: int _parent_snapshot_id: Optional[int] _added_data_files: List[DataFile] - _transaction: Optional[Transaction] def __init__( self, operation: Operation, - table: Table, + transaction: Transaction, + io: FileIO, commit_uuid: Optional[uuid.UUID] = None, - transaction: Optional[Transaction] = None, ) -> None: + super().__init__(transaction) self.commit_uuid = commit_uuid or uuid.uuid4() + self._io = io self._operation = operation - self._table = table - self._snapshot_id = table.new_snapshot_id() + self._snapshot_id = self._transaction.table_metadata.new_snapshot_id() # Since we only support the main branch for now - self._parent_snapshot_id = snapshot.snapshot_id if (snapshot := self._table.current_snapshot()) else None + self._parent_snapshot_id = ( + snapshot.snapshot_id if (snapshot := self._transaction.table_metadata.current_snapshot()) else None + ) self._added_data_files = [] - self._transaction = transaction - - def __enter__(self) -> _MergingSnapshotProducer: - """Start a transaction to update the table.""" - return self - - def __exit__(self, _: Any, value: Any, traceback: Any) -> None: - """Close and commit the transaction.""" - self.commit() def append_data_file(self, data_file: DataFile) -> _MergingSnapshotProducer: self._added_data_files.append(data_file) @@ -2562,12 +2499,14 @@ def _existing_manifests(self) -> List[ManifestFile]: ... 
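+        # Everything below reads from the transaction's staged metadata rather
+        # than from a live Table, so the snapshot is produced against updates
+        # made earlier in the same transaction.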
def _manifests(self) -> List[ManifestFile]: def _write_added_manifest() -> List[ManifestFile]: if self._added_data_files: - output_file_location = _new_manifest_path(location=self._table.location(), num=0, commit_uuid=self.commit_uuid) + output_file_location = _new_manifest_path( + location=self._transaction.table_metadata.location, num=0, commit_uuid=self.commit_uuid + ) with write_manifest( - format_version=self._table.format_version, - spec=self._table.spec(), - schema=self._table.schema(), - output_file=self._table.io.new_output(output_file_location), + format_version=self._transaction.table_metadata.format_version, + spec=self._transaction.table_metadata.spec(), + schema=self._transaction.table_metadata.schema(), + output_file=self._io.new_output(output_file_location), snapshot_id=self._snapshot_id, ) as writer: for data_file in self._added_data_files: @@ -2588,13 +2527,15 @@ def _write_delete_manifest() -> List[ManifestFile]: # Check if we need to mark the files as deleted deleted_entries = self._deleted_entries() if len(deleted_entries) > 0: - output_file_location = _new_manifest_path(location=self._table.location(), num=1, commit_uuid=self.commit_uuid) + output_file_location = _new_manifest_path( + location=self._transaction.table_metadata.location, num=1, commit_uuid=self.commit_uuid + ) with write_manifest( - format_version=self._table.format_version, - spec=self._table.spec(), - schema=self._table.schema(), - output_file=self._table.io.new_output(output_file_location), + format_version=self._transaction.table_metadata.format_version, + spec=self._transaction.table_metadata.spec(), + schema=self._transaction.table_metadata.schema(), + output_file=self._io.new_output(output_file_location), snapshot_id=self._snapshot_id, ) as writer: for delete_entry in deleted_entries: @@ -2617,7 +2558,11 @@ def _summary(self) -> Summary: for data_file in self._added_data_files: ssc.add_file(data_file=data_file) - previous_snapshot = self._table.snapshot_by_id(self._parent_snapshot_id) if self._parent_snapshot_id is not None else None + previous_snapshot = ( + self._transaction.table_metadata.snapshot_by_id(self._parent_snapshot_id) + if self._parent_snapshot_id is not None + else None + ) return update_snapshot_summaries( summary=Summary(operation=self._operation, **ssc.build()), @@ -2625,18 +2570,21 @@ def _summary(self) -> Summary: truncate_full_table=self._operation == Operation.OVERWRITE, ) - def commit(self) -> Snapshot: + def _commit(self) -> UpdatesAndRequirements: new_manifests = self._manifests() - next_sequence_number = self._table.next_sequence_number() + next_sequence_number = self._transaction.table_metadata.next_sequence_number() summary = self._summary() manifest_list_file_path = _generate_manifest_list_path( - location=self._table.location(), snapshot_id=self._snapshot_id, attempt=0, commit_uuid=self.commit_uuid + location=self._transaction.table_metadata.location, + snapshot_id=self._snapshot_id, + attempt=0, + commit_uuid=self.commit_uuid, ) with write_manifest_list( - format_version=self._table.metadata.format_version, - output_file=self._table.io.new_output(manifest_list_file_path), + format_version=self._transaction.table_metadata.format_version, + output_file=self._io.new_output(manifest_list_file_path), snapshot_id=self._snapshot_id, parent_snapshot_id=self._parent_snapshot_id, sequence_number=next_sequence_number, @@ -2649,22 +2597,21 @@ def commit(self) -> Snapshot: manifest_list=manifest_list_file_path, sequence_number=next_sequence_number, summary=summary, - 
schema_id=self._table.schema().schema_id, + schema_id=self._transaction.table_metadata.current_schema_id, ) - if self._transaction is not None: - self._transaction.add_snapshot(snapshot=snapshot) - self._transaction.set_ref_snapshot( - snapshot_id=self._snapshot_id, parent_snapshot_id=self._parent_snapshot_id, ref_name="main", type="branch" - ) - else: - with self._table.transaction() as tx: - tx.add_snapshot(snapshot=snapshot) - tx.set_ref_snapshot( + return ( + ( + AddSnapshotUpdate(snapshot=snapshot), + SetSnapshotRefUpdate( snapshot_id=self._snapshot_id, parent_snapshot_id=self._parent_snapshot_id, ref_name="main", type="branch" - ) - - return snapshot + ), + ), + ( + AssertTableUUID(uuid=self._transaction.table_metadata.table_uuid), + AssertRefSnapshotId(snapshot_id=self._parent_snapshot_id, ref="main"), + ), + ) class FastAppendFiles(_MergingSnapshotProducer): @@ -2677,12 +2624,12 @@ def _existing_manifests(self) -> List[ManifestFile]: existing_manifests = [] if self._parent_snapshot_id is not None: - previous_snapshot = self._table.snapshot_by_id(self._parent_snapshot_id) + previous_snapshot = self._transaction.table_metadata.snapshot_by_id(self._parent_snapshot_id) if previous_snapshot is None: raise ValueError(f"Snapshot could not be found: {self._parent_snapshot_id}") - for manifest in previous_snapshot.manifests(io=self._table.io): + for manifest in previous_snapshot.manifests(io=self._io): if manifest.has_added_files() or manifest.has_existing_files() or manifest.added_snapshot_id == self._snapshot_id: existing_manifests.append(manifest) @@ -2713,7 +2660,7 @@ def _deleted_entries(self) -> List[ManifestEntry]: which entries are affected. """ if self._parent_snapshot_id is not None: - previous_snapshot = self._table.snapshot_by_id(self._parent_snapshot_id) + previous_snapshot = self._transaction.table_metadata.snapshot_by_id(self._parent_snapshot_id) if previous_snapshot is None: # This should never happen since you cannot overwrite an empty table raise ValueError(f"Could not find the previous snapshot: {self._parent_snapshot_id}") @@ -2729,37 +2676,39 @@ def _get_entries(manifest: ManifestFile) -> List[ManifestEntry]: file_sequence_number=entry.file_sequence_number, data_file=entry.data_file, ) - for entry in manifest.fetch_manifest_entry(self._table.io, discard_deleted=True) + for entry in manifest.fetch_manifest_entry(self._io, discard_deleted=True) if entry.data_file.content == DataFileContent.DATA ] - list_of_entries = executor.map(_get_entries, previous_snapshot.manifests(self._table.io)) + list_of_entries = executor.map(_get_entries, previous_snapshot.manifests(self._io)) return list(chain(*list_of_entries)) else: return [] class UpdateSnapshot: - _table: Table - _transaction: Optional[Transaction] + _transaction: Transaction + _io: FileIO - def __init__(self, table: Table, transaction: Optional[Transaction] = None) -> None: - self._table = table + def __init__(self, transaction: Transaction, io: FileIO) -> None: self._transaction = transaction + self._io = io def fast_append(self) -> FastAppendFiles: - return FastAppendFiles(table=self._table, operation=Operation.APPEND, transaction=self._transaction) + return FastAppendFiles(operation=Operation.APPEND, transaction=self._transaction, io=self._io) def overwrite(self) -> OverwriteFiles: return OverwriteFiles( - table=self._table, - operation=Operation.OVERWRITE if self._table.current_snapshot() is not None else Operation.APPEND, + operation=Operation.OVERWRITE + if self._transaction.table_metadata.current_snapshot() is not 
None + else Operation.APPEND, transaction=self._transaction, + io=self._io, ) -class UpdateSpec: - _table: Table +class UpdateSpec(UpdateTableMetadata["UpdateSpec"]): + _transaction: Transaction _name_to_field: Dict[str, PartitionField] = {} _name_to_added_field: Dict[str, PartitionField] = {} _transform_to_field: Dict[Tuple[int, str], PartitionField] = {} @@ -2770,17 +2719,18 @@ class UpdateSpec: _adds: List[PartitionField] _deletes: Set[int] _last_assigned_partition_id: int - _transaction: Optional[Transaction] - def __init__(self, table: Table, transaction: Optional[Transaction] = None, case_sensitive: bool = True) -> None: - self._table = table - self._name_to_field = {field.name: field for field in table.spec().fields} + def __init__(self, transaction: Transaction, case_sensitive: bool = True) -> None: + super().__init__(transaction) + self._name_to_field = {field.name: field for field in transaction.table_metadata.spec().fields} self._name_to_added_field = {} - self._transform_to_field = {(field.source_id, repr(field.transform)): field for field in table.spec().fields} + self._transform_to_field = { + (field.source_id, repr(field.transform)): field for field in transaction.table_metadata.spec().fields + } self._transform_to_added_field = {} self._adds = [] self._deletes = set() - self._last_assigned_partition_id = table.last_partition_id() + self._last_assigned_partition_id = transaction.table_metadata.last_partition_id or PARTITION_FIELD_ID_START - 1 self._renames = {} self._transaction = transaction self._case_sensitive = case_sensitive @@ -2793,7 +2743,7 @@ def add_field( partition_field_name: Optional[str] = None, ) -> UpdateSpec: ref = Reference(source_column_name) - bound_ref = ref.bind(self._table.schema(), self._case_sensitive) + bound_ref = ref.bind(self._transaction.table_metadata.schema(), self._case_sensitive) # verify transform can actually bind it output_type = bound_ref.field.field_type if not transform.can_transform(output_type): @@ -2864,31 +2814,24 @@ def rename_field(self, name: str, new_name: str) -> UpdateSpec: self._renames[name] = new_name return self - def commit(self) -> None: + def _commit(self) -> UpdatesAndRequirements: new_spec = self._apply() - if self._table.metadata.default_spec_id != new_spec.spec_id: - if new_spec.spec_id not in self._table.specs(): - updates = [AddPartitionSpecUpdate(spec=new_spec), SetDefaultSpecUpdate(spec_id=-1)] - else: - updates = [SetDefaultSpecUpdate(spec_id=new_spec.spec_id)] - - required_last_assigned_partitioned_id = self._table.last_partition_id() - requirements = [AssertLastAssignedPartitionId(last_assigned_partition_id=required_last_assigned_partitioned_id)] + updates: Tuple[TableUpdate, ...] = () + requirements: Tuple[TableRequirement, ...] 
= () - if self._transaction is not None: - self._transaction._append_updates(*updates) # pylint: disable=W0212 - self._transaction._append_requirements(*requirements) # pylint: disable=W0212 + if self._transaction.table_metadata.default_spec_id != new_spec.spec_id: + if new_spec.spec_id not in self._transaction.table_metadata.specs(): + updates = ( + AddPartitionSpecUpdate(spec=new_spec), + SetDefaultSpecUpdate(spec_id=-1), + ) else: - requirements.append(AssertDefaultSpecId(default_spec_id=self._table.spec().spec_id)) - self._table._do_commit(updates=tuple(updates), requirements=tuple(requirements)) # pylint: disable=W0212 + updates = (SetDefaultSpecUpdate(spec_id=new_spec.spec_id),) - def __exit__(self, _: Any, value: Any, traceback: Any) -> None: - """Close and commit the change.""" - return self.commit() + required_last_assigned_partitioned_id = self._transaction.table_metadata.last_partition_id + requirements = (AssertLastAssignedPartitionId(last_assigned_partition_id=required_last_assigned_partitioned_id),) - def __enter__(self) -> UpdateSpec: - """Update the table.""" - return self + return updates, requirements def _apply(self) -> PartitionSpec: def _check_and_add_partition_name(schema: Schema, name: str, source_id: int, partition_names: Set[str]) -> None: @@ -2915,27 +2858,47 @@ def _add_new_field( partition_fields = [] partition_names: Set[str] = set() - for field in self._table.spec().fields: + for field in self._transaction.table_metadata.spec().fields: if field.field_id not in self._deletes: renamed = self._renames.get(field.name) if renamed: new_field = _add_new_field( - self._table.schema(), field.source_id, field.field_id, renamed, field.transform, partition_names + self._transaction.table_metadata.schema(), + field.source_id, + field.field_id, + renamed, + field.transform, + partition_names, ) else: new_field = _add_new_field( - self._table.schema(), field.source_id, field.field_id, field.name, field.transform, partition_names + self._transaction.table_metadata.schema(), + field.source_id, + field.field_id, + field.name, + field.transform, + partition_names, ) partition_fields.append(new_field) - elif self._table.format_version == 1: + elif self._transaction.table_metadata.format_version == 1: renamed = self._renames.get(field.name) if renamed: new_field = _add_new_field( - self._table.schema(), field.source_id, field.field_id, renamed, VoidTransform(), partition_names + self._transaction.table_metadata.schema(), + field.source_id, + field.field_id, + renamed, + VoidTransform(), + partition_names, ) else: new_field = _add_new_field( - self._table.schema(), field.source_id, field.field_id, field.name, VoidTransform(), partition_names + self._transaction.table_metadata.schema(), + field.source_id, + field.field_id, + field.name, + VoidTransform(), + partition_names, ) partition_fields.append(new_field) @@ -2952,7 +2915,7 @@ def _add_new_field( # Reuse spec id or create a new one. 
new_spec = PartitionSpec(*partition_fields)
new_spec_id = INITIAL_PARTITION_SPEC_ID
- for spec in self._table.specs().values():
+ for spec in self._transaction.table_metadata.specs().values():
if new_spec.compatible_with(spec):
new_spec_id = spec.spec_id
break
@@ -2961,10 +2924,10 @@ def _add_new_field(
return PartitionSpec(*partition_fields, spec_id=new_spec_id)
def _partition_field(self, transform_key: Tuple[int, Transform[Any, Any]], name: Optional[str]) -> PartitionField:
- if self._table.metadata.format_version == 2:
+ if self._transaction.table_metadata.format_version == 2:
source_id, transform = transform_key
historical_fields = []
- for spec in self._table.specs().values():
+ for spec in self._transaction.table_metadata.specs().values():
for field in spec.fields:
historical_fields.append((field.source_id, field.field_id, repr(field.transform), field.name))
@@ -2976,7 +2939,7 @@ def _partition_field(self, transform_key: Tuple[int, Transform[Any, Any]], name:
new_field_id = self._new_field_id()
if name is None:
tmp_field = PartitionField(transform_key[0], new_field_id, transform_key[1], 'unassigned_field_name')
- name = _visit_partition_field(self._table.schema(), tmp_field, _PartitionNameGenerator())
+ name = _visit_partition_field(self._transaction.table_metadata.schema(), tmp_field, _PartitionNameGenerator())
return PartitionField(transform_key[0], new_field_id, transform_key[1], name)
def _new_field_id(self) -> int:
diff --git a/pyiceberg/table/metadata.py b/pyiceberg/table/metadata.py
index 2fd8b13af4..c716915192 100644
--- a/pyiceberg/table/metadata.py
+++ b/pyiceberg/table/metadata.py
@@ -226,11 +226,54 @@ def schema_by_id(self, schema_id: int) -> Optional[Schema]:
"""Get the schema by schema_id."""
return next((schema for schema in self.schemas if schema.schema_id == schema_id), None)
+ def schema(self) -> Schema:
+ """Return the schema for this table."""
+ return next(schema for schema in self.schemas if schema.schema_id == self.current_schema_id)
+
+ def spec(self) -> PartitionSpec:
+ """Return the partition spec of this table."""
+ return next(spec for spec in self.partition_specs if spec.spec_id == self.default_spec_id)
+
+ def specs(self) -> Dict[int, PartitionSpec]:
+ """Return a dict of the partition specs of this table."""
+ return {spec.spec_id: spec for spec in self.partition_specs}
+
+ def new_snapshot_id(self) -> int:
+ """Generate a new snapshot-id that's not in use."""
+ snapshot_id = _generate_snapshot_id()
+ while self.snapshot_by_id(snapshot_id) is not None:
+ snapshot_id = _generate_snapshot_id()
+
+ return snapshot_id
+
+ def current_snapshot(self) -> Optional[Snapshot]:
+ """Get the current snapshot for this table, or None if there is no current snapshot."""
+ if self.current_snapshot_id is not None:
+ return self.snapshot_by_id(self.current_snapshot_id)
+ return None
+
+ def next_sequence_number(self) -> int:
+ return self.last_sequence_number + 1 if self.format_version > 1 else INITIAL_SEQUENCE_NUMBER
+
def sort_order_by_id(self, sort_order_id: int) -> Optional[SortOrder]:
"""Get the sort order by sort_order_id."""
return next((sort_order for sort_order in self.sort_orders if sort_order.order_id == sort_order_id), None)
+def _generate_snapshot_id() -> int:
+ """Generate a new Snapshot ID from a UUID.
+
+ Returns: A 64-bit long
+ """
+ rnd_uuid = uuid.uuid4()
+ snapshot_id = int.from_bytes(
+ bytes(lhs ^ rhs for lhs, rhs in zip(rnd_uuid.bytes[0:8], rnd_uuid.bytes[8:16])), byteorder='little', signed=True
+ )
+ snapshot_id = snapshot_id if snapshot_id >= 0 else snapshot_id * -1
+
+ return snapshot_id
+
+
class TableMetadataV1(TableMetadataCommonFields, IcebergBaseModel):
"""Represents version 1 of the Table Metadata.
diff --git a/tests/catalog/test_sql.py b/tests/catalog/test_sql.py
index 2b1167f1b6..9f4d4af4c7 100644
--- a/tests/catalog/test_sql.py
+++ b/tests/catalog/test_sql.py
@@ -39,6 +39,7 @@
from pyiceberg.io.pyarrow import schema_to_pyarrow
from pyiceberg.partitioning import UNPARTITIONED_PARTITION_SPEC
from pyiceberg.schema import Schema
+from pyiceberg.table import _dataframe_to_data_files
from pyiceberg.table.snapshots import Operation
from pyiceberg.table.sorting import (
NullOrder,
@@ -863,3 +864,54 @@ def test_concurrent_commit_table(catalog: SqlCatalog, table_schema_simple: Schem
# This one should fail since it already has been updated
with table_b.update_schema() as update:
update.add_column(path="c", field_type=IntegerType())
+
+
+@pytest.mark.parametrize(
+ 'catalog',
+ [
+ lazy_fixture('catalog_memory'),
+ lazy_fixture('catalog_sqlite'),
+ lazy_fixture('catalog_sqlite_without_rowcount'),
+ ],
+)
+@pytest.mark.parametrize("format_version", [1, 2])
+def test_write_and_evolve(catalog: SqlCatalog, format_version: int) -> None:
+ identifier = f"default.arrow_write_data_and_evolve_schema_v{format_version}"
+
+ try:
+ catalog.create_namespace("default")
+ except NamespaceAlreadyExistsError:
+ pass
+
+ try:
+ catalog.drop_table(identifier=identifier)
+ except NoSuchTableError:
+ pass
+
+ pa_table = pa.Table.from_pydict(
+ {
+ 'foo': ['a', None, 'z'],
+ },
+ schema=pa.schema([pa.field("foo", pa.string(), nullable=True)]),
+ )
+
+ tbl = catalog.create_table(identifier=identifier, schema=pa_table.schema, properties={"format-version": str(format_version)})
+
+ pa_table_with_column = pa.Table.from_pydict(
+ {
+ 'foo': ['a', None, 'z'],
+ 'bar': [19, None, 25],
+ },
+ schema=pa.schema([
+ pa.field("foo", pa.string(), nullable=True),
+ pa.field("bar", pa.int32(), nullable=True),
+ ]),
+ )
+
+ with tbl.transaction() as txn:
+ with txn.update_schema() as schema_txn:
+ schema_txn.union_by_name(pa_table_with_column.schema)
+
+ with txn.update_snapshot().fast_append() as snapshot_update:
+ for data_file in _dataframe_to_data_files(table_metadata=txn.table_metadata, df=pa_table_with_column, io=tbl.io):
+ snapshot_update.append_data_file(data_file)
diff --git a/tests/integration/test_rest_schema.py b/tests/integration/test_rest_schema.py
index aae07caba9..17fb338080 100644
--- a/tests/integration/test_rest_schema.py
+++ b/tests/integration/test_rest_schema.py
@@ -84,7 +84,7 @@ def _create_table_with_schema(catalog: Catalog, schema: Schema) -> Table:
@pytest.mark.integration
def test_add_already_exists(catalog: Catalog, table_schema_nested: Schema) -> None:
table = _create_table_with_schema(catalog, table_schema_nested)
- update = UpdateSchema(table)
+ update = table.update_schema()
with pytest.raises(ValueError) as exc_info:
update.add_column("foo", IntegerType())
@@ -98,7 +98,7 @@ def test_add_already_exists(catalog: Catalog, table_schema_nested: Schema) -> No
@pytest.mark.integration
def test_add_to_non_struct_type(catalog: Catalog, table_schema_simple: Schema) -> None:
table = _create_table_with_schema(catalog, table_schema_simple)
- update = UpdateSchema(table)
+ update =
table.update_schema() with pytest.raises(ValueError) as exc_info: update.add_column(path=("foo", "lat"), field_type=IntegerType()) assert "Cannot add column 'lat' to non-struct type: foo" in str(exc_info.value) @@ -1066,13 +1066,13 @@ def test_add_nested_list_of_structs(catalog: Catalog) -> None: def test_add_required_column(catalog: Catalog) -> None: schema_ = Schema(NestedField(field_id=1, name="a", field_type=BooleanType(), required=False)) table = _create_table_with_schema(catalog, schema_) - update = UpdateSchema(table) + update = table.update_schema() with pytest.raises(ValueError) as exc_info: update.add_column(path="data", field_type=IntegerType(), required=True) assert "Incompatible change: cannot add required column: data" in str(exc_info.value) new_schema = ( - UpdateSchema(table, allow_incompatible_changes=True) # pylint: disable=W0212 + UpdateSchema(transaction=table.transaction(), allow_incompatible_changes=True) .add_column(path="data", field_type=IntegerType(), required=True) ._apply() ) @@ -1088,12 +1088,13 @@ def test_add_required_column_case_insensitive(catalog: Catalog) -> None: table = _create_table_with_schema(catalog, schema_) with pytest.raises(ValueError) as exc_info: - with UpdateSchema(table, allow_incompatible_changes=True) as update: - update.case_sensitive(False).add_column(path="ID", field_type=IntegerType(), required=True) + with table.transaction() as txn: + with txn.update_schema(allow_incompatible_changes=True) as update: + update.case_sensitive(False).add_column(path="ID", field_type=IntegerType(), required=True) assert "already exists: ID" in str(exc_info.value) new_schema = ( - UpdateSchema(table, allow_incompatible_changes=True) # pylint: disable=W0212 + UpdateSchema(transaction=table.transaction(), allow_incompatible_changes=True) .add_column(path="ID", field_type=IntegerType(), required=True) ._apply() ) @@ -1264,7 +1265,7 @@ def test_mixed_changes(catalog: Catalog) -> None: @pytest.mark.integration def test_ambiguous_column(catalog: Catalog, table_schema_nested: Schema) -> None: table = _create_table_with_schema(catalog, table_schema_nested) - update = UpdateSchema(table) + update = UpdateSchema(transaction=table.transaction()) with pytest.raises(ValueError) as exc_info: update.add_column(path="location.latitude", field_type=IntegerType()) @@ -2507,16 +2508,14 @@ def test_two_add_schemas_in_a_single_transaction(catalog: Catalog) -> None: ), ) - with pytest.raises(ValueError) as exc_info: + with pytest.raises(CommitFailedException) as exc_info: with tbl.transaction() as tr: with tr.update_schema() as update: update.add_column("bar", field_type=StringType()) with tr.update_schema() as update: update.add_column("baz", field_type=StringType()) - assert "Updates in a single commit need to be unique, duplicate: " in str( - exc_info.value - ) + assert "CommitFailedException: Requirement failed: current schema changed: expected id 1 != 0" in str(exc_info.value) @pytest.mark.integration diff --git a/tests/integration/test_writes.py b/tests/integration/test_writes.py index a16ba48a27..388e566bce 100644 --- a/tests/integration/test_writes.py +++ b/tests/integration/test_writes.py @@ -652,5 +652,5 @@ def test_write_and_evolve(session_catalog: Catalog, format_version: int) -> None schema_txn.union_by_name(pa_table_with_column.schema) with txn.update_snapshot().fast_append() as snapshot_update: - for data_file in _dataframe_to_data_files(table=tbl, df=pa_table_with_column, file_schema=txn.schema()): + for data_file in 
_dataframe_to_data_files(table_metadata=txn.table_metadata, df=pa_table_with_column, io=tbl.io): snapshot_update.append_data_file(data_file) diff --git a/tests/table/test_init.py b/tests/table/test_init.py index 39aa72f8db..e6407b60cb 100644 --- a/tests/table/test_init.py +++ b/tests/table/test_init.py @@ -62,12 +62,11 @@ UpdateSchema, _apply_table_update, _check_schema, - _generate_snapshot_id, _match_deletes_to_data_file, _TableMetadataUpdateContext, update_table_metadata, ) -from pyiceberg.table.metadata import INITIAL_SEQUENCE_NUMBER, TableMetadataUtil, TableMetadataV2 +from pyiceberg.table.metadata import INITIAL_SEQUENCE_NUMBER, TableMetadataUtil, TableMetadataV2, _generate_snapshot_id from pyiceberg.table.snapshots import ( Operation, Snapshot, @@ -435,7 +434,7 @@ def test_serialize_set_properties_updates() -> None: def test_add_column(table_v2: Table) -> None: - update = UpdateSchema(table_v2) + update = UpdateSchema(transaction=table_v2.transaction()) update.add_column(path="b", field_type=IntegerType()) apply_schema: Schema = update._apply() # pylint: disable=W0212 assert len(apply_schema.fields) == 4 @@ -469,7 +468,7 @@ def test_add_primitive_type_column(table_v2: Table) -> None: for name, type_ in primitive_type.items(): field_name = f"new_column_{name}" - update = UpdateSchema(table_v2) + update = UpdateSchema(transaction=table_v2.transaction()) update.add_column(path=field_name, field_type=type_, doc=f"new_column_{name}") new_schema = update._apply() # pylint: disable=W0212 @@ -481,7 +480,7 @@ def test_add_primitive_type_column(table_v2: Table) -> None: def test_add_nested_type_column(table_v2: Table) -> None: # add struct type column field_name = "new_column_struct" - update = UpdateSchema(table_v2) + update = UpdateSchema(transaction=table_v2.transaction()) struct_ = StructType( NestedField(1, "lat", DoubleType()), NestedField(2, "long", DoubleType()), @@ -499,7 +498,7 @@ def test_add_nested_type_column(table_v2: Table) -> None: def test_add_nested_map_type_column(table_v2: Table) -> None: # add map type column field_name = "new_column_map" - update = UpdateSchema(table_v2) + update = UpdateSchema(transaction=table_v2.transaction()) map_ = MapType(1, StringType(), 2, IntegerType(), False) update.add_column(path=field_name, field_type=map_) new_schema = update._apply() # pylint: disable=W0212 @@ -511,7 +510,7 @@ def test_add_nested_map_type_column(table_v2: Table) -> None: def test_add_nested_list_type_column(table_v2: Table) -> None: # add list type column field_name = "new_column_list" - update = UpdateSchema(table_v2) + update = UpdateSchema(transaction=table_v2.transaction()) list_ = ListType( element_id=101, element_type=StructType( @@ -806,7 +805,7 @@ def test_metadata_isolation_from_illegal_updates(table_v1: Table) -> None: def test_generate_snapshot_id(table_v2: Table) -> None: assert isinstance(_generate_snapshot_id(), int) - assert isinstance(table_v2.new_snapshot_id(), int) + assert isinstance(table_v2.metadata.new_snapshot_id(), int) def test_assert_create(table_v2: Table) -> None: diff --git a/tests/test_schema.py b/tests/test_schema.py index cfee6e7f14..6394b72ba6 100644 --- a/tests/test_schema.py +++ b/tests/test_schema.py @@ -928,7 +928,7 @@ def primitive_fields() -> List[NestedField]: def test_add_top_level_primitives(primitive_fields: NestedField) -> None: for primitive_field in primitive_fields: new_schema = Schema(primitive_field) - applied = UpdateSchema(None, schema=Schema()).union_by_name(new_schema)._apply() + applied = UpdateSchema(transaction=None, 
schema=Schema()).union_by_name(new_schema)._apply() # type: ignore assert applied == new_schema @@ -942,7 +942,7 @@ def test_add_top_level_list_of_primitives(primitive_fields: NestedField) -> None required=False, ) ) - applied = UpdateSchema(None, schema=Schema()).union_by_name(new_schema)._apply() + applied = UpdateSchema(transaction=None, schema=Schema()).union_by_name(new_schema)._apply() # type: ignore assert applied.as_struct() == new_schema.as_struct() @@ -958,7 +958,7 @@ def test_add_top_level_map_of_primitives(primitive_fields: NestedField) -> None: required=False, ) ) - applied = UpdateSchema(None, schema=Schema()).union_by_name(new_schema)._apply() + applied = UpdateSchema(transaction=None, schema=Schema()).union_by_name(new_schema)._apply() # type: ignore assert applied.as_struct() == new_schema.as_struct() @@ -972,7 +972,7 @@ def test_add_top_struct_of_primitives(primitive_fields: NestedField) -> None: required=False, ) ) - applied = UpdateSchema(None, schema=Schema()).union_by_name(new_schema)._apply() + applied = UpdateSchema(transaction=None, schema=Schema()).union_by_name(new_schema)._apply() # type: ignore assert applied.as_struct() == new_schema.as_struct() @@ -987,7 +987,7 @@ def test_add_nested_primitive(primitive_fields: NestedField) -> None: required=False, ) ) - applied = UpdateSchema(None, schema=current_schema).union_by_name(new_schema)._apply() + applied = UpdateSchema(None, None, schema=current_schema).union_by_name(new_schema)._apply() # type: ignore assert applied.as_struct() == new_schema.as_struct() @@ -1007,7 +1007,7 @@ def test_add_nested_primitives(primitive_fields: NestedField) -> None: field_id=1, name="aStruct", field_type=StructType(*_primitive_fields(TEST_PRIMITIVE_TYPES, 2)), required=False ) ) - applied = UpdateSchema(None, schema=current_schema).union_by_name(new_schema)._apply() + applied = UpdateSchema(transaction=None, schema=current_schema).union_by_name(new_schema)._apply() # type: ignore assert applied.as_struct() == new_schema.as_struct() @@ -1048,7 +1048,7 @@ def test_add_nested_lists(primitive_fields: NestedField) -> None: required=False, ) ) - applied = UpdateSchema(None, schema=Schema()).union_by_name(new_schema)._apply() + applied = UpdateSchema(transaction=None, schema=Schema()).union_by_name(new_schema)._apply() # type: ignore assert applied.as_struct() == new_schema.as_struct() @@ -1098,7 +1098,7 @@ def test_add_nested_struct(primitive_fields: NestedField) -> None: required=False, ) ) - applied = UpdateSchema(None, schema=Schema()).union_by_name(new_schema)._apply() + applied = UpdateSchema(transaction=None, schema=Schema()).union_by_name(new_schema)._apply() # type: ignore assert applied.as_struct() == new_schema.as_struct() @@ -1141,7 +1141,7 @@ def test_add_nested_maps(primitive_fields: NestedField) -> None: required=False, ) ) - applied = UpdateSchema(None, schema=Schema()).union_by_name(new_schema)._apply() + applied = UpdateSchema(transaction=None, schema=Schema()).union_by_name(new_schema)._apply() # type: ignore assert applied.as_struct() == new_schema.as_struct() @@ -1164,7 +1164,7 @@ def test_detect_invalid_top_level_list() -> None: ) with pytest.raises(ValidationError, match="Cannot change column type: aList.element: string -> double"): - _ = UpdateSchema(None, schema=current_schema).union_by_name(new_schema)._apply() + _ = UpdateSchema(transaction=None, schema=current_schema).union_by_name(new_schema)._apply() # type: ignore def test_detect_invalid_top_level_maps() -> None: @@ -1186,14 +1186,14 @@ def 
test_detect_invalid_top_level_maps() -> None: ) with pytest.raises(ValidationError, match="Cannot change column type: aMap.key: string -> uuid"): - _ = UpdateSchema(None, schema=current_schema).union_by_name(new_schema)._apply() + _ = UpdateSchema(transaction=None, schema=current_schema).union_by_name(new_schema)._apply() # type: ignore def test_promote_float_to_double() -> None: current_schema = Schema(NestedField(field_id=1, name="aCol", field_type=FloatType(), required=False)) new_schema = Schema(NestedField(field_id=1, name="aCol", field_type=DoubleType(), required=False)) - applied = UpdateSchema(None, schema=current_schema).union_by_name(new_schema)._apply() + applied = UpdateSchema(transaction=None, schema=current_schema).union_by_name(new_schema)._apply() # type: ignore assert applied.as_struct() == new_schema.as_struct() assert len(applied.fields) == 1 @@ -1205,7 +1205,7 @@ def test_detect_invalid_promotion_double_to_float() -> None: new_schema = Schema(NestedField(field_id=1, name="aCol", field_type=FloatType(), required=False)) with pytest.raises(ValidationError, match="Cannot change column type: aCol: double -> float"): - _ = UpdateSchema(None, schema=current_schema).union_by_name(new_schema)._apply() + _ = UpdateSchema(transaction=None, schema=current_schema).union_by_name(new_schema)._apply() # type: ignore # decimal(P,S) Fixed-point decimal; precision P, scale S -> Scale is fixed [1], @@ -1214,7 +1214,7 @@ def test_type_promote_decimal_to_fixed_scale_with_wider_precision() -> None: current_schema = Schema(NestedField(field_id=1, name="aCol", field_type=DecimalType(precision=20, scale=1), required=False)) new_schema = Schema(NestedField(field_id=1, name="aCol", field_type=DecimalType(precision=22, scale=1), required=False)) - applied = UpdateSchema(None, schema=current_schema).union_by_name(new_schema)._apply() + applied = UpdateSchema(transaction=None, schema=current_schema).union_by_name(new_schema)._apply() # type: ignore assert applied.as_struct() == new_schema.as_struct() assert len(applied.fields) == 1 @@ -1282,7 +1282,7 @@ def test_add_nested_structs(primitive_fields: NestedField) -> None: required=False, ) ) - applied = UpdateSchema(None, schema=schema).union_by_name(new_schema)._apply() + applied = UpdateSchema(transaction=None, schema=schema).union_by_name(new_schema)._apply() # type: ignore expected = Schema( NestedField( @@ -1322,7 +1322,7 @@ def test_replace_list_with_primitive() -> None: new_schema = Schema(NestedField(field_id=1, name="aCol", field_type=StringType())) with pytest.raises(ValidationError, match="Cannot change column type: list is not a primitive"): - _ = UpdateSchema(None, schema=current_schema).union_by_name(new_schema)._apply() + _ = UpdateSchema(transaction=None, schema=current_schema).union_by_name(new_schema)._apply() # type: ignore def test_mirrored_schemas() -> None: @@ -1345,7 +1345,7 @@ def test_mirrored_schemas() -> None: NestedField(9, "string6", StringType(), required=False), ) - applied = UpdateSchema(None, schema=current_schema).union_by_name(mirrored_schema)._apply() + applied = UpdateSchema(transaction=None, schema=current_schema).union_by_name(mirrored_schema)._apply() # type: ignore assert applied.as_struct() == current_schema.as_struct() @@ -1397,7 +1397,7 @@ def test_add_new_top_level_struct() -> None: ), ) - applied = UpdateSchema(None, schema=current_schema).union_by_name(observed_schema)._apply() + applied = UpdateSchema(transaction=None, schema=current_schema).union_by_name(observed_schema)._apply() # type: ignore assert 
applied.as_struct() == observed_schema.as_struct() @@ -1476,7 +1476,7 @@ def test_append_nested_struct() -> None: ) ) - applied = UpdateSchema(None, schema=current_schema).union_by_name(observed_schema)._apply() + applied = UpdateSchema(transaction=None, schema=current_schema).union_by_name(observed_schema)._apply() # type: ignore assert applied.as_struct() == observed_schema.as_struct() @@ -1541,7 +1541,7 @@ def test_append_nested_lists() -> None: required=False, ) ) - union = UpdateSchema(None, schema=current_schema).union_by_name(observed_schema)._apply() + union = UpdateSchema(transaction=None, schema=current_schema).union_by_name(observed_schema)._apply() # type: ignore expected = Schema( NestedField( @@ -1591,7 +1591,7 @@ def test_union_with_pa_schema(primitive_fields: NestedField) -> None: pa.field("baz", pa.bool_(), nullable=True), ]) - new_schema = UpdateSchema(None, schema=base_schema).union_by_name(pa_schema)._apply() + new_schema = UpdateSchema(transaction=None, schema=base_schema).union_by_name(pa_schema)._apply() # type: ignore expected_schema = Schema( NestedField(field_id=1, name="foo", field_type=StringType(), required=True), From d56ddddc4e81d9efcb1fbd57bd89ed99b424619d Mon Sep 17 00:00:00 2001 From: Kevin Liu Date: Thu, 29 Feb 2024 12:49:07 -0800 Subject: [PATCH 147/169] Allow non-string typed values in table properties (#469) * property accept int https://stackoverflow.com/questions/77304167/using-pydantic-to-change-int-to-string https://docs.pydantic.dev/latest/concepts/validators/\#field-validators * add tests * add integration tests * pr feedback * make validator reusable * show key when none --- pyiceberg/catalog/rest.py | 9 +++++--- pyiceberg/table/metadata.py | 6 ++++- pyiceberg/typedef.py | 2 +- pyiceberg/types.py | 9 ++++++++ tests/catalog/test_base.py | 22 ++++++++++++++++-- tests/catalog/test_sql.py | 39 +++++++++++++++++++++++++++++++- tests/integration/test_writes.py | 39 ++++++++++++++++++++++++++++++-- tests/table/test_init.py | 29 +++++++++++++++++++++++- 8 files changed, 144 insertions(+), 11 deletions(-) diff --git a/pyiceberg/catalog/rest.py b/pyiceberg/catalog/rest.py index e0e88ec450..2adeafb593 100644 --- a/pyiceberg/catalog/rest.py +++ b/pyiceberg/catalog/rest.py @@ -28,7 +28,7 @@ Union, ) -from pydantic import Field, ValidationError +from pydantic import Field, ValidationError, field_validator from requests import HTTPError, Session from tenacity import RetryCallState, retry, retry_if_exception_type, stop_after_attempt @@ -69,6 +69,7 @@ ) from pyiceberg.table.sorting import UNSORTED_SORT_ORDER, SortOrder, assign_fresh_sort_order_ids from pyiceberg.typedef import EMPTY_DICT, UTF8, IcebergBaseModel +from pyiceberg.types import transform_dict_value_to_str if TYPE_CHECKING: import pyarrow as pa @@ -147,6 +148,8 @@ class CreateTableRequest(IcebergBaseModel): write_order: Optional[SortOrder] = Field(alias="write-order") stage_create: bool = Field(alias="stage-create", default=False) properties: Properties = Field(default_factory=dict) + # validators + transform_properties_dict_value_to_str = field_validator('properties', mode='before')(transform_dict_value_to_str) class RegisterTableRequest(IcebergBaseModel): @@ -234,9 +237,9 @@ def _create_session(self) -> Session: # Sets the client side and server side SSL cert verification, if provided as properties. 
if ssl_config := self.properties.get(SSL): - if ssl_ca_bundle := ssl_config.get(CA_BUNDLE): # type: ignore + if ssl_ca_bundle := ssl_config.get(CA_BUNDLE): session.verify = ssl_ca_bundle - if ssl_client := ssl_config.get(CLIENT): # type: ignore + if ssl_client := ssl_config.get(CLIENT): if all(k in ssl_client for k in (CERT, KEY)): session.cert = (ssl_client[CERT], ssl_client[KEY]) elif ssl_client_cert := ssl_client.get(CERT): diff --git a/pyiceberg/table/metadata.py b/pyiceberg/table/metadata.py index c716915192..931b0cfe0a 100644 --- a/pyiceberg/table/metadata.py +++ b/pyiceberg/table/metadata.py @@ -28,7 +28,7 @@ Union, ) -from pydantic import Field, model_validator +from pydantic import Field, field_validator, model_validator from pydantic import ValidationError as PydanticValidationError from typing_extensions import Annotated @@ -49,6 +49,7 @@ IcebergRootModel, Properties, ) +from pyiceberg.types import transform_dict_value_to_str from pyiceberg.utils.datetime import datetime_to_millis CURRENT_SNAPSHOT_ID = "current-snapshot-id" @@ -218,6 +219,9 @@ class TableMetadataCommonFields(IcebergBaseModel): There is always a main branch reference pointing to the current-snapshot-id even if the refs map is null.""" + # validators + transform_properties_dict_value_to_str = field_validator('properties', mode='before')(transform_dict_value_to_str) + def snapshot_by_id(self, snapshot_id: int) -> Optional[Snapshot]: """Get the snapshot by snapshot_id.""" return next((snapshot for snapshot in self.snapshots if snapshot.snapshot_id == snapshot_id), None) diff --git a/pyiceberg/typedef.py b/pyiceberg/typedef.py index 56a3d3c72d..e57bf3490c 100644 --- a/pyiceberg/typedef.py +++ b/pyiceberg/typedef.py @@ -73,7 +73,7 @@ def __missing__(self, key: K) -> V: Identifier = Tuple[str, ...] -Properties = Dict[str, str] +Properties = Dict[str, Any] RecursiveDict = Dict[str, Union[str, "RecursiveDict"]] # Represents the literal value diff --git a/pyiceberg/types.py b/pyiceberg/types.py index eb215121dc..746f03ea0b 100644 --- a/pyiceberg/types.py +++ b/pyiceberg/types.py @@ -37,6 +37,7 @@ from typing import ( Any, ClassVar, + Dict, Literal, Optional, Tuple, @@ -61,6 +62,14 @@ FIXED_PARSER = ParseNumberFromBrackets(FIXED) +def transform_dict_value_to_str(dict: Dict[str, Any]) -> Dict[str, str]: + """Transform all values in the dictionary to string. 
Raise an error if any value is None.""" + for key, value in dict.items(): + if value is None: + raise ValueError(f"None type is not a supported value in properties: {key}") + return {k: str(v) for k, v in dict.items()} + + def _parse_decimal_type(decimal: Any) -> Tuple[int, int]: if isinstance(decimal, str): matches = DECIMAL_REGEX.search(decimal) diff --git a/tests/catalog/test_base.py b/tests/catalog/test_base.py index 21c7ec1521..c7d3f01ff1 100644 --- a/tests/catalog/test_base.py +++ b/tests/catalog/test_base.py @@ -26,6 +26,7 @@ import pyarrow as pa import pytest +from pydantic_core import ValidationError from pytest_lazyfixture import lazy_fixture from pyiceberg.catalog import ( @@ -255,13 +256,16 @@ def catalog() -> InMemoryCatalog: NAMESPACE_NOT_EMPTY_ERROR = "Namespace is not empty: \\('com', 'organization', 'department'\\)" -def given_catalog_has_a_table(catalog: InMemoryCatalog) -> Table: +def given_catalog_has_a_table( + catalog: InMemoryCatalog, + properties: Properties = EMPTY_DICT, +) -> Table: return catalog.create_table( identifier=TEST_TABLE_IDENTIFIER, schema=TEST_TABLE_SCHEMA, location=TEST_TABLE_LOCATION, partition_spec=TEST_TABLE_PARTITION_SPEC, - properties=TEST_TABLE_PROPERTIES, + properties=properties or TEST_TABLE_PROPERTIES, ) @@ -661,3 +665,17 @@ def test_add_column_with_statement(catalog: InMemoryCatalog) -> None: def test_catalog_repr(catalog: InMemoryCatalog) -> None: s = repr(catalog) assert s == "test.in.memory.catalog ()" + + +def test_table_properties_int_value(catalog: InMemoryCatalog) -> None: + # table properties can be set to int, but still serialized to string + property_with_int = {"property_name": 42} + given_table = given_catalog_has_a_table(catalog, properties=property_with_int) + assert isinstance(given_table.properties["property_name"], str) + + +def test_table_properties_raise_for_none_value(catalog: InMemoryCatalog) -> None: + property_with_none = {"property_name": None} + with pytest.raises(ValidationError) as exc_info: + _ = given_catalog_has_a_table(catalog, properties=property_with_none) + assert "None type is not a supported value in properties: property_name" in str(exc_info.value) diff --git a/tests/catalog/test_sql.py b/tests/catalog/test_sql.py index 9f4d4af4c7..0b869d6826 100644 --- a/tests/catalog/test_sql.py +++ b/tests/catalog/test_sql.py @@ -21,6 +21,7 @@ import pyarrow as pa import pytest +from pydantic_core import ValidationError from pytest_lazyfixture import lazy_fixture from sqlalchemy.exc import ArgumentError, IntegrityError @@ -640,7 +641,7 @@ def test_create_namespace_with_null_properties(catalog: SqlCatalog, database_nam catalog.create_namespace(namespace=database_name, properties={None: "value"}) # type: ignore with pytest.raises(IntegrityError): - catalog.create_namespace(namespace=database_name, properties={"key": None}) # type: ignore + catalog.create_namespace(namespace=database_name, properties={"key": None}) @pytest.mark.parametrize( @@ -915,3 +916,39 @@ def test_write_and_evolve(catalog: SqlCatalog, format_version: int) -> None: with txn.update_snapshot().fast_append() as snapshot_update: for data_file in _dataframe_to_data_files(table_metadata=txn.table_metadata, df=pa_table_with_column, io=tbl.io): snapshot_update.append_data_file(data_file) + + +@pytest.mark.parametrize( + 'catalog', + [ + lazy_fixture('catalog_memory'), + lazy_fixture('catalog_sqlite'), + lazy_fixture('catalog_sqlite_without_rowcount'), + ], +) +def test_table_properties_int_value(catalog: SqlCatalog, table_schema_simple: Schema, 
random_identifier: Identifier) -> None: + # table properties can be set to int, but still serialized to string + database_name, _table_name = random_identifier + catalog.create_namespace(database_name) + property_with_int = {"property_name": 42} + table = catalog.create_table(random_identifier, table_schema_simple, properties=property_with_int) + assert isinstance(table.properties["property_name"], str) + + +@pytest.mark.parametrize( + 'catalog', + [ + lazy_fixture('catalog_memory'), + lazy_fixture('catalog_sqlite'), + lazy_fixture('catalog_sqlite_without_rowcount'), + ], +) +def test_table_properties_raise_for_none_value( + catalog: SqlCatalog, table_schema_simple: Schema, random_identifier: Identifier +) -> None: + database_name, _table_name = random_identifier + catalog.create_namespace(database_name) + property_with_none = {"property_name": None} + with pytest.raises(ValidationError) as exc_info: + _ = catalog.create_table(random_identifier, table_schema_simple, properties=property_with_none) + assert "None type is not a supported value in properties: property_name" in str(exc_info.value) diff --git a/tests/integration/test_writes.py b/tests/integration/test_writes.py index 388e566bce..1eeec2b3f7 100644 --- a/tests/integration/test_writes.py +++ b/tests/integration/test_writes.py @@ -28,6 +28,7 @@ import pytest import pytz from pyarrow.fs import S3FileSystem +from pydantic_core import ValidationError from pyspark.sql import SparkSession from pytest_mock.plugin import MockerFixture @@ -403,7 +404,7 @@ def test_data_files(spark: SparkSession, session_catalog: Catalog, arrow_table_w @pytest.mark.integration -@pytest.mark.parametrize("format_version", ["1", "2"]) +@pytest.mark.parametrize("format_version", [1, 2]) @pytest.mark.parametrize( "properties, expected_compression_name", [ @@ -419,7 +420,7 @@ def test_write_parquet_compression_properties( spark: SparkSession, session_catalog: Catalog, arrow_table_with_null: pa.Table, - format_version: str, + format_version: int, properties: Dict[str, Any], expected_compression_name: str, ) -> None: @@ -654,3 +655,37 @@ def test_write_and_evolve(session_catalog: Catalog, format_version: int) -> None with txn.update_snapshot().fast_append() as snapshot_update: for data_file in _dataframe_to_data_files(table_metadata=txn.table_metadata, df=pa_table_with_column, io=tbl.io): snapshot_update.append_data_file(data_file) + + +@pytest.mark.integration +@pytest.mark.parametrize("format_version", [1, 2]) +def test_table_properties_int_value( + session_catalog: Catalog, + arrow_table_with_null: pa.Table, + format_version: int, +) -> None: + # table properties can be set to int, but still serialized to string + property_with_int = {"property_name": 42} + identifier = "default.test_table_properties_int_value" + + tbl = _create_table( + session_catalog, identifier, {"format-version": format_version, **property_with_int}, [arrow_table_with_null] + ) + assert isinstance(tbl.properties["property_name"], str) + + +@pytest.mark.integration +@pytest.mark.parametrize("format_version", [1, 2]) +def test_table_properties_raise_for_none_value( + session_catalog: Catalog, + arrow_table_with_null: pa.Table, + format_version: int, +) -> None: + property_with_none = {"property_name": None} + identifier = "default.test_table_properties_raise_for_none_value" + + with pytest.raises(ValidationError) as exc_info: + _ = _create_table( + session_catalog, identifier, {"format-version": format_version, **property_with_none}, [arrow_table_with_null] + ) + assert "None type is not a 
supported value in properties: property_name" in str(exc_info.value) diff --git a/tests/table/test_init.py b/tests/table/test_init.py index e6407b60cb..04efc5f402 100644 --- a/tests/table/test_init.py +++ b/tests/table/test_init.py @@ -17,10 +17,11 @@ # pylint:disable=redefined-outer-name import uuid from copy import copy -from typing import Dict +from typing import Any, Dict import pyarrow as pa import pytest +from pydantic import ValidationError from sortedcontainers import SortedList from pyiceberg.catalog.noop import NoopCatalog @@ -1081,3 +1082,29 @@ def test_schema_mismatch_additional_field(table_schema_simple: Schema) -> None: with pytest.raises(ValueError, match=expected): _check_schema(table_schema_simple, other_schema) + + +def test_table_properties(example_table_metadata_v2: Dict[str, Any]) -> None: + # metadata properties are all strings + for k, v in example_table_metadata_v2["properties"].items(): + assert isinstance(k, str) + assert isinstance(v, str) + metadata = TableMetadataV2(**example_table_metadata_v2) + for k, v in metadata.properties.items(): + assert isinstance(k, str) + assert isinstance(v, str) + + # property can be set to int, but still serialized as string + property_with_int = {"property_name": 42} + new_example_table_metadata_v2 = {**example_table_metadata_v2, "properties": property_with_int} + assert isinstance(new_example_table_metadata_v2["properties"]["property_name"], int) + new_metadata = TableMetadataV2(**new_example_table_metadata_v2) + assert isinstance(new_metadata.properties["property_name"], str) + + +def test_table_properties_raise_for_none_value(example_table_metadata_v2: Dict[str, Any]) -> None: + property_with_none = {"property_name": None} + example_table_metadata_v2 = {**example_table_metadata_v2, "properties": property_with_none} + with pytest.raises(ValidationError) as exc_info: + TableMetadataV2(**example_table_metadata_v2) + assert "None type is not a supported value in properties: property_name" in str(exc_info.value) From 4b7e9182a15c1da60c2d8668fabe063f8ab6b5b2 Mon Sep 17 00:00:00 2001 From: Adrian Qin <147659252+jqin61@users.noreply.github.com> Date: Thu, 29 Feb 2024 16:14:26 -0500 Subject: [PATCH 148/169] Construction of filenames for partitioned writes (#453) * PartitionKey Class And Tests * fix linting; add decimal input transform test * fix bool to path lower case; fix timestamptz tests; other pr comments * clean up * add uuid partition type * clean up; rename ambiguous function name --- pyiceberg/partitioning.py | 101 ++- pyiceberg/transforms.py | 5 + tests/conftest.py | 51 +- tests/integration/test_partitioning_key.py | 772 +++++++++++++++++++++ tests/integration/test_writes.py | 48 -- 5 files changed, 925 insertions(+), 52 deletions(-) create mode 100644 tests/integration/test_partitioning_key.py diff --git a/pyiceberg/partitioning.py b/pyiceberg/partitioning.py index cd5a957b22..6fa0286282 100644 --- a/pyiceberg/partitioning.py +++ b/pyiceberg/partitioning.py @@ -16,9 +16,21 @@ # under the License. 
from __future__ import annotations
+import uuid
from abc import ABC, abstractmethod
+from dataclasses import dataclass
+from datetime import date, datetime
from functools import cached_property, singledispatch
-from typing import Any, Dict, Generic, List, Optional, Tuple, TypeVar
+from typing import (
+ Any,
+ Dict,
+ Generic,
+ List,
+ Optional,
+ Tuple,
+ TypeVar,
+)
+from urllib.parse import quote
from pydantic import (
BeforeValidator,
@@ -41,8 +53,18 @@
YearTransform,
parse_transform,
)
-from pyiceberg.typedef import IcebergBaseModel
-from pyiceberg.types import NestedField, StructType
+from pyiceberg.typedef import IcebergBaseModel, Record
+from pyiceberg.types import (
+ DateType,
+ IcebergType,
+ NestedField,
+ PrimitiveType,
+ StructType,
+ TimestampType,
+ TimestamptzType,
+ UUIDType,
+)
+from pyiceberg.utils.datetime import date_to_days, datetime_to_micros
INITIAL_PARTITION_SPEC_ID = 0
PARTITION_FIELD_ID_START: int = 1000
@@ -199,6 +221,23 @@ def partition_type(self, schema: Schema) -> StructType:
nested_fields.append(NestedField(field.field_id, field.name, result_type, required=False))
return StructType(*nested_fields)
+ def partition_to_path(self, data: Record, schema: Schema) -> str:
+ partition_type = self.partition_type(schema)
+ field_types = partition_type.fields
+
+ field_strs = []
+ value_strs = []
+ for pos, value in enumerate(data.record_fields()):
+ partition_field = self.fields[pos]
+ value_str = partition_field.transform.to_human_string(field_types[pos].field_type, value=value)
+
+ value_str = quote(value_str, safe='')
+ value_strs.append(value_str)
+ field_strs.append(partition_field.name)
+
+ path = "/".join([field_str + "=" + value_str for field_str, value_str in zip(field_strs, value_strs)])
+ return path
+
UNPARTITIONED_PARTITION_SPEC = PartitionSpec(spec_id=0)
@@ -326,3 +365,59 @@ def _visit_partition_field(schema: Schema, field: PartitionField, visitor: Parti
return visitor.unknown(field.field_id, source_name, field.source_id, repr(transform))
else:
raise ValueError(f"Unknown transform {transform}")
+
+
+@dataclass(frozen=True)
+class PartitionFieldValue:
+ field: PartitionField
+ value: Any
+
+
+@dataclass(frozen=True)
+class PartitionKey:
+ raw_partition_field_values: List[PartitionFieldValue]
+ partition_spec: PartitionSpec
+ schema: Schema
+
+ @cached_property
+ def partition(self) -> Record: # partition key, transformed with the Iceberg internal representation as input
+ iceberg_typed_key_values = {}
+ for raw_partition_field_value in self.raw_partition_field_values:
+ partition_fields = self.partition_spec.source_id_to_fields_map[raw_partition_field_value.field.source_id]
+ if len(partition_fields) != 1:
+ raise ValueError("partition_fields must contain exactly one field.")
+ partition_field = partition_fields[0]
+ iceberg_type = self.schema.find_field(name_or_id=raw_partition_field_value.field.source_id).field_type
+ iceberg_typed_value = _to_partition_representation(iceberg_type, raw_partition_field_value.value)
+ transformed_value = partition_field.transform.transform(iceberg_type)(iceberg_typed_value)
+ iceberg_typed_key_values[partition_field.name] = transformed_value
+ return Record(**iceberg_typed_key_values)
+
+ def to_path(self) -> str:
+ return self.partition_spec.partition_to_path(self.partition, self.schema)
+
+
+@singledispatch
+def _to_partition_representation(type: IcebergType, value: Any) -> Any:
+ raise TypeError(f"Unsupported partition field type: {type}")
+
+
+@_to_partition_representation.register(TimestampType)
+@_to_partition_representation.register(TimestamptzType) +def _(type: IcebergType, value: Optional[datetime]) -> Optional[int]: + return datetime_to_micros(value) if value is not None else None + + +@_to_partition_representation.register(DateType) +def _(type: IcebergType, value: Optional[date]) -> Optional[int]: + return date_to_days(value) if value is not None else None + + +@_to_partition_representation.register(UUIDType) +def _(type: IcebergType, value: Optional[uuid.UUID]) -> Optional[str]: + return str(value) if value is not None else None + + +@_to_partition_representation.register(PrimitiveType) +def _(type: IcebergType, value: Optional[Any]) -> Optional[Any]: + return value diff --git a/pyiceberg/transforms.py b/pyiceberg/transforms.py index 9f499a3dd4..e678a77e69 100644 --- a/pyiceberg/transforms.py +++ b/pyiceberg/transforms.py @@ -655,6 +655,11 @@ def _(value: int, _type: IcebergType) -> str: return _int_to_human_string(_type, value) +@_human_string.register(bool) +def _(value: bool, _type: IcebergType) -> str: + return str(value).lower() + + @singledispatch def _int_to_human_string(_type: IcebergType, value: int) -> str: return str(value) diff --git a/tests/conftest.py b/tests/conftest.py index aa66f36046..5b488c70f0 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -46,9 +46,10 @@ import boto3 import pytest from moto import mock_aws +from pyspark.sql import SparkSession from pyiceberg import schema -from pyiceberg.catalog import Catalog +from pyiceberg.catalog import Catalog, load_catalog from pyiceberg.catalog.noop import NoopCatalog from pyiceberg.expressions import BoundReference from pyiceberg.io import ( @@ -1925,3 +1926,51 @@ def table_v2(example_table_metadata_v2: Dict[str, Any]) -> Table: @pytest.fixture def bound_reference_str() -> BoundReference[str]: return BoundReference(field=NestedField(1, "field", StringType(), required=False), accessor=Accessor(position=0, inner=None)) + + +@pytest.fixture(scope="session") +def session_catalog() -> Catalog: + return load_catalog( + "local", + **{ + "type": "rest", + "uri": "http://localhost:8181", + "s3.endpoint": "http://localhost:9000", + "s3.access-key-id": "admin", + "s3.secret-access-key": "password", + }, + ) + + +@pytest.fixture(scope="session") +def spark() -> SparkSession: + import importlib.metadata + import os + + spark_version = ".".join(importlib.metadata.version("pyspark").split(".")[:2]) + scala_version = "2.12" + iceberg_version = "1.4.3" + + os.environ["PYSPARK_SUBMIT_ARGS"] = ( + f"--packages org.apache.iceberg:iceberg-spark-runtime-{spark_version}_{scala_version}:{iceberg_version}," + f"org.apache.iceberg:iceberg-aws-bundle:{iceberg_version} pyspark-shell" + ) + os.environ["AWS_REGION"] = "us-east-1" + os.environ["AWS_ACCESS_KEY_ID"] = "admin" + os.environ["AWS_SECRET_ACCESS_KEY"] = "password" + + spark = ( + SparkSession.builder.appName("PyIceberg integration test") + .config("spark.sql.extensions", "org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions") + .config("spark.sql.catalog.integration", "org.apache.iceberg.spark.SparkCatalog") + .config("spark.sql.catalog.integration.catalog-impl", "org.apache.iceberg.rest.RESTCatalog") + .config("spark.sql.catalog.integration.uri", "http://localhost:8181") + .config("spark.sql.catalog.integration.io-impl", "org.apache.iceberg.aws.s3.S3FileIO") + .config("spark.sql.catalog.integration.warehouse", "s3://warehouse/wh/") + .config("spark.sql.catalog.integration.s3.endpoint", "http://localhost:9000") + 
.config("spark.sql.catalog.integration.s3.path-style-access", "true") + .config("spark.sql.defaultCatalog", "integration") + .getOrCreate() + ) + + return spark diff --git a/tests/integration/test_partitioning_key.py b/tests/integration/test_partitioning_key.py new file mode 100644 index 0000000000..12056bac1e --- /dev/null +++ b/tests/integration/test_partitioning_key.py @@ -0,0 +1,772 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# pylint:disable=redefined-outer-name +import uuid +from datetime import date, datetime, timedelta, timezone +from decimal import Decimal +from typing import Any, List + +import pytest +from pyspark.sql import SparkSession +from pyspark.sql.utils import AnalysisException + +from pyiceberg.catalog import Catalog +from pyiceberg.partitioning import PartitionField, PartitionFieldValue, PartitionKey, PartitionSpec +from pyiceberg.schema import Schema +from pyiceberg.transforms import ( + BucketTransform, + DayTransform, + HourTransform, + IdentityTransform, + MonthTransform, + TruncateTransform, + YearTransform, +) +from pyiceberg.typedef import Record +from pyiceberg.types import ( + BinaryType, + BooleanType, + DateType, + DecimalType, + DoubleType, + FixedType, + FloatType, + IntegerType, + LongType, + NestedField, + StringType, + TimestampType, + TimestamptzType, + UUIDType, +) + +TABLE_SCHEMA = Schema( + NestedField(field_id=1, name="boolean_field", field_type=BooleanType(), required=False), + NestedField(field_id=2, name="string_field", field_type=StringType(), required=False), + NestedField(field_id=3, name="string_long_field", field_type=StringType(), required=False), + NestedField(field_id=4, name="int_field", field_type=IntegerType(), required=False), + NestedField(field_id=5, name="long_field", field_type=LongType(), required=False), + NestedField(field_id=6, name="float_field", field_type=FloatType(), required=False), + NestedField(field_id=7, name="double_field", field_type=DoubleType(), required=False), + NestedField(field_id=8, name="timestamp_field", field_type=TimestampType(), required=False), + NestedField(field_id=9, name="timestamptz_field", field_type=TimestamptzType(), required=False), + NestedField(field_id=10, name="date_field", field_type=DateType(), required=False), + # NestedField(field_id=11, name="time", field_type=TimeType(), required=False), + NestedField(field_id=11, name="binary_field", field_type=BinaryType(), required=False), + NestedField(field_id=12, name="fixed_field", field_type=FixedType(16), required=False), + NestedField(field_id=13, name="decimal_field", field_type=DecimalType(5, 2), required=False), + NestedField(field_id=14, name="uuid_field", field_type=UUIDType(), required=False), +) + + +identifier = "default.test_table" + + +@pytest.mark.parametrize( + "partition_fields, 
partition_values, expected_partition_record, expected_hive_partition_path_slice, spark_create_table_sql_for_justification, spark_data_insert_sql_for_justification", + [ + # # Identity Transform + ( + [PartitionField(source_id=1, field_id=1001, transform=IdentityTransform(), name="boolean_field")], + [False], + Record(boolean_field=False), + "boolean_field=false", + f"""CREATE TABLE {identifier} ( + boolean_field boolean, + string_field string + ) + USING iceberg + PARTITIONED BY ( + identity(boolean_field) -- Partitioning by 'boolean_field' + ) + """, + f"""INSERT INTO {identifier} + VALUES + (false, 'Boolean field set to false'); + """, + ), + ( + [PartitionField(source_id=2, field_id=1001, transform=IdentityTransform(), name="string_field")], + ["sample_string"], + Record(string_field="sample_string"), + "string_field=sample_string", + f"""CREATE TABLE {identifier} ( + string_field string, + another_string_field string + ) + USING iceberg + PARTITIONED BY ( + identity(string_field) + ) + """, + f"""INSERT INTO {identifier} + VALUES + ('sample_string', 'Another string value') + """, + ), + ( + [PartitionField(source_id=4, field_id=1001, transform=IdentityTransform(), name="int_field")], + [42], + Record(int_field=42), + "int_field=42", + f"""CREATE TABLE {identifier} ( + int_field int, + string_field string + ) + USING iceberg + PARTITIONED BY ( + identity(int_field) + ) + """, + f"""INSERT INTO {identifier} + VALUES + (42, 'Associated string value for int 42') + """, + ), + ( + [PartitionField(source_id=5, field_id=1001, transform=IdentityTransform(), name="long_field")], + [1234567890123456789], + Record(long_field=1234567890123456789), + "long_field=1234567890123456789", + f"""CREATE TABLE {identifier} ( + long_field bigint, + string_field string + ) + USING iceberg + PARTITIONED BY ( + identity(long_field) + ) + """, + f"""INSERT INTO {identifier} + VALUES + (1234567890123456789, 'Associated string value for long 1234567890123456789') + """, + ), + ( + [PartitionField(source_id=6, field_id=1001, transform=IdentityTransform(), name="float_field")], + [3.14], + Record(float_field=3.14), + "float_field=3.14", + # spark writes differently as pyiceberg, Record[float_field=3.140000104904175], path:float_field=3.14 (Record has difference) + # so justification (compare expected value with spark behavior) would fail. + None, + None, + # f"""CREATE TABLE {identifier} ( + # float_field float, + # string_field string + # ) + # USING iceberg + # PARTITIONED BY ( + # identity(float_field) + # ) + # """, + # f"""INSERT INTO {identifier} + # VALUES + # (3.14, 'Associated string value for float 3.14') + # """ + ), + ( + [PartitionField(source_id=7, field_id=1001, transform=IdentityTransform(), name="double_field")], + [6.282], + Record(double_field=6.282), + "double_field=6.282", + # spark writes differently as pyiceberg, Record[double_field=6.2820000648498535] path:double_field=6.282 (Record has difference) + # so justification (compare expected value with spark behavior) would fail. 
+ None, + None, + # f"""CREATE TABLE {identifier} ( + # double_field double, + # string_field string + # ) + # USING iceberg + # PARTITIONED BY ( + # identity(double_field) + # ) + # """, + # f"""INSERT INTO {identifier} + # VALUES + # (6.282, 'Associated string value for double 6.282') + # """ + ), + ( + [PartitionField(source_id=8, field_id=1001, transform=IdentityTransform(), name="timestamp_field")], + [datetime(2023, 1, 1, 12, 0, 1, 999)], + Record(timestamp_field=1672574401000999), + "timestamp_field=2023-01-01T12%3A00%3A01.000999", + f"""CREATE TABLE {identifier} ( + timestamp_field timestamp_ntz, + string_field string + ) + USING iceberg + PARTITIONED BY ( + identity(timestamp_field) + ) + """, + f"""INSERT INTO {identifier} + VALUES + (CAST('2023-01-01 12:00:01.000999' AS TIMESTAMP_NTZ), 'Associated string value for timestamp 2023-01-01T12:00:00') + """, + ), + ( + [PartitionField(source_id=8, field_id=1001, transform=IdentityTransform(), name="timestamp_field")], + [datetime(2023, 1, 1, 12, 0, 1)], + Record(timestamp_field=1672574401000000), + "timestamp_field=2023-01-01T12%3A00%3A01", + f"""CREATE TABLE {identifier} ( + timestamp_field timestamp_ntz, + string_field string + ) + USING iceberg + PARTITIONED BY ( + identity(timestamp_field) + ) + """, + f"""INSERT INTO {identifier} + VALUES + (CAST('2023-01-01 12:00:01' AS TIMESTAMP_NTZ), 'Associated string value for timestamp 2023-01-01T12:00:00') + """, + ), + ( + [PartitionField(source_id=8, field_id=1001, transform=IdentityTransform(), name="timestamp_field")], + [datetime(2023, 1, 1, 12, 0, 0)], + Record(timestamp_field=1672574400000000), + "timestamp_field=2023-01-01T12%3A00%3A00", + # Spark writes differently as pyiceberg, so justification (compare expected value with spark behavior) would fail + # AssertionError: assert 'timestamp_field=2023-01-01T12%3A00%3A00' in 's3://warehouse/default/test_table/data/timestamp_field=2023-01-01T12%3A00/00000-5-f9dca69a-9fb7-4830-9ef6-62d3d7afc09e-00001.parquet' + # TLDR: CAST('2023-01-01 12:00:00' AS TIMESTAMP_NTZ) becomes 2023-01-01T12:00 in the hive partition path when spark writes it (without the seconds). + None, + None, + # f"""CREATE TABLE {identifier} ( + # timestamp_field timestamp_ntz, + # string_field string + # ) + # USING iceberg + # PARTITIONED BY ( + # identity(timestamp_field) + # ) + # """, + # f"""INSERT INTO {identifier} + # VALUES + # (CAST('2023-01-01 12:00:00' AS TIMESTAMP_NTZ), 'Associated string value for timestamp 2023-01-01T12:00:00') + # """ + ), + ( + [PartitionField(source_id=9, field_id=1001, transform=IdentityTransform(), name="timestamptz_field")], + [datetime(2023, 1, 1, 12, 0, 1, 999, tzinfo=timezone(timedelta(hours=3)))], + Record(timestamptz_field=1672563601000999), + "timestamptz_field=2023-01-01T09%3A00%3A01.000999%2B00%3A00", + # Spark writes differently as pyiceberg, so justification (compare expected value with spark behavior) would fail + # AssertionError: assert 'timestamptz_field=2023-01-01T09%3A00%3A01.000999%2B00%3A00' in 's3://warehouse/default/test_table/data/timestamptz_field=2023-01-01T09%3A00%3A01.000999Z/00000-5-b710fc4d-66b6-47f1-b8ae-6208f8aaa2d4-00001.parquet' + # TLDR: CAST('2023-01-01 12:00:01.000999+03:00' AS TIMESTAMP) becomes 2023-01-01T09:00:01.000999Z in the hive partition path when spark writes it (while iceberg: timestamptz_field=2023-01-01T09:00:01.000999+00:00). 
+ None, + None, + # f"""CREATE TABLE {identifier} ( + # timestamptz_field timestamp, + # string_field string + # ) + # USING iceberg + # PARTITIONED BY ( + # identity(timestamptz_field) + # ) + # """, + # f"""INSERT INTO {identifier} + # VALUES + # (CAST('2023-01-01 12:00:01.000999+03:00' AS TIMESTAMP), 'Associated string value for timestamp 2023-01-01 12:00:01.000999+03:00') + # """ + ), + ( + [PartitionField(source_id=10, field_id=1001, transform=IdentityTransform(), name="date_field")], + [date(2023, 1, 1)], + Record(date_field=19358), + "date_field=2023-01-01", + f"""CREATE TABLE {identifier} ( + date_field date, + string_field string + ) + USING iceberg + PARTITIONED BY ( + identity(date_field) + ) + """, + f"""INSERT INTO {identifier} + VALUES + (CAST('2023-01-01' AS DATE), 'Associated string value for date 2023-01-01') + """, + ), + ( + [PartitionField(source_id=14, field_id=1001, transform=IdentityTransform(), name="uuid_field")], + [uuid.UUID("f47ac10b-58cc-4372-a567-0e02b2c3d479")], + Record(uuid_field="f47ac10b-58cc-4372-a567-0e02b2c3d479"), + "uuid_field=f47ac10b-58cc-4372-a567-0e02b2c3d479", + f"""CREATE TABLE {identifier} ( + uuid_field string, + string_field string + ) + USING iceberg + PARTITIONED BY ( + identity(uuid_field) + ) + """, + f"""INSERT INTO {identifier} + VALUES + ('f47ac10b-58cc-4372-a567-0e02b2c3d479', 'Associated string value for UUID f47ac10b-58cc-4372-a567-0e02b2c3d479') + """, + ), + ( + [PartitionField(source_id=11, field_id=1001, transform=IdentityTransform(), name="binary_field")], + [b'example'], + Record(binary_field=b'example'), + "binary_field=ZXhhbXBsZQ%3D%3D", + f"""CREATE TABLE {identifier} ( + binary_field binary, + string_field string + ) + USING iceberg + PARTITIONED BY ( + identity(binary_field) + ) + """, + f"""INSERT INTO {identifier} + VALUES + (CAST('example' AS BINARY), 'Associated string value for binary `example`') + """, + ), + ( + [PartitionField(source_id=13, field_id=1001, transform=IdentityTransform(), name="decimal_field")], + [Decimal('123.45')], + Record(decimal_field=Decimal('123.45')), + "decimal_field=123.45", + f"""CREATE TABLE {identifier} ( + decimal_field decimal(5,2), + string_field string + ) + USING iceberg + PARTITIONED BY ( + identity(decimal_field) + ) + """, + f"""INSERT INTO {identifier} + VALUES + (123.45, 'Associated string value for decimal 123.45') + """, + ), + # # Year Month Day Hour Transform + # Month Transform + ( + [PartitionField(source_id=8, field_id=1001, transform=MonthTransform(), name="timestamp_field_month")], + [datetime(2023, 1, 1, 11, 55, 59, 999999)], + Record(timestamp_field_month=((2023 - 1970) * 12)), + "timestamp_field_month=2023-01", + f"""CREATE TABLE {identifier} ( + timestamp_field timestamp_ntz, + string_field string + ) + USING iceberg + PARTITIONED BY ( + month(timestamp_field) -- Partitioning by month from 'timestamp_field' + ) + """, + f"""INSERT INTO {identifier} + VALUES + (CAST('2023-01-01 11:55:59.999999' AS TIMESTAMP_NTZ), 'Event at 2023-01-01 11:55:59.999999'); + """, + ), + ( + [PartitionField(source_id=9, field_id=1001, transform=MonthTransform(), name="timestamptz_field_month")], + [datetime(2023, 1, 1, 12, 0, 1, 999, tzinfo=timezone(timedelta(hours=3)))], + Record(timestamptz_field_month=((2023 - 1970) * 12 + 1 - 1)), + "timestamptz_field_month=2023-01", + f"""CREATE TABLE {identifier} ( + timestamptz_field timestamp, + string_field string + ) + USING iceberg + PARTITIONED BY ( + month(timestamptz_field) + ) + """, + f"""INSERT INTO {identifier} + VALUES + 
(CAST('2023-01-01 12:00:01.000999+03:00' AS TIMESTAMP), 'Event at 2023-01-01 12:00:01.000999+03:00'); + """, + ), + ( + [PartitionField(source_id=10, field_id=1001, transform=MonthTransform(), name="date_field_month")], + [date(2023, 1, 1)], + Record(date_field_month=((2023 - 1970) * 12)), + "date_field_month=2023-01", + f"""CREATE TABLE {identifier} ( + date_field date, + string_field string + ) + USING iceberg + PARTITIONED BY ( + month(date_field) -- Partitioning by month from 'date_field' + ) + """, + f"""INSERT INTO {identifier} + VALUES + (CAST('2023-01-01' AS DATE), 'Event on 2023-01-01'); + """, + ), + # Year Transform + ( + [PartitionField(source_id=8, field_id=1001, transform=YearTransform(), name="timestamp_field_year")], + [datetime(2023, 1, 1, 11, 55, 59, 999999)], + Record(timestamp_field_year=(2023 - 1970)), + "timestamp_field_year=2023", + f"""CREATE TABLE {identifier} ( + timestamp_field timestamp, + string_field string + ) + USING iceberg + PARTITIONED BY ( + year(timestamp_field) -- Partitioning by year from 'timestamp_field' + ) + """, + f"""INSERT INTO {identifier} + VALUES + (CAST('2023-01-01 11:55:59.999999' AS TIMESTAMP), 'Event at 2023-01-01 11:55:59.999999'); + """, + ), + ( + [PartitionField(source_id=9, field_id=1001, transform=YearTransform(), name="timestamptz_field_year")], + [datetime(2023, 1, 1, 12, 0, 1, 999, tzinfo=timezone(timedelta(hours=3)))], + Record(timestamptz_field_year=53), + "timestamptz_field_year=2023", + f"""CREATE TABLE {identifier} ( + timestamptz_field timestamp, + string_field string + ) + USING iceberg + PARTITIONED BY ( + year(timestamptz_field) + ) + """, + f"""INSERT INTO {identifier} + VALUES + (CAST('2023-01-01 12:00:01.000999+03:00' AS TIMESTAMP), 'Event at 2023-01-01 12:00:01.000999+03:00'); + """, + ), + ( + [PartitionField(source_id=10, field_id=1001, transform=YearTransform(), name="date_field_year")], + [date(2023, 1, 1)], + Record(date_field_year=(2023 - 1970)), + "date_field_year=2023", + f"""CREATE TABLE {identifier} ( + date_field date, + string_field string + ) + USING iceberg + PARTITIONED BY ( + year(date_field) -- Partitioning by year from 'date_field' + ) + """, + f"""INSERT INTO {identifier} + VALUES + (CAST('2023-01-01' AS DATE), 'Event on 2023-01-01'); + """, + ), + # # Day Transform + ( + [PartitionField(source_id=8, field_id=1001, transform=DayTransform(), name="timestamp_field_day")], + [datetime(2023, 1, 1, 11, 55, 59, 999999)], + Record(timestamp_field_day=19358), + "timestamp_field_day=2023-01-01", + f"""CREATE TABLE {identifier} ( + timestamp_field timestamp, + string_field string + ) + USING iceberg + PARTITIONED BY ( + day(timestamp_field) -- Partitioning by day from 'timestamp_field' + ) + """, + f"""INSERT INTO {identifier} + VALUES + (CAST('2023-01-01' AS DATE), 'Event on 2023-01-01'); + """, + ), + ( + [PartitionField(source_id=9, field_id=1001, transform=DayTransform(), name="timestamptz_field_day")], + [datetime(2023, 1, 1, 12, 0, 1, 999, tzinfo=timezone(timedelta(hours=3)))], + Record(timestamptz_field_day=19358), + "timestamptz_field_day=2023-01-01", + f"""CREATE TABLE {identifier} ( + timestamptz_field timestamp, + string_field string + ) + USING iceberg + PARTITIONED BY ( + day(timestamptz_field) + ) + """, + f"""INSERT INTO {identifier} + VALUES + (CAST('2023-01-01 12:00:01.000999+03:00' AS TIMESTAMP), 'Event at 2023-01-01 12:00:01.000999+03:00'); + """, + ), + ( + [PartitionField(source_id=10, field_id=1001, transform=DayTransform(), name="date_field_day")], + [date(2023, 1, 1)], + 
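+            # 19358 = days from 1970-01-01 to 2023-01-01 (53 years * 365 days + 13 leap days),
+            # i.e. the DayTransform ordinal used by the Record and partition path below.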
Record(date_field_day=19358),
+            "date_field_day=2023-01-01",
+            f"""CREATE TABLE {identifier} (
+                date_field date,
+                string_field string
+            )
+            USING iceberg
+            PARTITIONED BY (
+                day(date_field)  -- Partitioning by day from 'date_field'
+            )
+            """,
+            f"""INSERT INTO {identifier}
+            VALUES
+            (CAST('2023-01-01' AS DATE), 'Event on 2023-01-01');
+            """,
+        ),
+        # Hour Transform
+        (
+            [PartitionField(source_id=8, field_id=1001, transform=HourTransform(), name="timestamp_field_hour")],
+            [datetime(2023, 1, 1, 11, 55, 59, 999999)],
+            Record(timestamp_field_hour=464603),
+            "timestamp_field_hour=2023-01-01-11",
+            f"""CREATE TABLE {identifier} (
+                timestamp_field timestamp,
+                string_field string
+            )
+            USING iceberg
+            PARTITIONED BY (
+                hour(timestamp_field)  -- Partitioning by hour from 'timestamp_field'
+            )
+            """,
+            f"""INSERT INTO {identifier}
+            VALUES
+            (CAST('2023-01-01 11:55:59.999999' AS TIMESTAMP), 'Event within the 11th hour of 2023-01-01');
+            """,
+        ),
+        (
+            [PartitionField(source_id=9, field_id=1001, transform=HourTransform(), name="timestamptz_field_hour")],
+            [datetime(2023, 1, 1, 12, 0, 1, 999, tzinfo=timezone(timedelta(hours=3)))],
+            Record(timestamptz_field_hour=464601),
+            "timestamptz_field_hour=2023-01-01-09",
+            f"""CREATE TABLE {identifier} (
+                timestamptz_field timestamp,
+                string_field string
+            )
+            USING iceberg
+            PARTITIONED BY (
+                hour(timestamptz_field)
+            )
+            """,
+            f"""INSERT INTO {identifier}
+            VALUES
+            (CAST('2023-01-01 12:00:01.000999+03:00' AS TIMESTAMP), 'Event at 2023-01-01 12:00:01.000999+03:00');
+            """,
+        ),
+        # Truncate Transform
+        (
+            [PartitionField(source_id=4, field_id=1001, transform=TruncateTransform(10), name="int_field_trunc")],
+            [12345],
+            Record(int_field_trunc=12340),
+            "int_field_trunc=12340",
+            f"""CREATE TABLE {identifier} (
+                int_field int,
+                string_field string
+            )
+            USING iceberg
+            PARTITIONED BY (
+                truncate(int_field, 10)  -- Truncating 'int_field' integer column to a width of 10
+            )
+            """,
+            f"""INSERT INTO {identifier}
+            VALUES
+            (12345, 'Sample data for int');
+            """,
+        ),
+        (
+            [PartitionField(source_id=5, field_id=1001, transform=TruncateTransform(2), name="bigint_field_trunc")],
+            [2**32 + 1],
+            Record(bigint_field_trunc=2**32),  # 4294967296
+            "bigint_field_trunc=4294967296",
+            f"""CREATE TABLE {identifier} (
+                bigint_field bigint,
+                string_field string
+            )
+            USING iceberg
+            PARTITIONED BY (
+                truncate(bigint_field, 2)  -- Truncating 'bigint_field' long column to a width of 2
+            )
+            """,
+            f"""INSERT INTO {identifier}
+            VALUES
+            (4294967297, 'Sample data for long');
+            """,
+        ),
+        (
+            [PartitionField(source_id=2, field_id=1001, transform=TruncateTransform(3), name="string_field_trunc")],
+            ["abcdefg"],
+            Record(string_field_trunc="abc"),
+            "string_field_trunc=abc",
+            f"""CREATE TABLE {identifier} (
+                string_field string,
+                another_string_field string
+            )
+            USING iceberg
+            PARTITIONED BY (
+                truncate(string_field, 3)  -- Truncating 'string_field' string column to a length of 3 characters
+            )
+            """,
+            f"""INSERT INTO {identifier}
+            VALUES
+            ('abcdefg', 'Another sample for string');
+            """,
+        ),
+        (
+            [PartitionField(source_id=13, field_id=1001, transform=TruncateTransform(width=5), name="decimal_field_trunc")],
+            [Decimal('678.93')],
+            Record(decimal_field_trunc=Decimal('678.90')),
+            "decimal_field_trunc=678.90",  # width=5 truncates the unscaled value 67893 down to a multiple of 5, i.e. 67890 -> 678.90
+            f"""CREATE TABLE {identifier} (
+                decimal_field decimal(5,2),
+                string_field string
+            )
+            USING iceberg
+            PARTITIONED BY (
+                truncate(decimal_field, 2)
+            )
+            """,
+            f"""INSERT INTO {identifier}
+            VALUES
+            (678.90, 'Associated string value for decimal 678.90')
+            """,
+        ),
+        (
+            [PartitionField(source_id=11, field_id=1001, transform=TruncateTransform(10), name="binary_field_trunc")],
+            [b'HELLOICEBERG'],
+            Record(binary_field_trunc=b'HELLOICEBE'),
+            "binary_field_trunc=SEVMTE9JQ0VCRQ%3D%3D",
+            f"""CREATE TABLE {identifier} (
+                binary_field binary,
+                string_field string
+            )
+            USING iceberg
+            PARTITIONED BY (
+                truncate(binary_field, 10)  -- Truncating 'binary_field' binary column to a length of 10 bytes
+            )
+            """,
+            f"""INSERT INTO {identifier}
+            VALUES
+            (binary('HELLOICEBERG'), 'Sample data for binary');
+            """,
+        ),
+        # Bucket Transform
+        (
+            [PartitionField(source_id=4, field_id=1001, transform=BucketTransform(2), name="int_field_bucket")],
+            [10],
+            Record(int_field_bucket=0),
+            "int_field_bucket=0",
+            f"""CREATE TABLE {identifier} (
+                int_field int,
+                string_field string
+            )
+            USING iceberg
+            PARTITIONED BY (
+                bucket(2, int_field)  -- Distributing 'int_field' across 2 buckets
+            )
+            """,
+            f"""INSERT INTO {identifier}
+            VALUES
+            (10, 'Integer with value 10');
+            """,
+        ),
+        # Test that multiple field combinations generate the partition Record and hive partition path correctly
+        (
+            [
+                PartitionField(source_id=8, field_id=1001, transform=YearTransform(), name="timestamp_field_year"),
+                PartitionField(source_id=10, field_id=1002, transform=DayTransform(), name="date_field_day"),
+            ],
+            [
+                datetime(2023, 1, 1, 11, 55, 59, 999999),
+                date(2023, 1, 1),
+            ],
+            Record(timestamp_field_year=53, date_field_day=19358),
+            "timestamp_field_year=2023/date_field_day=2023-01-01",
+            f"""CREATE TABLE {identifier} (
+                timestamp_field timestamp,
+                date_field date,
+                string_field string
+            )
+            USING iceberg
+            PARTITIONED BY (
+                year(timestamp_field),
+                day(date_field)
+            )
+            """,
+            f"""INSERT INTO {identifier}
+            VALUES
+            (CAST('2023-01-01 11:55:59.999999' AS TIMESTAMP), CAST('2023-01-01' AS DATE), 'some data');
+            """,
+        ),
+    ],
+)
+@pytest.mark.integration
+def test_partition_key(
+    session_catalog: Catalog,
+    spark: SparkSession,
+    partition_fields: List[PartitionField],
+    partition_values: List[Any],
+    expected_partition_record: Record,
+    expected_hive_partition_path_slice: str,
+    spark_create_table_sql_for_justification: str,
+    spark_data_insert_sql_for_justification: str,
+) -> None:
+    partition_field_values = [PartitionFieldValue(field, value) for field, value in zip(partition_fields, partition_values)]
+    spec = PartitionSpec(*partition_fields)
+
+    key = PartitionKey(
+        raw_partition_field_values=partition_field_values,
+        partition_spec=spec,
+        schema=TABLE_SCHEMA,
+    )
+
+    # key.partition is used to write the metadata in DataFile, ManifestFile and all layers above
+    assert key.partition == expected_partition_record
+    # key.to_path() generates the hive-partitioning part of the path of the parquet file to be written
+    assert key.to_path() == expected_hive_partition_path_slice
+
+    # Justify that the expected values are not made up but conform to Spark's behavior
+    if spark_create_table_sql_for_justification is not None and spark_data_insert_sql_for_justification is not None:
+        try:
+            spark.sql(f"drop table {identifier}")
+        except AnalysisException:
+            pass
+
+        spark.sql(spark_create_table_sql_for_justification)
+        spark.sql(spark_data_insert_sql_for_justification)
+
+        iceberg_table = session_catalog.load_table(identifier=identifier)
+        snapshot = iceberg_table.current_snapshot()
+        assert snapshot
+        spark_partition_for_justification = (
+
snapshot.manifests(iceberg_table.io)[0].fetch_manifest_entry(iceberg_table.io)[0].data_file.partition + ) + spark_path_for_justification = ( + snapshot.manifests(iceberg_table.io)[0].fetch_manifest_entry(iceberg_table.io)[0].data_file.file_path + ) + assert spark_partition_for_justification == expected_partition_record + assert expected_hive_partition_path_slice in spark_path_for_justification diff --git a/tests/integration/test_writes.py b/tests/integration/test_writes.py index 1eeec2b3f7..2b851e14e9 100644 --- a/tests/integration/test_writes.py +++ b/tests/integration/test_writes.py @@ -94,20 +94,6 @@ ) -@pytest.fixture(scope="session") -def session_catalog() -> Catalog: - return load_catalog( - "local", - **{ - "type": "rest", - "uri": "http://localhost:8181", - "s3.endpoint": "http://localhost:9000", - "s3.access-key-id": "admin", - "s3.secret-access-key": "password", - }, - ) - - @pytest.fixture(scope="session") def pa_schema() -> pa.Schema: return pa.schema([ @@ -231,40 +217,6 @@ def table_v1_v2_appended_with_null(session_catalog: Catalog, arrow_table_with_nu assert tbl.format_version == 2, f"Expected v2, got: v{tbl.format_version}" -@pytest.fixture(scope="session") -def spark() -> SparkSession: - import importlib.metadata - import os - - spark_version = ".".join(importlib.metadata.version("pyspark").split(".")[:2]) - scala_version = "2.12" - iceberg_version = "1.4.3" - - os.environ["PYSPARK_SUBMIT_ARGS"] = ( - f"--packages org.apache.iceberg:iceberg-spark-runtime-{spark_version}_{scala_version}:{iceberg_version}," - f"org.apache.iceberg:iceberg-aws-bundle:{iceberg_version} pyspark-shell" - ) - os.environ["AWS_REGION"] = "us-east-1" - os.environ["AWS_ACCESS_KEY_ID"] = "admin" - os.environ["AWS_SECRET_ACCESS_KEY"] = "password" - - spark = ( - SparkSession.builder.appName("PyIceberg integration test") - .config("spark.sql.extensions", "org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions") - .config("spark.sql.catalog.integration", "org.apache.iceberg.spark.SparkCatalog") - .config("spark.sql.catalog.integration.catalog-impl", "org.apache.iceberg.rest.RESTCatalog") - .config("spark.sql.catalog.integration.uri", "http://localhost:8181") - .config("spark.sql.catalog.integration.io-impl", "org.apache.iceberg.aws.s3.S3FileIO") - .config("spark.sql.catalog.integration.warehouse", "s3://warehouse/wh/") - .config("spark.sql.catalog.integration.s3.endpoint", "http://localhost:9000") - .config("spark.sql.catalog.integration.s3.path-style-access", "true") - .config("spark.sql.defaultCatalog", "integration") - .getOrCreate() - ) - - return spark - - @pytest.mark.integration @pytest.mark.parametrize("format_version", [1, 2]) def test_query_count(spark: SparkSession, format_version: int) -> None: From 36b56eb7bbc4cff91e4b9bfdf71ca0a7b8bbee5c Mon Sep 17 00:00:00 2001 From: Fokko Driesprong Date: Fri, 1 Mar 2024 01:38:12 +0100 Subject: [PATCH 149/169] Remove extraneous import (#485) --- tests/integration/test_writes.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/integration/test_writes.py b/tests/integration/test_writes.py index 2b851e14e9..f0d1c85797 100644 --- a/tests/integration/test_writes.py +++ b/tests/integration/test_writes.py @@ -32,7 +32,7 @@ from pyspark.sql import SparkSession from pytest_mock.plugin import MockerFixture -from pyiceberg.catalog import Catalog, Properties, Table, load_catalog +from pyiceberg.catalog import Catalog, Properties, Table from pyiceberg.catalog.sql import SqlCatalog from pyiceberg.exceptions import NoSuchTableError from 
pyiceberg.schema import Schema From 13bd343ddf62524fc889f333a97891c22d53d2e3 Mon Sep 17 00:00:00 2001 From: Kevin Liu Date: Sun, 3 Mar 2024 13:54:07 -0800 Subject: [PATCH 150/169] default spark session timezone to UTC in test (#494) --- tests/conftest.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/conftest.py b/tests/conftest.py index 5b488c70f0..21c036ec09 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1961,6 +1961,7 @@ def spark() -> SparkSession: spark = ( SparkSession.builder.appName("PyIceberg integration test") + .config("spark.sql.session.timeZone", "UTC") .config("spark.sql.extensions", "org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions") .config("spark.sql.catalog.integration", "org.apache.iceberg.spark.SparkCatalog") .config("spark.sql.catalog.integration.catalog-impl", "org.apache.iceberg.rest.RESTCatalog") From 3c225a75d3c8c1c3e5598dc1e02c6f8669e4e8d0 Mon Sep 17 00:00:00 2001 From: Kevin Liu Date: Sun, 3 Mar 2024 15:57:05 -0800 Subject: [PATCH 151/169] Fix dead links in docs (#493) --- .github/workflows/check-md-link.yml | 3 +++ mkdocs/docs/SUMMARY.md | 4 ++++ mkdocs/docs/configuration.md | 20 ++++++++++++++++++++ mkdocs/docs/index.md | 2 +- 4 files changed, 28 insertions(+), 1 deletion(-) diff --git a/.github/workflows/check-md-link.yml b/.github/workflows/check-md-link.yml index b91195e98a..eec019a19c 100644 --- a/.github/workflows/check-md-link.yml +++ b/.github/workflows/check-md-link.yml @@ -4,6 +4,9 @@ on: push: paths: - mkdocs/** + branches: + - 'main' + pull_request: jobs: markdown-link-check: diff --git a/mkdocs/docs/SUMMARY.md b/mkdocs/docs/SUMMARY.md index 40ba0bffd7..5cf753d4c3 100644 --- a/mkdocs/docs/SUMMARY.md +++ b/mkdocs/docs/SUMMARY.md @@ -17,6 +17,8 @@ + + - [Getting started](index.md) - [Configuration](configuration.md) - [CLI](cli.md) @@ -28,4 +30,6 @@ - [How to release](how-to-release.md) - [Code Reference](reference/) + + diff --git a/mkdocs/docs/configuration.md b/mkdocs/docs/configuration.md index f0eb56ebd5..11d4b0461e 100644 --- a/mkdocs/docs/configuration.md +++ b/mkdocs/docs/configuration.md @@ -81,6 +81,8 @@ For the FileIO there are several configuration options available: ### S3 + + | Key | Example | Description | | -------------------- | ------------------------ | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | | s3.endpoint | https://10.0.19.25/ | Configure an alternative endpoint of the S3 service for the FileIO to access. This could be used to use S3FileIO with any s3-compatible object storage service that has a different endpoint, or access a private S3 endpoint in a virtual private cloud. | @@ -91,8 +93,12 @@ For the FileIO there are several configuration options available: | s3.proxy-uri | http://my.proxy.com:8080 | Configure the proxy server to be used by the FileIO. | | s3.connect-timeout | 60.0 | Configure socket connection timeout, in seconds. | + + ### HDFS + + | Key | Example | Description | | -------------------- | ------------------- | ------------------------------------------------ | | hdfs.host | https://10.0.19.25/ | Configure the HDFS host to connect to | @@ -100,8 +106,12 @@ For the FileIO there are several configuration options available: | hdfs.user | user | Configure the HDFS username used for connection. 
| | hdfs.kerberos_ticket | kerberos_ticket | Configure the path to the Kerberos ticket cache. | + + ### Azure Data lake + + | Key | Example | Description | | ----------------------- | ----------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | | adlfs.connection-string | AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqF...;BlobEndpoint=http://localhost/ | A [connection string](https://learn.microsoft.com/en-us/azure/storage/common/storage-configure-connection-string). This could be used to use FileIO with any adlfs-compatible object storage service that has a different endpoint (like [azurite](https://github.com/azure/azurite)). | @@ -112,8 +122,12 @@ For the FileIO there are several configuration options available: | adlfs.client-id | ad667be4-b811-11ed-afa1-0242ac120002 | The client-id | | adlfs.client-secret | oCA3R6P\*ka#oa1Sms2J74z... | The client-secret | + + ### Google Cloud Storage + + | Key | Example | Description | | -------------------------- | ------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | | gcs.project-id | my-gcp-project | Configure Google Cloud Project for GCS FileIO. | @@ -128,6 +142,8 @@ For the FileIO there are several configuration options available: | gcs.default-location | US | Configure the default location where buckets are created, like 'US' or 'EUROPE-WEST3'. | | gcs.version-aware | False | Configure whether to support object versioning on the GCS bucket. | + + ## REST Catalog ```yaml @@ -145,6 +161,8 @@ catalog: cabundle: /absolute/path/to/cabundle.pem ``` + + | Key | Example | Description | | ---------------------- | ----------------------- | -------------------------------------------------------------------------------------------------- | | uri | https://rest-catalog/ws | URI identifying the REST Server | @@ -156,6 +174,8 @@ catalog: | rest.signing-name | execute-api | The service signing name to use when SigV4 signing a request | | rest.authorization-url | https://auth-service/cc | Authentication URL to use for client credentials authentication (default: uri + 'v1/oauth/tokens') | + + ### Headers in RESTCatalog To configure custom headers in RESTCatalog, include them in the catalog properties with the prefix `header.`. This diff --git a/mkdocs/docs/index.md b/mkdocs/docs/index.md index a8c2c6bd3c..1fee9cc69b 100644 --- a/mkdocs/docs/index.md +++ b/mkdocs/docs/index.md @@ -61,7 +61,7 @@ You either need to install `s3fs`, `adlfs`, `gcsfs`, or `pyarrow` to be able to ## Connecting to a catalog -Iceberg leverages the [catalog to have one centralized place to organize the tables](https://iceberg.apache.org/catalog/). This can be a traditional Hive catalog to store your Iceberg tables next to the rest, a vendor solution like the AWS Glue catalog, or an implementation of Icebergs' own [REST protocol](https://github.com/apache/iceberg/tree/main/open-api). Checkout the [configuration](configuration.md) page to find all the configuration details. 
+Iceberg leverages the [catalog to have one centralized place to organize the tables](https://iceberg.apache.org/concepts/catalog/). This can be a traditional Hive catalog to store your Iceberg tables next to the rest, a vendor solution like the AWS Glue catalog, or an implementation of Iceberg's own [REST protocol](https://github.com/apache/iceberg/tree/main/open-api). Check out the [configuration](configuration.md) page to find all the configuration details.
 
 For the sake of demonstration, we'll configure the catalog to use the `SqlCatalog` implementation, which will store information in a local `sqlite` database. We'll also configure the catalog to store data files in the local filesystem instead of an object store. This should not be used in production due to the limited scalability.
 

From 43eeee2ac2da824351fe64b1e446c3e52d285a12 Mon Sep 17 00:00:00 2001
From: Andre Luis Anastacio
Date: Tue, 5 Mar 2024 04:39:48 -0300
Subject: [PATCH 152/169] Update bug issue template (#496)

---
 .github/ISSUE_TEMPLATE/iceberg_bug_report.yml | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/.github/ISSUE_TEMPLATE/iceberg_bug_report.yml b/.github/ISSUE_TEMPLATE/iceberg_bug_report.yml
index 41110cf397..f907f681b1 100644
--- a/.github/ISSUE_TEMPLATE/iceberg_bug_report.yml
+++ b/.github/ISSUE_TEMPLATE/iceberg_bug_report.yml
@@ -9,7 +9,8 @@ body:
       description: What Apache Iceberg version are you using?
       multiple: false
       options:
-        - "0.5.0 (latest release)"
+        - "0.6.0 (latest release)"
+        - "0.5.0"
         - "0.4.0"
         - "0.3.0"
        - "0.2.0"

From 7688633ce72d9896b66ca1792bc7c3ae8d926e86 Mon Sep 17 00:00:00 2001
From: himadripal
Date: Mon, 4 Mar 2024 23:40:30 -0800
Subject: [PATCH 153/169] Add rest scope to the documentation (#495)

style fix by mdformat

Co-authored-by: hpal

---
 mkdocs/docs/configuration.md | 21 +++++++++++----------
 1 file changed, 11 insertions(+), 10 deletions(-)

diff --git a/mkdocs/docs/configuration.md b/mkdocs/docs/configuration.md
index 11d4b0461e..51ce912a8e 100644
--- a/mkdocs/docs/configuration.md
+++ b/mkdocs/docs/configuration.md
@@ -163,16 +163,17 @@ catalog:
-| Key | Example | Description |
-| ---------------------- | ----------------------- | -------------------------------------------------------------------------------------------------- |
-| uri | https://rest-catalog/ws | URI identifying the REST Server |
-| ugi | t-1234:secret | Hadoop UGI for Hive client. |
-| credential | t-1234:secret | Credential to use for OAuth2 credential flow when initializing the catalog |
-| token | FEW23.DFSDF.FSDF | Bearer token value to use for `Authorization` header |
-| rest.sigv4-enabled | true | Sign requests to the REST Server using AWS SigV4 protocol |
-| rest.signing-region | us-east-1 | The region to use when SigV4 signing a request |
-| rest.signing-name | execute-api | The service signing name to use when SigV4 signing a request |
-| rest.authorization-url | https://auth-service/cc | Authentication URL to use for client credentials authentication (default: uri + 'v1/oauth/tokens') |
+| Key | Example | Description |
+| ---------------------- | -------------------------------- | -------------------------------------------------------------------------------------------------- |
+| uri | https://rest-catalog/ws | URI identifying the REST Server |
+| ugi | t-1234:secret | Hadoop UGI for Hive client.
| +| credential | t-1234:secret | Credential to use for OAuth2 credential flow when initializing the catalog | +| token | FEW23.DFSDF.FSDF | Bearer token value to use for `Authorization` header | +| scope | openid offline corpds:ds:profile | Desired scope of the requested security token (default : catalog) | +| rest.sigv4-enabled | true | Sign requests to the REST Server using AWS SigV4 protocol | +| rest.signing-region | us-east-1 | The region to use when SigV4 signing a request | +| rest.signing-name | execute-api | The service signing name to use when SigV4 signing a request | +| rest.authorization-url | https://auth-service/cc | Authentication URL to use for client credentials authentication (default: uri + 'v1/oauth/tokens') | From 0e1fe8a420cc01285c4b50cce617e919b2c1557d Mon Sep 17 00:00:00 2001 From: himadripal Date: Mon, 4 Mar 2024 23:41:03 -0800 Subject: [PATCH 154/169] Add scope as configurable option (#484) added scope as configurable option, defaults to `CATALOG_SCOPE` resolve conflicts. change to constant style fix Co-authored-by: hpal --- pyiceberg/catalog/rest.py | 6 +++++- tests/catalog/test_rest.py | 38 ++++++++++++++++++++++++++++++++++++++ 2 files changed, 43 insertions(+), 1 deletion(-) diff --git a/pyiceberg/catalog/rest.py b/pyiceberg/catalog/rest.py index 2adeafb593..e7f0ddd899 100644 --- a/pyiceberg/catalog/rest.py +++ b/pyiceberg/catalog/rest.py @@ -294,7 +294,11 @@ def _fetch_access_token(self, session: Session, credential: str) -> str: client_id, client_secret = credential.split(SEMICOLON) else: client_id, client_secret = None, credential - data = {GRANT_TYPE: CLIENT_CREDENTIALS, CLIENT_ID: client_id, CLIENT_SECRET: client_secret, SCOPE: CATALOG_SCOPE} + + # take scope from properties or use default CATALOG_SCOPE + scope = self.properties.get(SCOPE) or CATALOG_SCOPE + + data = {GRANT_TYPE: CLIENT_CREDENTIALS, CLIENT_ID: client_id, CLIENT_SECRET: client_secret, SCOPE: scope} response = session.post( url=self.auth_url, data=data, headers={**session.headers, "Content-type": "application/x-www-form-urlencoded"} ) diff --git a/tests/catalog/test_rest.py b/tests/catalog/test_rest.py index c4668a71ec..20fdbfa4ea 100644 --- a/tests/catalog/test_rest.py +++ b/tests/catalog/test_rest.py @@ -46,6 +46,7 @@ TEST_CREDENTIALS = "client:secret" TEST_AUTH_URL = "https://auth-endpoint/" TEST_TOKEN = "some_jwt_token" +TEST_SCOPE = "openid_offline_corpds_ds_profile" TEST_HEADERS = { "Content-type": "application/json", "X-Client-Version": "0.14.1", @@ -136,6 +137,43 @@ def test_token_200_without_optional_fields(rest_mock: Mocker) -> None: ) +def test_token_with_default_scope(rest_mock: Mocker) -> None: + mock_request = rest_mock.post( + f"{TEST_URI}v1/oauth/tokens", + json={ + "access_token": TEST_TOKEN, + "token_type": "Bearer", + "expires_in": 86400, + "issued_token_type": "urn:ietf:params:oauth:token-type:access_token", + }, + status_code=200, + request_headers=OAUTH_TEST_HEADERS, + ) + assert ( + RestCatalog("rest", uri=TEST_URI, credential=TEST_CREDENTIALS)._session.headers["Authorization"] == f"Bearer {TEST_TOKEN}" + ) + assert "catalog" in mock_request.last_request.text + + +def test_token_with_custom_scope(rest_mock: Mocker) -> None: + mock_request = rest_mock.post( + f"{TEST_URI}v1/oauth/tokens", + json={ + "access_token": TEST_TOKEN, + "token_type": "Bearer", + "expires_in": 86400, + "issued_token_type": "urn:ietf:params:oauth:token-type:access_token", + }, + status_code=200, + request_headers=OAUTH_TEST_HEADERS, + ) + assert ( + RestCatalog("rest", uri=TEST_URI, 
credential=TEST_CREDENTIALS, scope=TEST_SCOPE)._session.headers["Authorization"]
+        == f"Bearer {TEST_TOKEN}"
+    )
+    assert TEST_SCOPE in mock_request.last_request.text
+
+
 def test_token_200_w_auth_url(rest_mock: Mocker) -> None:
     rest_mock.post(
         TEST_AUTH_URL,

From 9d01b02f14e761b36fe0b88a82274037f7ced5a7 Mon Sep 17 00:00:00 2001
From: Kevin Liu
Date: Mon, 4 Mar 2024 23:47:13 -0800
Subject: [PATCH 155/169] Explicitly check for `schema_id` in tests (#487)

---
 tests/catalog/test_base.py | 8 ++---
 tests/integration/test_partition_evolution.py | 2 +-
 tests/integration/test_reads.py | 1 -
 tests/integration/test_rest_schema.py | 4 +--
 tests/table/test_init.py | 31 +++++++++++--------
 tests/table/test_metadata.py | 1 -
 6 files changed, 25 insertions(+), 22 deletions(-)

diff --git a/tests/catalog/test_base.py b/tests/catalog/test_base.py
index c7d3f01ff1..1defb0996f 100644
--- a/tests/catalog/test_base.py
+++ b/tests/catalog/test_base.py
@@ -614,9 +614,9 @@ def test_add_column(catalog: InMemoryCatalog) -> None:
         NestedField(field_id=2, name="y", field_type=LongType(), required=True, doc="comment"),
         NestedField(field_id=3, name="z", field_type=LongType(), required=True),
         NestedField(field_id=4, name="new_column1", field_type=IntegerType(), required=False),
-        schema_id=0,
         identifier_field_ids=[],
     )
+    assert given_table.schema().schema_id == 1
 
     transaction = given_table.transaction()
     transaction.update_schema().add_column(path="new_column2", field_type=IntegerType(), doc="doc").commit()
@@ -628,9 +628,9 @@ def test_add_column(catalog: InMemoryCatalog) -> None:
         NestedField(field_id=3, name="z", field_type=LongType(), required=True),
         NestedField(field_id=4, name="new_column1", field_type=IntegerType(), required=False),
         NestedField(field_id=5, name="new_column2", field_type=IntegerType(), required=False, doc="doc"),
-        schema_id=0,
         identifier_field_ids=[],
     )
+    assert given_table.schema().schema_id == 2
 
 
 def test_add_column_with_statement(catalog: InMemoryCatalog) -> None:
@@ -644,9 +644,9 @@ def test_add_column_with_statement(catalog: InMemoryCatalog) -> None:
         NestedField(field_id=2, name="y", field_type=LongType(), required=True, doc="comment"),
         NestedField(field_id=3, name="z", field_type=LongType(), required=True),
         NestedField(field_id=4, name="new_column1", field_type=IntegerType(), required=False),
-        schema_id=0,
         identifier_field_ids=[],
     )
+    assert given_table.schema().schema_id == 1
 
     with given_table.transaction() as tx:
         tx.update_schema().add_column(path="new_column2", field_type=IntegerType(), doc="doc").commit()
@@ -657,9 +657,9 @@
         NestedField(field_id=3, name="z", field_type=LongType(), required=True),
         NestedField(field_id=4, name="new_column1", field_type=IntegerType(), required=False),
         NestedField(field_id=5, name="new_column2", field_type=IntegerType(), required=False, doc="doc"),
-        schema_id=0,
         identifier_field_ids=[],
     )
+    assert given_table.schema().schema_id == 2
 
 
 def test_catalog_repr(catalog: InMemoryCatalog) -> None:
diff --git a/tests/integration/test_partition_evolution.py b/tests/integration/test_partition_evolution.py
index 16feef565d..85ae32374d 100644
--- a/tests/integration/test_partition_evolution.py
+++ b/tests/integration/test_partition_evolution.py
@@ -419,9 +419,9 @@ def test_change_specs_and_schema_transaction(catalog: Catalog) -> None:
         NestedField(field_id=2, name='event_ts', field_type=TimestampType(), required=False),
         NestedField(field_id=3, name='str', field_type=StringType(), required=False),
NestedField(field_id=4, name='col_string', field_type=StringType(), required=False), - schema_id=1, identifier_field_ids=[], ) + assert table.schema().schema_id == 1 @pytest.mark.integration diff --git a/tests/integration/test_reads.py b/tests/integration/test_reads.py index c03bc78a18..072fd7db25 100644 --- a/tests/integration/test_reads.py +++ b/tests/integration/test_reads.py @@ -93,7 +93,6 @@ def create_table(catalog: Catalog) -> Table: NestedField(field_id=2, name="int", field_type=IntegerType(), required=True), NestedField(field_id=3, name="bool", field_type=BooleanType(), required=False), NestedField(field_id=4, name="datetime", field_type=TimestampType(), required=False), - schema_id=1, ) return catalog.create_table(identifier=TABLE_NAME, schema=schema) diff --git a/tests/integration/test_rest_schema.py b/tests/integration/test_rest_schema.py index 17fb338080..4c758e4c3e 100644 --- a/tests/integration/test_rest_schema.py +++ b/tests/integration/test_rest_schema.py @@ -361,7 +361,6 @@ def test_revert_changes(simple_table: Table, table_schema_simple: Schema) -> Non NestedField(field_id=1, name='foo', field_type=StringType(), required=False), NestedField(field_id=2, name='bar', field_type=IntegerType(), required=True), NestedField(field_id=3, name='baz', field_type=BooleanType(), required=False), - schema_id=0, identifier_field_ids=[2], ), 1: Schema( @@ -369,11 +368,12 @@ def test_revert_changes(simple_table: Table, table_schema_simple: Schema) -> Non NestedField(field_id=2, name='bar', field_type=IntegerType(), required=True), NestedField(field_id=3, name='baz', field_type=BooleanType(), required=False), NestedField(field_id=4, name='data', field_type=IntegerType(), required=False), - schema_id=1, identifier_field_ids=[2], ), } assert simple_table.schema().schema_id == 0 + assert simple_table.schemas()[0].schema_id == 0 + assert simple_table.schemas()[1].schema_id == 1 @pytest.mark.integration diff --git a/tests/table/test_init.py b/tests/table/test_init.py index 04efc5f402..b8097f5fcf 100644 --- a/tests/table/test_init.py +++ b/tests/table/test_init.py @@ -107,26 +107,26 @@ def test_schema(table_v2: Table) -> None: NestedField(field_id=1, name="x", field_type=LongType(), required=True), NestedField(field_id=2, name="y", field_type=LongType(), required=True, doc="comment"), NestedField(field_id=3, name="z", field_type=LongType(), required=True), - schema_id=1, identifier_field_ids=[1, 2], ) + assert table_v2.schema().schema_id == 1 def test_schemas(table_v2: Table) -> None: assert table_v2.schemas() == { 0: Schema( NestedField(field_id=1, name="x", field_type=LongType(), required=True), - schema_id=0, identifier_field_ids=[], ), 1: Schema( NestedField(field_id=1, name="x", field_type=LongType(), required=True), NestedField(field_id=2, name="y", field_type=LongType(), required=True, doc="comment"), NestedField(field_id=3, name="z", field_type=LongType(), required=True), - schema_id=1, identifier_field_ids=[1, 2], ), } + assert table_v2.schemas()[0].schema_id == 0 + assert table_v2.schemas()[1].schema_id == 1 def test_spec(table_v2: Table) -> None: @@ -266,31 +266,34 @@ def test_table_scan_ref_does_not_exists(table_v2: Table) -> None: def test_table_scan_projection_full_schema(table_v2: Table) -> None: scan = table_v2.scan() - assert scan.select("x", "y", "z").projection() == Schema( + projection_schema = scan.select("x", "y", "z").projection() + assert projection_schema == Schema( NestedField(field_id=1, name="x", field_type=LongType(), required=True), NestedField(field_id=2, 
name="y", field_type=LongType(), required=True, doc="comment"), NestedField(field_id=3, name="z", field_type=LongType(), required=True), - schema_id=1, identifier_field_ids=[1, 2], ) + assert projection_schema.schema_id == 1 def test_table_scan_projection_single_column(table_v2: Table) -> None: scan = table_v2.scan() - assert scan.select("y").projection() == Schema( + projection_schema = scan.select("y").projection() + assert projection_schema == Schema( NestedField(field_id=2, name="y", field_type=LongType(), required=True, doc="comment"), - schema_id=1, identifier_field_ids=[2], ) + assert projection_schema.schema_id == 1 def test_table_scan_projection_single_column_case_sensitive(table_v2: Table) -> None: scan = table_v2.scan() - assert scan.with_case_sensitive(False).select("Y").projection() == Schema( + projection_schema = scan.with_case_sensitive(False).select("Y").projection() + assert projection_schema == Schema( NestedField(field_id=2, name="y", field_type=LongType(), required=True, doc="comment"), - schema_id=1, identifier_field_ids=[2], ) + assert projection_schema.schema_id == 1 def test_table_scan_projection_unknown_column(table_v2: Table) -> None: @@ -983,20 +986,22 @@ def test_correct_schema() -> None: ) # Should use the current schema, instead the one from the snapshot - assert t.scan().projection() == Schema( + projection_schema = t.scan().projection() + assert projection_schema == Schema( NestedField(field_id=1, name='x', field_type=LongType(), required=True), NestedField(field_id=2, name='y', field_type=LongType(), required=True), NestedField(field_id=3, name='z', field_type=LongType(), required=True), - schema_id=1, identifier_field_ids=[1, 2], ) + assert projection_schema.schema_id == 1 # When we explicitly filter on the commit, we want to have the schema that's linked to the snapshot - assert t.scan(snapshot_id=123).projection() == Schema( + projection_schema = t.scan(snapshot_id=123).projection() + assert projection_schema == Schema( NestedField(field_id=1, name='x', field_type=LongType(), required=True), - schema_id=0, identifier_field_ids=[], ) + assert projection_schema.schema_id == 0 with pytest.warns(UserWarning, match="Metadata does not contain schema with id: 10"): t.scan(snapshot_id=234).projection() diff --git a/tests/table/test_metadata.py b/tests/table/test_metadata.py index 97a7931cbb..c05700ecbb 100644 --- a/tests/table/test_metadata.py +++ b/tests/table/test_metadata.py @@ -120,7 +120,6 @@ def test_v1_metadata_parsing_directly(example_table_metadata_v1: Dict[str, Any]) NestedField(field_id=1, name="x", field_type=LongType(), required=True), NestedField(field_id=2, name="y", field_type=LongType(), required=True, doc="comment"), NestedField(field_id=3, name="z", field_type=LongType(), required=True), - schema_id=0, identifier_field_ids=[], ) ] From be8152994efc148021d6c6d5bca150e97c1676c0 Mon Sep 17 00:00:00 2001 From: Michel Rouly Date: Tue, 5 Mar 2024 04:02:33 -0500 Subject: [PATCH 156/169] add support for glue.id (#490) --- pyiceberg/catalog/glue.py | 26 ++++++++++++++++++++++++++ tests/catalog/test_glue.py | 21 +++++++++++++++++++++ 2 files changed, 47 insertions(+) diff --git a/pyiceberg/catalog/glue.py b/pyiceberg/catalog/glue.py index 089a30ba61..bd902cd1ff 100644 --- a/pyiceberg/catalog/glue.py +++ b/pyiceberg/catalog/glue.py @@ -93,6 +93,13 @@ if TYPE_CHECKING: import pyarrow as pa + +# There is a unique Glue metastore in each AWS account and each AWS region. 
By default, GlueCatalog chooses the Glue +# metastore to use based on the user's default AWS client credential and region setup. You can specify the Glue catalog +# ID through glue.id catalog property to point to a Glue catalog in a different AWS account. The Glue catalog ID is your +# numeric AWS account ID. +GLUE_ID = "glue.id" + # If Glue should skip archiving an old table version when creating a new version in a commit. By # default, Glue archives all old table versions after an UpdateTable call, but Glue has a default # max number of archived table versions (can be increased). So for streaming use case with lots @@ -252,6 +259,22 @@ def _construct_database_input(database_name: str, properties: Properties) -> Dat return database_input +def _register_glue_catalog_id_with_glue_client(glue: GlueClient, glue_catalog_id: str) -> None: + """ + Register the Glue Catalog ID (AWS Account ID) as a parameter on all Glue client methods. + + It's more ergonomic to do this than to pass the CatalogId as a parameter to every client call since it's an optional + parameter and boto3 does not support 'None' values for missing parameters. + """ + event_system = glue.meta.events + + def add_glue_catalog_id(params: Dict[str, str], **kwargs: Any) -> None: + if "CatalogId" not in params: + params["CatalogId"] = glue_catalog_id + + event_system.register("provide-client-params.glue", add_glue_catalog_id) + + class GlueCatalog(Catalog): def __init__(self, name: str, **properties: Any): super().__init__(name, **properties) @@ -266,6 +289,9 @@ def __init__(self, name: str, **properties: Any): ) self.glue: GlueClient = session.client("glue") + if glue_catalog_id := properties.get(GLUE_ID): + _register_glue_catalog_id_with_glue_client(self.glue, glue_catalog_id) + def _convert_glue_to_iceberg(self, glue_table: TableTypeDef) -> Table: properties: Properties = glue_table["Parameters"] diff --git a/tests/catalog/test_glue.py b/tests/catalog/test_glue.py index 6e0196c1a2..6d44d92724 100644 --- a/tests/catalog/test_glue.py +++ b/tests/catalog/test_glue.py @@ -206,6 +206,27 @@ def test_create_table_with_no_database( test_catalog.create_table(identifier=identifier, schema=table_schema_nested) +@mock_aws +def test_create_table_with_glue_catalog_id( + _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str +) -> None: + catalog_name = "glue" + catalog_id = "444444444444" + identifier = (database_name, table_name) + test_catalog = GlueCatalog( + catalog_name, **{"s3.endpoint": moto_endpoint_url, "warehouse": f"s3://{BUCKET_NAME}", "glue.id": catalog_id} + ) + test_catalog.create_namespace(namespace=database_name) + table = test_catalog.create_table(identifier, table_schema_nested) + assert table.identifier == (catalog_name,) + identifier + assert TABLE_METADATA_LOCATION_REGEX.match(table.metadata_location) + assert test_catalog._parse_metadata_version(table.metadata_location) == 0 + + glue = boto3.client("glue") + databases = glue.get_databases() + assert databases["DatabaseList"][0]["CatalogId"] == catalog_id + + @mock_aws def test_create_duplicated_table( _bucket_initialize: None, moto_endpoint_url: str, table_schema_nested: Schema, database_name: str, table_name: str From 14c021be0c7ad7570352c9f17077d67010d40e01 Mon Sep 17 00:00:00 2001 From: Sung Yun <107272191+syun64@users.noreply.github.com> Date: Tue, 5 Mar 2024 18:41:36 -0700 Subject: [PATCH 157/169] [Bug Fix] cast None `current-snapshot-id` as -1 for Backwards Compatibility (#473) --- 
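Notes (a hedged sketch, not authoritative usage): the flag below can be exercised as in
tests/test_serializers.py and tests/utils/test_config.py further down; the `PYICEBERG_`
environment-variable prefix and the `Config().get_bool` accessor are taken from those tests.

```python
import os

from pyiceberg.utils.config import Config

# Enable the legacy behavior via the environment; a `legacy-current-snapshot-id: "True"`
# entry in `.pyiceberg.yaml` works as well, per the docs change in this patch.
os.environ["PYICEBERG_LEGACY_CURRENT_SNAPSHOT_ID"] = "True"

# With the flag set, ToOutputFile.table_metadata keeps None fields in the JSON dump,
# so a missing current-snapshot-id is serialized as -1 for pre-1.4.0 Java readers.
assert Config().get_bool("legacy-current-snapshot-id")
```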
 mkdocs/docs/configuration.md | 4 +++
 pyiceberg/serializers.py | 6 ++++-
 pyiceberg/table/metadata.py | 9 ++++++-
 pyiceberg/utils/concurrent.py | 11 +-------
 pyiceberg/utils/config.py | 17 ++++++++++++
 tests/test_serializers.py | 50 +++++++++++++++++++++++++++++++++++
 tests/utils/test_config.py | 17 ++++++++++++
 7 files changed, 102 insertions(+), 12 deletions(-)
 create mode 100644 tests/test_serializers.py

diff --git a/mkdocs/docs/configuration.md b/mkdocs/docs/configuration.md
index 51ce912a8e..5afd6d805b 100644
--- a/mkdocs/docs/configuration.md
+++ b/mkdocs/docs/configuration.md
@@ -284,3 +284,7 @@ catalog:
 # Concurrency
 
 PyIceberg uses multiple threads to parallelize operations. The number of workers can be configured by supplying a `max-workers` entry in the configuration file, or by setting the `PYICEBERG_MAX_WORKERS` environment variable. The default value depends on the system hardware and Python version. See [the Python documentation](https://docs.python.org/3/library/concurrent.futures.html#threadpoolexecutor) for more details.
+
+# Backward Compatibility
+
+Previous versions (`<1.4.0`) of the Java implementation incorrectly assume the optional attribute `current-snapshot-id` to be a required attribute in TableMetadata. This means that if `current-snapshot-id` is missing from the metadata file (e.g. on table creation), the application will throw an exception and will not be able to load the table. This assumption has been corrected in more recent Iceberg versions. However, it is possible to force PyIceberg to create a table with a metadata file that is compatible with previous versions. This can be configured by setting the `legacy-current-snapshot-id` entry to "True" in the configuration file, or by setting the `PYICEBERG_LEGACY_CURRENT_SNAPSHOT_ID` environment variable. Refer to the [PR discussion](https://github.com/apache/iceberg-python/pull/473) for more details on the issue.
diff --git a/pyiceberg/serializers.py b/pyiceberg/serializers.py
index 6a580ead80..e2994884c6 100644
--- a/pyiceberg/serializers.py
+++ b/pyiceberg/serializers.py
@@ -24,6 +24,7 @@
 from pyiceberg.io import InputFile, InputStream, OutputFile
 from pyiceberg.table.metadata import TableMetadata, TableMetadataUtil
 from pyiceberg.typedef import UTF8
+from pyiceberg.utils.config import Config
 
 GZIP = "gzip"
 
@@ -127,6 +128,9 @@ def table_metadata(metadata: TableMetadata, output_file: OutputFile, overwrite:
             overwrite (bool): Where to overwrite the file if it already exists. Defaults to `False`.
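+            Note: when the `legacy-current-snapshot-id` option is enabled, `None` values are kept
+            in the dump so that a missing `current-snapshot-id` is serialized as `-1` (see below).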
""" with output_file.create(overwrite=overwrite) as output_stream: - json_bytes = metadata.model_dump_json().encode(UTF8) + # We need to serialize None values, in order to dump `None` current-snapshot-id as `-1` + exclude_none = False if Config().get_bool("legacy-current-snapshot-id") else True + + json_bytes = metadata.model_dump_json(exclude_none=exclude_none).encode(UTF8) json_bytes = Compressor.get_compressor(output_file.location).bytes_compressor()(json_bytes) output_stream.write(json_bytes) diff --git a/pyiceberg/table/metadata.py b/pyiceberg/table/metadata.py index 931b0cfe0a..1e5f0fdcec 100644 --- a/pyiceberg/table/metadata.py +++ b/pyiceberg/table/metadata.py @@ -28,7 +28,7 @@ Union, ) -from pydantic import Field, field_validator, model_validator +from pydantic import Field, field_serializer, field_validator, model_validator from pydantic import ValidationError as PydanticValidationError from typing_extensions import Annotated @@ -50,6 +50,7 @@ Properties, ) from pyiceberg.types import transform_dict_value_to_str +from pyiceberg.utils.config import Config from pyiceberg.utils.datetime import datetime_to_millis CURRENT_SNAPSHOT_ID = "current-snapshot-id" @@ -263,6 +264,12 @@ def sort_order_by_id(self, sort_order_id: int) -> Optional[SortOrder]: """Get the sort order by sort_order_id.""" return next((sort_order for sort_order in self.sort_orders if sort_order.order_id == sort_order_id), None) + @field_serializer('current_snapshot_id') + def serialize_current_snapshot_id(self, current_snapshot_id: Optional[int]) -> Optional[int]: + if current_snapshot_id is None and Config().get_bool("legacy-current-snapshot-id"): + return -1 + return current_snapshot_id + def _generate_snapshot_id() -> int: """Generate a new Snapshot ID from a UUID. diff --git a/pyiceberg/utils/concurrent.py b/pyiceberg/utils/concurrent.py index f6c0a23a9c..805599bf41 100644 --- a/pyiceberg/utils/concurrent.py +++ b/pyiceberg/utils/concurrent.py @@ -37,13 +37,4 @@ def get_or_create() -> Executor: @staticmethod def max_workers() -> Optional[int]: """Return the max number of workers configured.""" - config = Config() - val = config.config.get("max-workers") - - if val is None: - return None - - try: - return int(val) # type: ignore - except ValueError as err: - raise ValueError(f"Max workers should be an integer or left unset. Current value: {val}") from err + return Config().get_int("max-workers") diff --git a/pyiceberg/utils/config.py b/pyiceberg/utils/config.py index e038005469..8b1b81d3a7 100644 --- a/pyiceberg/utils/config.py +++ b/pyiceberg/utils/config.py @@ -16,6 +16,7 @@ # under the License. import logging import os +from distutils.util import strtobool from typing import List, Optional import strictyaml @@ -154,3 +155,19 @@ def get_catalog_config(self, catalog_name: str) -> Optional[RecursiveDict]: assert isinstance(catalog_conf, dict), f"Configuration path catalogs.{catalog_name_lower} needs to be an object" return catalog_conf return None + + def get_int(self, key: str) -> Optional[int]: + if (val := self.config.get(key)) is not None: + try: + return int(val) # type: ignore + except ValueError as err: + raise ValueError(f"{key} should be an integer or left unset. Current value: {val}") from err + return None + + def get_bool(self, key: str) -> Optional[bool]: + if (val := self.config.get(key)) is not None: + try: + return strtobool(val) # type: ignore + except ValueError as err: + raise ValueError(f"{key} should be a boolean or left unset. 
Current value: {val}") from err + return None diff --git a/tests/test_serializers.py b/tests/test_serializers.py new file mode 100644 index 0000000000..140db02700 --- /dev/null +++ b/tests/test_serializers.py @@ -0,0 +1,50 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +import json +import os +import uuid +from typing import Any, Dict + +import pytest +from pytest_mock import MockFixture + +from pyiceberg.serializers import ToOutputFile +from pyiceberg.table import StaticTable +from pyiceberg.table.metadata import TableMetadataV1 + + +def test_legacy_current_snapshot_id( + mocker: MockFixture, tmp_path_factory: pytest.TempPathFactory, example_table_metadata_no_snapshot_v1: Dict[str, Any] +) -> None: + from pyiceberg.io.pyarrow import PyArrowFileIO + + metadata_location = str(tmp_path_factory.mktemp("metadata") / f"{uuid.uuid4()}.metadata.json") + metadata = TableMetadataV1(**example_table_metadata_no_snapshot_v1) + ToOutputFile.table_metadata(metadata, PyArrowFileIO().new_output(location=metadata_location), overwrite=True) + static_table = StaticTable.from_metadata(metadata_location) + assert static_table.metadata.current_snapshot_id is None + + mocker.patch.dict(os.environ, values={"PYICEBERG_LEGACY_CURRENT_SNAPSHOT_ID": "True"}) + + ToOutputFile.table_metadata(metadata, PyArrowFileIO().new_output(location=metadata_location), overwrite=True) + with PyArrowFileIO().new_input(location=metadata_location).open() as input_stream: + metadata_json_bytes = input_stream.read() + assert json.loads(metadata_json_bytes)['current-snapshot-id'] == -1 + backwards_compatible_static_table = StaticTable.from_metadata(metadata_location) + assert backwards_compatible_static_table.metadata.current_snapshot_id is None + assert backwards_compatible_static_table.metadata == static_table.metadata diff --git a/tests/utils/test_config.py b/tests/utils/test_config.py index 5e3f72ccc6..2f15bb56d8 100644 --- a/tests/utils/test_config.py +++ b/tests/utils/test_config.py @@ -76,3 +76,20 @@ def test_merge_config() -> None: rhs: RecursiveDict = {"common_key": "xyz789"} result = merge_config(lhs, rhs) assert result["common_key"] == rhs["common_key"] + + +def test_from_configuration_files_get_typed_value(tmp_path_factory: pytest.TempPathFactory) -> None: + config_path = str(tmp_path_factory.mktemp("config")) + with open(f"{config_path}/.pyiceberg.yaml", "w", encoding=UTF8) as file: + yaml_str = as_document({"max-workers": "4", "legacy-current-snapshot-id": "True"}).as_yaml() + file.write(yaml_str) + + os.environ["PYICEBERG_HOME"] = config_path + with pytest.raises(ValueError): + Config().get_bool("max-workers") + + with pytest.raises(ValueError): + Config().get_int("legacy-current-snapshot-id") + + assert Config().get_bool("legacy-current-snapshot-id") + assert 
Config().get_int("max-workers") == 4 From 29fd42c9b81db2bde66b81ead231408dcd60661c Mon Sep 17 00:00:00 2001 From: Himadri Pal Date: Wed, 6 Mar 2024 10:23:03 -0800 Subject: [PATCH 158/169] Make optional OAuth2 request parameters configurable (#486) --- mkdocs/docs/configuration.md | 2 ++ pyiceberg/catalog/rest.py | 18 ++++++++++++--- tests/catalog/test_rest.py | 45 ++++++++++++++++++++++++++++++++++++ 3 files changed, 62 insertions(+), 3 deletions(-) diff --git a/mkdocs/docs/configuration.md b/mkdocs/docs/configuration.md index 5afd6d805b..93b198c328 100644 --- a/mkdocs/docs/configuration.md +++ b/mkdocs/docs/configuration.md @@ -170,6 +170,8 @@ catalog: | credential | t-1234:secret | Credential to use for OAuth2 credential flow when initializing the catalog | | token | FEW23.DFSDF.FSDF | Bearer token value to use for `Authorization` header | | scope | openid offline corpds:ds:profile | Desired scope of the requested security token (default : catalog) | +| resource | rest_catalog.iceberg.com | URI for the target resource or service | +| audience | rest_catalog | Logical name of target resource or service | | rest.sigv4-enabled | true | Sign requests to the REST Server using AWS SigV4 protocol | | rest.signing-region | us-east-1 | The region to use when SigV4 signing a request | | rest.signing-name | execute-api | The service signing name to use when SigV4 signing a request | diff --git a/pyiceberg/catalog/rest.py b/pyiceberg/catalog/rest.py index e7f0ddd899..79fc37a398 100644 --- a/pyiceberg/catalog/rest.py +++ b/pyiceberg/catalog/rest.py @@ -105,6 +105,8 @@ class Endpoints: CREDENTIAL = "credential" GRANT_TYPE = "grant_type" SCOPE = "scope" +AUDIENCE = "audience" +RESOURCE = "resource" TOKEN_EXCHANGE = "urn:ietf:params:oauth:grant-type:token-exchange" SEMICOLON = ":" KEY = "key" @@ -289,16 +291,26 @@ def auth_url(self) -> str: else: return self.url(Endpoints.get_token, prefixed=False) + def _extract_optional_oauth_params(self) -> Dict[str, str]: + optional_oauth_param = {SCOPE: self.properties.get(SCOPE) or CATALOG_SCOPE} + set_of_optional_params = {AUDIENCE, RESOURCE} + for param in set_of_optional_params: + if param_value := self.properties.get(param): + optional_oauth_param[param] = param_value + + return optional_oauth_param + def _fetch_access_token(self, session: Session, credential: str) -> str: if SEMICOLON in credential: client_id, client_secret = credential.split(SEMICOLON) else: client_id, client_secret = None, credential - # take scope from properties or use default CATALOG_SCOPE - scope = self.properties.get(SCOPE) or CATALOG_SCOPE + data = {GRANT_TYPE: CLIENT_CREDENTIALS, CLIENT_ID: client_id, CLIENT_SECRET: client_secret} + + optional_oauth_params = self._extract_optional_oauth_params() + data.update(optional_oauth_params) - data = {GRANT_TYPE: CLIENT_CREDENTIALS, CLIENT_ID: client_id, CLIENT_SECRET: client_secret, SCOPE: scope} response = session.post( url=self.auth_url, data=data, headers={**session.headers, "Content-type": "application/x-www-form-urlencoded"} ) diff --git a/tests/catalog/test_rest.py b/tests/catalog/test_rest.py index 20fdbfa4ea..51bc286267 100644 --- a/tests/catalog/test_rest.py +++ b/tests/catalog/test_rest.py @@ -47,6 +47,9 @@ TEST_AUTH_URL = "https://auth-endpoint/" TEST_TOKEN = "some_jwt_token" TEST_SCOPE = "openid_offline_corpds_ds_profile" +TEST_AUDIENCE = "test_audience" +TEST_RESOURCE = "test_resource" + TEST_HEADERS = { "Content-type": "application/json", "X-Client-Version": "0.14.1", @@ -137,6 +140,48 @@ def 
test_token_200_without_optional_fields(rest_mock: Mocker) -> None: ) +def test_token_with_optional_oauth_params(rest_mock: Mocker) -> None: + mock_request = rest_mock.post( + f"{TEST_URI}v1/oauth/tokens", + json={ + "access_token": TEST_TOKEN, + "token_type": "Bearer", + "expires_in": 86400, + "issued_token_type": "urn:ietf:params:oauth:token-type:access_token", + }, + status_code=200, + request_headers=OAUTH_TEST_HEADERS, + ) + assert ( + RestCatalog( + "rest", uri=TEST_URI, credential=TEST_CREDENTIALS, audience=TEST_AUDIENCE, resource=TEST_RESOURCE + )._session.headers["Authorization"] + == f"Bearer {TEST_TOKEN}" + ) + assert TEST_AUDIENCE in mock_request.last_request.text + assert TEST_RESOURCE in mock_request.last_request.text + + +def test_token_with_optional_oauth_params_as_empty(rest_mock: Mocker) -> None: + mock_request = rest_mock.post( + f"{TEST_URI}v1/oauth/tokens", + json={ + "access_token": TEST_TOKEN, + "token_type": "Bearer", + "expires_in": 86400, + "issued_token_type": "urn:ietf:params:oauth:token-type:access_token", + }, + status_code=200, + request_headers=OAUTH_TEST_HEADERS, + ) + assert ( + RestCatalog("rest", uri=TEST_URI, credential=TEST_CREDENTIALS, audience="", resource="")._session.headers["Authorization"] + == f"Bearer {TEST_TOKEN}" + ) + assert TEST_AUDIENCE not in mock_request.last_request.text + assert TEST_RESOURCE not in mock_request.last_request.text + + def test_token_with_default_scope(rest_mock: Mocker) -> None: mock_request = rest_mock.post( f"{TEST_URI}v1/oauth/tokens", From e56326d4a3ba74eb279bf57202a9121c4f2c731d Mon Sep 17 00:00:00 2001 From: Kevin Liu Date: Wed, 6 Mar 2024 16:35:24 -0800 Subject: [PATCH 159/169] Disable Spark Catalog caching for integration tests (#501) --- tests/conftest.py | 1 + tests/integration/test_writes.py | 22 ++++++++++++++++++++++ 2 files changed, 23 insertions(+) diff --git a/tests/conftest.py b/tests/conftest.py index 21c036ec09..a005966ea5 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1965,6 +1965,7 @@ def spark() -> SparkSession: .config("spark.sql.extensions", "org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions") .config("spark.sql.catalog.integration", "org.apache.iceberg.spark.SparkCatalog") .config("spark.sql.catalog.integration.catalog-impl", "org.apache.iceberg.rest.RESTCatalog") + .config("spark.sql.catalog.integration.cache-enabled", "false") .config("spark.sql.catalog.integration.uri", "http://localhost:8181") .config("spark.sql.catalog.integration.io-impl", "org.apache.iceberg.aws.s3.S3FileIO") .config("spark.sql.catalog.integration.warehouse", "s3://warehouse/wh/") diff --git a/tests/integration/test_writes.py b/tests/integration/test_writes.py index f0d1c85797..3b6d476b74 100644 --- a/tests/integration/test_writes.py +++ b/tests/integration/test_writes.py @@ -355,6 +355,28 @@ def test_data_files(spark: SparkSession, session_catalog: Catalog, arrow_table_w assert [row.deleted_data_files_count for row in rows] == [0, 0, 1, 0, 0] +@pytest.mark.integration +def test_python_writes_with_spark_snapshot_reads( + spark: SparkSession, session_catalog: Catalog, arrow_table_with_null: pa.Table +) -> None: + identifier = "default.python_writes_with_spark_snapshot_reads" + tbl = _create_table(session_catalog, identifier, {"format-version": "1"}, []) + + def get_current_snapshot_id(identifier: str) -> int: + return ( + spark.sql(f"SELECT snapshot_id FROM {identifier}.snapshots order by committed_at desc limit 1") + .collect()[0] + .snapshot_id + ) + + tbl.overwrite(arrow_table_with_null) + 
assert tbl.current_snapshot().snapshot_id == get_current_snapshot_id(identifier) # type: ignore + tbl.overwrite(arrow_table_with_null) + assert tbl.current_snapshot().snapshot_id == get_current_snapshot_id(identifier) # type: ignore + tbl.append(arrow_table_with_null) + assert tbl.current_snapshot().snapshot_id == get_current_snapshot_id(identifier) # type: ignore + + @pytest.mark.integration @pytest.mark.parametrize("format_version", [1, 2]) @pytest.mark.parametrize( From ad405734c5e187baadb8cad91f176733cf526b7a Mon Sep 17 00:00:00 2001 From: Kevin Liu Date: Fri, 8 Mar 2024 01:00:33 -0800 Subject: [PATCH 160/169] Set table properties with dictionary (#503) --- pyiceberg/table/__init__.py | 8 ++++++-- tests/integration/test_reads.py | 34 ++++++++++++++++++++++++++++++--- 2 files changed, 37 insertions(+), 5 deletions(-) diff --git a/pyiceberg/table/__init__.py b/pyiceberg/table/__init__.py index 1a4183c914..13e1ac1da5 100644 --- a/pyiceberg/table/__init__.py +++ b/pyiceberg/table/__init__.py @@ -294,17 +294,21 @@ def upgrade_table_version(self, format_version: Literal[1, 2]) -> Transaction: return self - def set_properties(self, **updates: str) -> Transaction: + def set_properties(self, properties: Properties = EMPTY_DICT, **kwargs: str) -> Transaction: """Set properties. When a property is already set, it will be overwritten. Args: - updates: The properties set on the table. + properties: The properties set on the table. + kwargs: properties can also be passed as kwargs. Returns: The alter table builder. """ + if properties and kwargs: + raise ValueError("Cannot pass both properties and kwargs") + updates = properties or kwargs return self._apply((SetPropertiesUpdate(updates=updates),)) def update_schema(self, allow_incompatible_changes: bool = False, case_sensitive: bool = True) -> UpdateSchema: diff --git a/tests/integration/test_reads.py b/tests/integration/test_reads.py index 072fd7db25..da43e7825e 100644 --- a/tests/integration/test_reads.py +++ b/tests/integration/test_reads.py @@ -107,22 +107,50 @@ def test_table_properties(catalog: Catalog) -> None: with table.transaction() as transaction: transaction.set_properties(abc="🤪") - assert table.properties == dict(abc="🤪", **DEFAULT_PROPERTIES) with table.transaction() as transaction: transaction.remove_properties("abc") - assert table.properties == DEFAULT_PROPERTIES table = table.transaction().set_properties(abc="def").commit_transaction() - assert table.properties == dict(abc="def", **DEFAULT_PROPERTIES) table = table.transaction().remove_properties("abc").commit_transaction() + assert table.properties == DEFAULT_PROPERTIES + + +@pytest.mark.integration
+@pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')])
+def test_table_properties_dict(catalog: Catalog) -> None: + table = create_table(catalog) assert table.properties == DEFAULT_PROPERTIES + with table.transaction() as transaction: + transaction.set_properties({"abc": "🤪"}) + assert table.properties == dict({"abc": "🤪"}, **DEFAULT_PROPERTIES) + + with table.transaction() as transaction: + transaction.remove_properties("abc") + assert table.properties == DEFAULT_PROPERTIES + + table = table.transaction().set_properties({"abc": "def"}).commit_transaction() + assert table.properties == dict({"abc": "def"}, **DEFAULT_PROPERTIES) + + table = table.transaction().remove_properties("abc").commit_transaction() + assert table.properties == DEFAULT_PROPERTIES + + +@pytest.mark.integration +@pytest.mark.parametrize('catalog', 
[pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) +def test_table_properties_error(catalog: Catalog) -> None: + table = create_table(catalog) + properties = {"abc": "def"} + with pytest.raises(ValueError) as e: + table.transaction().set_properties(properties, abc="def").commit_transaction() + assert "Cannot pass both properties and kwargs" in str(e.value) + @pytest.mark.integration @pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) From afdfa351119090f09d38ef72857d6303e691f5ad Mon Sep 17 00:00:00 2001 From: Andre Luis Anastacio Date: Fri, 8 Mar 2024 06:11:30 -0300 Subject: [PATCH 161/169] Imports decouple (#505) --- pyiceberg/catalog/glue.py | 4 +--- pyiceberg/catalog/hive.py | 4 +--- pyiceberg/catalog/noop.py | 3 +-- pyiceberg/catalog/rest.py | 2 +- pyiceberg/catalog/sql.py | 4 +--- pyiceberg/expressions/visitors.py | 3 +-- pyiceberg/io/pyarrow.py | 2 +- pyiceberg/table/__init__.py | 13 +++++++------ tests/catalog/test_base.py | 4 +--- tests/catalog/test_rest.py | 3 ++- tests/catalog/test_sql.py | 2 +- tests/expressions/test_parser.py | 2 +- tests/integration/test_writes.py | 5 +++-- tests/io/test_pyarrow.py | 2 +- tests/table/test_init.py | 2 +- tests/table/test_metadata.py | 3 +-- tests/test_schema.py | 2 +- tests/utils/test_manifest.py | 3 +-- 18 files changed, 27 insertions(+), 36 deletions(-) diff --git a/pyiceberg/catalog/glue.py b/pyiceberg/catalog/glue.py index bd902cd1ff..adec150d84 100644 --- a/pyiceberg/catalog/glue.py +++ b/pyiceberg/catalog/glue.py @@ -46,8 +46,6 @@ PREVIOUS_METADATA_LOCATION, TABLE_TYPE, Catalog, - Identifier, - Properties, PropertiesUpdateSummary, ) from pyiceberg.exceptions import ( @@ -67,7 +65,7 @@ from pyiceberg.table import CommitTableRequest, CommitTableResponse, Table, update_table_metadata from pyiceberg.table.metadata import TableMetadata, new_table_metadata from pyiceberg.table.sorting import UNSORTED_SORT_ORDER, SortOrder -from pyiceberg.typedef import EMPTY_DICT +from pyiceberg.typedef import EMPTY_DICT, Identifier, Properties from pyiceberg.types import ( BinaryType, BooleanType, diff --git a/pyiceberg/catalog/hive.py b/pyiceberg/catalog/hive.py index c24355f6fb..18bbcfe084 100644 --- a/pyiceberg/catalog/hive.py +++ b/pyiceberg/catalog/hive.py @@ -59,8 +59,6 @@ METADATA_LOCATION, TABLE_TYPE, Catalog, - Identifier, - Properties, PropertiesUpdateSummary, ) from pyiceberg.exceptions import ( @@ -79,7 +77,7 @@ from pyiceberg.table import CommitTableRequest, CommitTableResponse, Table, TableProperties, update_table_metadata from pyiceberg.table.metadata import new_table_metadata from pyiceberg.table.sorting import UNSORTED_SORT_ORDER, SortOrder -from pyiceberg.typedef import EMPTY_DICT +from pyiceberg.typedef import EMPTY_DICT, Identifier, Properties from pyiceberg.types import ( BinaryType, BooleanType, diff --git a/pyiceberg/catalog/noop.py b/pyiceberg/catalog/noop.py index a8b7154621..e294390e61 100644 --- a/pyiceberg/catalog/noop.py +++ b/pyiceberg/catalog/noop.py @@ -28,10 +28,9 @@ from pyiceberg.table import ( CommitTableRequest, CommitTableResponse, - SortOrder, Table, ) -from pyiceberg.table.sorting import UNSORTED_SORT_ORDER +from pyiceberg.table.sorting import UNSORTED_SORT_ORDER, SortOrder from pyiceberg.typedef import EMPTY_DICT, Identifier, Properties if TYPE_CHECKING: diff --git a/pyiceberg/catalog/rest.py b/pyiceberg/catalog/rest.py index 79fc37a398..c401339e18 100644 --- a/pyiceberg/catalog/rest.py +++ b/pyiceberg/catalog/rest.py @@ -65,8 +65,8 @@ 
CommitTableResponse, Table, TableIdentifier, - TableMetadata, ) +from pyiceberg.table.metadata import TableMetadata from pyiceberg.table.sorting import UNSORTED_SORT_ORDER, SortOrder, assign_fresh_sort_order_ids from pyiceberg.typedef import EMPTY_DICT, UTF8, IcebergBaseModel from pyiceberg.types import transform_dict_value_to_str diff --git a/pyiceberg/catalog/sql.py b/pyiceberg/catalog/sql.py index b6b2feeeb0..d44d4996b6 100644 --- a/pyiceberg/catalog/sql.py +++ b/pyiceberg/catalog/sql.py @@ -44,8 +44,6 @@ from pyiceberg.catalog import ( METADATA_LOCATION, Catalog, - Identifier, - Properties, PropertiesUpdateSummary, ) from pyiceberg.exceptions import ( @@ -64,7 +62,7 @@ from pyiceberg.table import CommitTableRequest, CommitTableResponse, Table, update_table_metadata from pyiceberg.table.metadata import new_table_metadata from pyiceberg.table.sorting import UNSORTED_SORT_ORDER, SortOrder -from pyiceberg.typedef import EMPTY_DICT +from pyiceberg.typedef import EMPTY_DICT, Identifier, Properties if TYPE_CHECKING: import pyarrow as pa diff --git a/pyiceberg/expressions/visitors.py b/pyiceberg/expressions/visitors.py index a185164cd1..79bc995198 100644 --- a/pyiceberg/expressions/visitors.py +++ b/pyiceberg/expressions/visitors.py @@ -54,7 +54,6 @@ BoundStartsWith, BoundTerm, BoundUnaryPredicate, - L, Not, Or, UnboundPredicate, @@ -63,7 +62,7 @@ from pyiceberg.manifest import DataFile, ManifestFile, PartitionFieldSummary from pyiceberg.partitioning import PartitionSpec from pyiceberg.schema import Schema -from pyiceberg.typedef import EMPTY_DICT, StructProtocol +from pyiceberg.typedef import EMPTY_DICT, L, StructProtocol from pyiceberg.types import ( DoubleType, FloatType, diff --git a/pyiceberg/io/pyarrow.py b/pyiceberg/io/pyarrow.py index be944ffb36..7192513a2d 100644 --- a/pyiceberg/io/pyarrow.py +++ b/pyiceberg/io/pyarrow.py @@ -75,8 +75,8 @@ AlwaysTrue, BooleanExpression, BoundTerm, - Literal, ) +from pyiceberg.expressions.literals import Literal from pyiceberg.expressions.visitors import ( BoundBooleanExpressionVisitor, bind, diff --git a/pyiceberg/table/__init__.py b/pyiceberg/table/__init__.py index 13e1ac1da5..09cb814a0e 100644 --- a/pyiceberg/table/__init__.py +++ b/pyiceberg/table/__init__.py @@ -46,6 +46,8 @@ from sortedcontainers import SortedList from typing_extensions import Annotated +import pyiceberg.expressions.parser as parser +import pyiceberg.expressions.visitors as visitors from pyiceberg.exceptions import CommitFailedException, ResolveError, ValidationError from pyiceberg.expressions import ( AlwaysTrue, @@ -53,10 +55,7 @@ BooleanExpression, EqualTo, Reference, - parser, - visitors, ) -from pyiceberg.expressions.visitors import _InclusiveMetricsEvaluator, inclusive_projection from pyiceberg.io import FileIO, load_file_io from pyiceberg.manifest import ( POSITIONAL_DELETE_SCHEMA, @@ -1379,7 +1378,9 @@ def _match_deletes_to_data_file(data_entry: ManifestEntry, positional_delete_ent relevant_entries = positional_delete_entries[positional_delete_entries.bisect_right(data_entry) :] if len(relevant_entries) > 0: - evaluator = _InclusiveMetricsEvaluator(POSITIONAL_DELETE_SCHEMA, EqualTo("file_path", data_entry.data_file.file_path)) + evaluator = visitors._InclusiveMetricsEvaluator( + POSITIONAL_DELETE_SCHEMA, EqualTo("file_path", data_entry.data_file.file_path) + ) return { positional_delete_entry.data_file for positional_delete_entry in relevant_entries @@ -1403,7 +1404,7 @@ def __init__( super().__init__(table, row_filter, selected_fields, case_sensitive, snapshot_id, 
options, limit) def _build_partition_projection(self, spec_id: int) -> BooleanExpression: - project = inclusive_projection(self.table.schema(), self.table.specs()[spec_id]) + project = visitors.inclusive_projection(self.table.schema(), self.table.specs()[spec_id]) return project(self.row_filter) @cached_property @@ -1470,7 +1471,7 @@ def plan_files(self) -> Iterable[FileScanTask]: # this filter depends on the partition spec used to write the manifest file partition_evaluators: Dict[int, Callable[[DataFile], bool]] = KeyDefaultDict(self._build_partition_evaluator) - metrics_evaluator = _InclusiveMetricsEvaluator( + metrics_evaluator = visitors._InclusiveMetricsEvaluator( self.table.schema(), self.row_filter, self.case_sensitive, self.options.get("include_empty_files") == "true" ).eval diff --git a/tests/catalog/test_base.py b/tests/catalog/test_base.py index 1defb0996f..1f0060780e 100644 --- a/tests/catalog/test_base.py +++ b/tests/catalog/test_base.py @@ -31,8 +31,6 @@ from pyiceberg.catalog import ( Catalog, - Identifier, - Properties, PropertiesUpdateSummary, ) from pyiceberg.exceptions import ( @@ -58,7 +56,7 @@ from pyiceberg.table.metadata import TableMetadataV1 from pyiceberg.table.sorting import UNSORTED_SORT_ORDER, SortOrder from pyiceberg.transforms import IdentityTransform -from pyiceberg.typedef import EMPTY_DICT +from pyiceberg.typedef import EMPTY_DICT, Identifier, Properties from pyiceberg.types import IntegerType, LongType, NestedField diff --git a/tests/catalog/test_rest.py b/tests/catalog/test_rest.py index 51bc286267..850e5f0180 100644 --- a/tests/catalog/test_rest.py +++ b/tests/catalog/test_rest.py @@ -23,7 +23,7 @@ from requests_mock import Mocker import pyiceberg -from pyiceberg.catalog import PropertiesUpdateSummary, Table, load_catalog +from pyiceberg.catalog import PropertiesUpdateSummary, load_catalog from pyiceberg.catalog.rest import AUTH_URL, RestCatalog from pyiceberg.exceptions import ( AuthorizationExpiredError, @@ -36,6 +36,7 @@ from pyiceberg.io import load_file_io from pyiceberg.partitioning import PartitionField, PartitionSpec from pyiceberg.schema import Schema +from pyiceberg.table import Table from pyiceberg.table.metadata import TableMetadataV1 from pyiceberg.table.sorting import SortField, SortOrder from pyiceberg.transforms import IdentityTransform, TruncateTransform diff --git a/tests/catalog/test_sql.py b/tests/catalog/test_sql.py index 0b869d6826..3a77f8678a 100644 --- a/tests/catalog/test_sql.py +++ b/tests/catalog/test_sql.py @@ -25,7 +25,6 @@ from pytest_lazyfixture import lazy_fixture from sqlalchemy.exc import ArgumentError, IntegrityError -from pyiceberg.catalog import Identifier from pyiceberg.catalog.sql import SqlCatalog from pyiceberg.exceptions import ( CommitFailedException, @@ -49,6 +48,7 @@ SortOrder, ) from pyiceberg.transforms import IdentityTransform +from pyiceberg.typedef import Identifier from pyiceberg.types import IntegerType diff --git a/tests/expressions/test_parser.py b/tests/expressions/test_parser.py index 3ce2f2226c..0bccc9b80f 100644 --- a/tests/expressions/test_parser.py +++ b/tests/expressions/test_parser.py @@ -17,6 +17,7 @@ import pytest from pyparsing import ParseException +import pyiceberg.expressions.parser as parser from pyiceberg.expressions import ( AlwaysFalse, AlwaysTrue, @@ -37,7 +38,6 @@ NotStartsWith, Or, StartsWith, - parser, ) diff --git a/tests/integration/test_writes.py b/tests/integration/test_writes.py index 3b6d476b74..82c4ace711 100644 --- a/tests/integration/test_writes.py +++ 
b/tests/integration/test_writes.py @@ -32,11 +32,12 @@ from pyspark.sql import SparkSession from pytest_mock.plugin import MockerFixture -from pyiceberg.catalog import Catalog, Properties, Table +from pyiceberg.catalog import Catalog from pyiceberg.catalog.sql import SqlCatalog from pyiceberg.exceptions import NoSuchTableError from pyiceberg.schema import Schema -from pyiceberg.table import _dataframe_to_data_files +from pyiceberg.table import Table, _dataframe_to_data_files +from pyiceberg.typedef import Properties from pyiceberg.types import ( BinaryType, BooleanType, diff --git a/tests/io/test_pyarrow.py b/tests/io/test_pyarrow.py index a3dd56db7f..2acffdfdf9 100644 --- a/tests/io/test_pyarrow.py +++ b/tests/io/test_pyarrow.py @@ -53,8 +53,8 @@ GreaterThan, Not, Or, - literal, ) +from pyiceberg.expressions.literals import literal from pyiceberg.io import InputStream, OutputStream, load_file_io from pyiceberg.io.pyarrow import ( ICEBERG_SCHEMA, diff --git a/tests/table/test_init.py b/tests/table/test_init.py index b8097f5fcf..f734211510 100644 --- a/tests/table/test_init.py +++ b/tests/table/test_init.py @@ -57,7 +57,6 @@ SetDefaultSortOrderUpdate, SetPropertiesUpdate, SetSnapshotRefUpdate, - SnapshotRef, StaticTable, Table, UpdateSchema, @@ -68,6 +67,7 @@ update_table_metadata, ) from pyiceberg.table.metadata import INITIAL_SEQUENCE_NUMBER, TableMetadataUtil, TableMetadataV2, _generate_snapshot_id +from pyiceberg.table.refs import SnapshotRef from pyiceberg.table.snapshots import ( Operation, Snapshot, diff --git a/tests/table/test_metadata.py b/tests/table/test_metadata.py index c05700ecbb..0cf17b11a2 100644 --- a/tests/table/test_metadata.py +++ b/tests/table/test_metadata.py @@ -29,7 +29,6 @@ from pyiceberg.partitioning import PartitionField, PartitionSpec from pyiceberg.schema import Schema from pyiceberg.serializers import FromByteStream -from pyiceberg.table import SortOrder from pyiceberg.table.metadata import ( TableMetadataUtil, TableMetadataV1, @@ -37,7 +36,7 @@ new_table_metadata, ) from pyiceberg.table.refs import SnapshotRef, SnapshotRefType -from pyiceberg.table.sorting import NullOrder, SortDirection, SortField +from pyiceberg.table.sorting import NullOrder, SortDirection, SortField, SortOrder from pyiceberg.transforms import IdentityTransform from pyiceberg.typedef import UTF8 from pyiceberg.types import ( diff --git a/tests/test_schema.py b/tests/test_schema.py index 6394b72ba6..7e10dd5b0d 100644 --- a/tests/test_schema.py +++ b/tests/test_schema.py @@ -23,8 +23,8 @@ from pyiceberg import schema from pyiceberg.exceptions import ResolveError, ValidationError -from pyiceberg.expressions import Accessor from pyiceberg.schema import ( + Accessor, Schema, build_position_accessors, promote, diff --git a/tests/utils/test_manifest.py b/tests/utils/test_manifest.py index 6ef11a47ea..3e789cb854 100644 --- a/tests/utils/test_manifest.py +++ b/tests/utils/test_manifest.py @@ -37,8 +37,7 @@ ) from pyiceberg.partitioning import PartitionField, PartitionSpec from pyiceberg.schema import Schema -from pyiceberg.table import Snapshot -from pyiceberg.table.snapshots import Operation, Summary +from pyiceberg.table.snapshots import Operation, Snapshot, Summary from pyiceberg.transforms import IdentityTransform from pyiceberg.typedef import Record from pyiceberg.types import IntegerType, NestedField From 70342ac83d2d1f121f3ab04c6d7317c8830fdad1 Mon Sep 17 00:00:00 2001 From: Kevin Liu Date: Fri, 8 Mar 2024 23:34:42 -0800 Subject: [PATCH 162/169] Allow setting non-string typed values in 
`set_properties` (#504) * pass dict properties to set_properties * whitespace * set non-string property values * test error for none * add comment * rewrite validator * properties validator --- pyiceberg/catalog/rest.py | 7 +++++-- pyiceberg/table/__init__.py | 9 +++++++-- pyiceberg/table/metadata.py | 4 +++- tests/integration/test_reads.py | 17 +++++++++++++++++ 4 files changed, 32 insertions(+), 5 deletions(-) diff --git a/pyiceberg/catalog/rest.py b/pyiceberg/catalog/rest.py index c401339e18..a5f33f02dd 100644 --- a/pyiceberg/catalog/rest.py +++ b/pyiceberg/catalog/rest.py @@ -149,9 +149,12 @@ class CreateTableRequest(IcebergBaseModel): partition_spec: Optional[PartitionSpec] = Field(alias="partition-spec") write_order: Optional[SortOrder] = Field(alias="write-order") stage_create: bool = Field(alias="stage-create", default=False) - properties: Properties = Field(default_factory=dict) + properties: Dict[str, str] = Field(default_factory=dict) + # validators - transform_properties_dict_value_to_str = field_validator('properties', mode='before')(transform_dict_value_to_str) + @field_validator('properties', mode='before') + def transform_properties_dict_value_to_str(cls, properties: Properties) -> Dict[str, str]: + return transform_dict_value_to_str(properties) class RegisterTableRequest(IcebergBaseModel): diff --git a/pyiceberg/table/__init__.py b/pyiceberg/table/__init__.py index 09cb814a0e..76aa533d7c 100644 --- a/pyiceberg/table/__init__.py +++ b/pyiceberg/table/__init__.py @@ -42,7 +42,7 @@ Union, ) -from pydantic import Field, SerializeAsAny +from pydantic import Field, SerializeAsAny, field_validator from sortedcontainers import SortedList from typing_extensions import Annotated @@ -124,6 +124,7 @@ NestedField, PrimitiveType, StructType, + transform_dict_value_to_str, ) from pyiceberg.utils.concurrent import ExecutorFactory from pyiceberg.utils.datetime import datetime_to_millis @@ -293,7 +294,7 @@ def upgrade_table_version(self, format_version: Literal[1, 2]) -> Transaction: return self - def set_properties(self, properties: Properties = EMPTY_DICT, **kwargs: str) -> Transaction: + def set_properties(self, properties: Properties = EMPTY_DICT, **kwargs: Any) -> Transaction: """Set properties. When a property is already set, it will be overwritten. 
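
Taken together with #503 above, `set_properties` now accepts table properties either as a dictionary or as keyword arguments, and the validator added in the next hunk stringifies any non-string values before they reach the metadata. A minimal usage sketch, not part of the patch itself: the catalog name, table identifier, and property names below are illustrative, and it assumes a catalog is already configured.

    from pyiceberg.catalog import load_catalog

    catalog = load_catalog("default")              # assumes a configured catalog named "default"
    table = catalog.load_table("default.events")   # hypothetical existing table

    # Dictionary form, introduced by #503:
    table = table.transaction().set_properties({"owner": "data-eng"}).commit_transaction()

    # Keyword form still works; with #504 a non-string value is coerced to a string:
    table = table.transaction().set_properties(retention_days=7).commit_transaction()
    assert table.properties["retention_days"] == "7"

    # Mixing the two forms in one call raises ValueError("Cannot pass both properties and kwargs").
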
@@ -474,6 +475,10 @@ class SetPropertiesUpdate(TableUpdate): action: TableUpdateAction = TableUpdateAction.set_properties updates: Dict[str, str] + @field_validator('updates', mode='before') + def transform_properties_dict_value_to_str(cls, properties: Properties) -> Dict[str, str]: + return transform_dict_value_to_str(properties) + class RemovePropertiesUpdate(TableUpdate): action: TableUpdateAction = TableUpdateAction.remove_properties diff --git a/pyiceberg/table/metadata.py b/pyiceberg/table/metadata.py index 1e5f0fdcec..323f6d85a8 100644 --- a/pyiceberg/table/metadata.py +++ b/pyiceberg/table/metadata.py @@ -221,7 +221,9 @@ class TableMetadataCommonFields(IcebergBaseModel): current-snapshot-id even if the refs map is null.""" # validators - transform_properties_dict_value_to_str = field_validator('properties', mode='before')(transform_dict_value_to_str) + @field_validator('properties', mode='before') + def transform_properties_dict_value_to_str(cls, properties: Properties) -> Dict[str, str]: + return transform_dict_value_to_str(properties) def snapshot_by_id(self, snapshot_id: int) -> Optional[Snapshot]: """Get the snapshot by snapshot_id.""" diff --git a/tests/integration/test_reads.py b/tests/integration/test_reads.py index da43e7825e..fdc13ae752 100644 --- a/tests/integration/test_reads.py +++ b/tests/integration/test_reads.py @@ -24,6 +24,7 @@ import pytest from hive_metastore.ttypes import LockRequest, LockResponse, LockState, UnlockRequest from pyarrow.fs import S3FileSystem +from pydantic_core import ValidationError from pyiceberg.catalog import Catalog, load_catalog from pyiceberg.catalog.hive import HiveCatalog, _HiveClient @@ -119,6 +120,14 @@ def test_table_properties(catalog: Catalog) -> None: table = table.transaction().remove_properties("abc").commit_transaction() assert table.properties == DEFAULT_PROPERTIES + table = table.transaction().set_properties(abc=123).commit_transaction() + # properties are stored as strings in the iceberg spec + assert table.properties == dict(abc="123", **DEFAULT_PROPERTIES) + + with pytest.raises(ValidationError) as exc_info: + table.transaction().set_properties(property_name=None).commit_transaction() + assert "None type is not a supported value in properties: property_name" in str(exc_info.value) + @pytest.mark.integration @pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) @@ -141,6 +150,14 @@ def test_table_properties_dict(catalog: Catalog) -> None: table = table.transaction().remove_properties("abc").commit_transaction() assert table.properties == DEFAULT_PROPERTIES + table = table.transaction().set_properties({"abc": 123}).commit_transaction() + # properties are stored as strings in the iceberg spec + assert table.properties == dict({"abc": "123"}, **DEFAULT_PROPERTIES) + + with pytest.raises(ValidationError) as exc_info: + table.transaction().set_properties({"property_name": None}).commit_transaction() + assert "None type is not a supported value in properties: property_name" in str(exc_info.value) + @pytest.mark.integration @pytest.mark.parametrize('catalog', [pytest.lazy_fixture('catalog_hive'), pytest.lazy_fixture('catalog_rest')]) From a222825df6b009a0dc49ffa896b21932f28c4452 Mon Sep 17 00:00:00 2001 From: Sung Yun <107272191+syun64@users.noreply.github.com> Date: Sat, 9 Mar 2024 23:41:07 -0700 Subject: [PATCH 163/169] [Bug fix] update name mapping in Transaction.update_schema (#508) --- pyiceberg/table/__init__.py | 7 ++++++- tests/integration/test_rest_schema.py | 8 
++++++-- 2 files changed, 12 insertions(+), 3 deletions(-) diff --git a/pyiceberg/table/__init__.py b/pyiceberg/table/__init__.py index 76aa533d7c..b244dfb16d 100644 --- a/pyiceberg/table/__init__.py +++ b/pyiceberg/table/__init__.py @@ -321,7 +321,12 @@ def update_schema(self, allow_incompatible_changes: bool = False, case_sensitive Returns: A new UpdateSchema. """ - return UpdateSchema(self, allow_incompatible_changes=allow_incompatible_changes, case_sensitive=case_sensitive) + return UpdateSchema( + self, + allow_incompatible_changes=allow_incompatible_changes, + case_sensitive=case_sensitive, + name_mapping=self._table.name_mapping(), + ) def update_snapshot(self) -> UpdateSnapshot: """Create a new UpdateSnapshot to produce a new snapshot for the table. diff --git a/tests/integration/test_rest_schema.py b/tests/integration/test_rest_schema.py index 4c758e4c3e..7aeb1bccdc 100644 --- a/tests/integration/test_rest_schema.py +++ b/tests/integration/test_rest_schema.py @@ -672,9 +672,13 @@ def test_rename_simple(simple_table: Table) -> None: with simple_table.update_schema() as schema_update: schema_update.rename_column("foo", "vo") + with simple_table.transaction() as txn: + with txn.update_schema() as schema_update: + schema_update.rename_column("bar", "var") + assert simple_table.schema() == Schema( NestedField(field_id=1, name="vo", field_type=StringType(), required=False), - NestedField(field_id=2, name="bar", field_type=IntegerType(), required=True), + NestedField(field_id=2, name="var", field_type=IntegerType(), required=True), NestedField(field_id=3, name="baz", field_type=BooleanType(), required=False), identifier_field_ids=[2], ) @@ -682,7 +686,7 @@ def test_rename_simple(simple_table: Table) -> None: # Check that the name mapping gets updated assert simple_table.name_mapping() == NameMapping([ MappedField(field_id=1, names=['foo', 'vo']), - MappedField(field_id=2, names=['bar']), + MappedField(field_id=2, names=['bar', 'var']), MappedField(field_id=3, names=['baz']), ]) From 0ab3262e035867beba246368c2a46d1c9387f65c Mon Sep 17 00:00:00 2001 From: Sung Yun <107272191+syun64@users.noreply.github.com> Date: Mon, 11 Mar 2024 02:02:50 -0600 Subject: [PATCH 164/169] Allow Partition data to be nullable in ManifestEntry (#509) * fix * use partition field nullability --- pyiceberg/manifest.py | 1 + pyiceberg/partitioning.py | 3 ++- tests/conftest.py | 2 +- tests/table/test_partitioning.py | 2 +- 4 files changed, 5 insertions(+), 3 deletions(-) diff --git a/pyiceberg/manifest.py b/pyiceberg/manifest.py index 0504626d07..03dc3199bf 100644 --- a/pyiceberg/manifest.py +++ b/pyiceberg/manifest.py @@ -308,6 +308,7 @@ def data_file_with_partition(partition_type: StructType, format_version: Literal field_id=field.field_id, name=field.name, field_type=partition_field_to_data_file_partition_field(field.field_type), + required=field.required, ) for field in partition_type.fields ]) diff --git a/pyiceberg/partitioning.py b/pyiceberg/partitioning.py index 6fa0286282..a6692b325e 100644 --- a/pyiceberg/partitioning.py +++ b/pyiceberg/partitioning.py @@ -218,7 +218,8 @@ def partition_type(self, schema: Schema) -> StructType: for field in self.fields: source_type = schema.find_type(field.source_id) result_type = field.transform.result_type(source_type) - nested_fields.append(NestedField(field.field_id, field.name, result_type, required=False)) + required = schema.find_field(field.source_id).required + nested_fields.append(NestedField(field.field_id, field.name, result_type, required=required)) return 
StructType(*nested_fields) def partition_to_path(self, data: Record, schema: Schema) -> str: diff --git a/tests/conftest.py b/tests/conftest.py index a005966ea5..e090e7c020 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -892,7 +892,7 @@ def metadata_location_gz(tmp_path_factory: pytest.TempPathFactory) -> str: "data_file": { "file_path": "/home/iceberg/warehouse/nyc/taxis_partitioned/data/VendorID=1/00000-633-d8a4223e-dc97-45a1-86e1-adaba6e8abd7-00002.parquet", "file_format": "PARQUET", - "partition": {"VendorID": 1, "tpep_pickup_datetime": 1925}, + "partition": {"VendorID": 1, "tpep_pickup_datetime": None}, "record_count": 95050, "file_size_in_bytes": 1265950, "block_size_in_bytes": 67108864, diff --git a/tests/table/test_partitioning.py b/tests/table/test_partitioning.py index cb60c9a8e5..d7425bc351 100644 --- a/tests/table/test_partitioning.py +++ b/tests/table/test_partitioning.py @@ -127,5 +127,5 @@ def test_partition_type(table_schema_simple: Schema) -> None: assert spec.partition_type(table_schema_simple) == StructType( NestedField(field_id=1000, name="str_truncate", field_type=StringType(), required=False), - NestedField(field_id=1001, name="int_bucket", field_type=IntegerType(), required=False), + NestedField(field_id=1001, name="int_bucket", field_type=IntegerType(), required=True), ) From 67ea776c2a7a7039fba66fccd935366ed9bbc8a9 Mon Sep 17 00:00:00 2001 From: bolkedebruin Date: Mon, 11 Mar 2024 09:04:58 +0100 Subject: [PATCH 165/169] Allow fsspec up to 2025.1 (#510) fsspec uses calendar versioning in which case the limit might not even make sense. Bumping it to another year, but you might want to consider removing the upper bound. --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 52c60d9482..dcb65f7c5d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -54,7 +54,7 @@ rich = ">=10.11.0,<14.0.0" strictyaml = ">=1.7.0,<2.0.0" # CVE-2020-14343 was fixed in 5.4. pydantic = ">=2.0,<3.0,!=2.4.0,!=2.4.1" # 2.4.0, 2.4.1 has a critical bug sortedcontainers = "2.4.0" -fsspec = ">=2023.1.0,<2024.1.0" +fsspec = ">=2023.1.0,<2025.1.0" pyparsing = ">=3.1.0,<4.0.0" zstandard = ">=0.13.0,<1.0.0" tenacity = ">=8.2.3,<9.0.0" From a48a00115cbd334dff0272c94c7b188316a5c05c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 11 Mar 2024 21:52:37 -0700 Subject: [PATCH 166/169] Build: Bump pypa/cibuildwheel from 2.16.5 to 2.17.0 (#517) Bumps [pypa/cibuildwheel](https://github.com/pypa/cibuildwheel) from 2.16.5 to 2.17.0. - [Release notes](https://github.com/pypa/cibuildwheel/releases) - [Changelog](https://github.com/pypa/cibuildwheel/blob/main/docs/changelog.md) - [Commits](https://github.com/pypa/cibuildwheel/compare/v2.16.5...v2.17.0) --- updated-dependencies: - dependency-name: pypa/cibuildwheel dependency-type: direct:production update-type: version-update:semver-minor ... 
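
To make the partition nullability fix from #509 above concrete: `partition_type` now derives each partition field's `required` flag from the source column instead of hard-coding `required=False`. A minimal sketch, not part of the patch; the schema and field names are illustrative:

    from pyiceberg.partitioning import PartitionField, PartitionSpec
    from pyiceberg.schema import Schema
    from pyiceberg.transforms import IdentityTransform
    from pyiceberg.types import IntegerType, NestedField

    # A required source column ...
    schema = Schema(NestedField(field_id=1, name="id", field_type=IntegerType(), required=True))
    spec = PartitionSpec(PartitionField(source_id=1, field_id=1000, transform=IdentityTransform(), name="id"))

    # ... now yields a required partition field; before #509 this was always required=False.
    assert spec.partition_type(schema).fields[0].required is True
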
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/python-release.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/python-release.yml b/.github/workflows/python-release.yml index db88f7e824..54446049a4 100644 --- a/.github/workflows/python-release.yml +++ b/.github/workflows/python-release.yml @@ -59,7 +59,7 @@ jobs: if: startsWith(matrix.os, 'ubuntu') - name: Build wheels - uses: pypa/cibuildwheel@v2.16.5 + uses: pypa/cibuildwheel@v2.17.0 with: output-dir: wheelhouse config-file: "pyproject.toml" From 0ee7f1233e062b17b03fccc1eac6449c72e575f0 Mon Sep 17 00:00:00 2001 From: Andre Luis Anastacio Date: Tue, 12 Mar 2024 15:49:10 -0300 Subject: [PATCH 167/169] Decouple imports reported by mypy linter (#519) --- pyiceberg/avro/file.py | 3 ++- pyiceberg/catalog/__init__.py | 2 +- pyiceberg/catalog/dynamodb.py | 4 +--- pyiceberg/catalog/rest.py | 4 +--- pyiceberg/cli/output.py | 3 ++- pyiceberg/io/pyarrow.py | 2 +- pyiceberg/table/__init__.py | 3 +-- pyproject.toml | 1 + tests/avro/test_file.py | 2 +- tests/avro/test_resolver.py | 3 ++- tests/io/test_pyarrow.py | 2 +- 11 files changed, 14 insertions(+), 15 deletions(-) diff --git a/pyiceberg/avro/file.py b/pyiceberg/avro/file.py index 2f21e165b4..d0da7651b7 100644 --- a/pyiceberg/avro/file.py +++ b/pyiceberg/avro/file.py @@ -35,7 +35,8 @@ TypeVar, ) -from pyiceberg.avro.codecs import KNOWN_CODECS, Codec +from pyiceberg.avro.codecs import KNOWN_CODECS +from pyiceberg.avro.codecs.codec import Codec from pyiceberg.avro.decoder import BinaryDecoder, new_decoder from pyiceberg.avro.encoder import BinaryEncoder from pyiceberg.avro.reader import Reader diff --git a/pyiceberg/catalog/__init__.py b/pyiceberg/catalog/__init__.py index db83658f1f..4f7b5a3292 100644 --- a/pyiceberg/catalog/__init__.py +++ b/pyiceberg/catalog/__init__.py @@ -46,8 +46,8 @@ CommitTableRequest, CommitTableResponse, Table, - TableMetadata, ) +from pyiceberg.table.metadata import TableMetadata from pyiceberg.table.sorting import UNSORTED_SORT_ORDER, SortOrder from pyiceberg.typedef import ( EMPTY_DICT, diff --git a/pyiceberg/catalog/dynamodb.py b/pyiceberg/catalog/dynamodb.py index b7b0f3ddb1..266dd6353d 100644 --- a/pyiceberg/catalog/dynamodb.py +++ b/pyiceberg/catalog/dynamodb.py @@ -34,8 +34,6 @@ PREVIOUS_METADATA_LOCATION, TABLE_TYPE, Catalog, - Identifier, - Properties, PropertiesUpdateSummary, ) from pyiceberg.exceptions import ( @@ -56,7 +54,7 @@ from pyiceberg.table import CommitTableRequest, CommitTableResponse, Table from pyiceberg.table.metadata import new_table_metadata from pyiceberg.table.sorting import UNSORTED_SORT_ORDER, SortOrder -from pyiceberg.typedef import EMPTY_DICT +from pyiceberg.typedef import EMPTY_DICT, Identifier, Properties if TYPE_CHECKING: import pyarrow as pa diff --git a/pyiceberg/catalog/rest.py b/pyiceberg/catalog/rest.py index a5f33f02dd..f67fe2c1fd 100644 --- a/pyiceberg/catalog/rest.py +++ b/pyiceberg/catalog/rest.py @@ -38,8 +38,6 @@ URI, WAREHOUSE_LOCATION, Catalog, - Identifier, - Properties, PropertiesUpdateSummary, ) from pyiceberg.exceptions import ( @@ -68,7 +66,7 @@ ) from pyiceberg.table.metadata import TableMetadata from pyiceberg.table.sorting import UNSORTED_SORT_ORDER, SortOrder, assign_fresh_sort_order_ids -from pyiceberg.typedef import EMPTY_DICT, UTF8, IcebergBaseModel +from pyiceberg.typedef import EMPTY_DICT, UTF8, IcebergBaseModel, Identifier, Properties from pyiceberg.types import 
transform_dict_value_to_str if TYPE_CHECKING: diff --git a/pyiceberg/cli/output.py b/pyiceberg/cli/output.py index 18cdab1556..67081fbd8d 100644 --- a/pyiceberg/cli/output.py +++ b/pyiceberg/cli/output.py @@ -31,7 +31,8 @@ from pyiceberg.partitioning import PartitionSpec from pyiceberg.schema import Schema -from pyiceberg.table import Table, TableMetadata +from pyiceberg.table import Table +from pyiceberg.table.metadata import TableMetadata from pyiceberg.table.refs import SnapshotRefType from pyiceberg.typedef import IcebergBaseModel, Identifier, Properties diff --git a/pyiceberg/io/pyarrow.py b/pyiceberg/io/pyarrow.py index 7192513a2d..f1e4d302ec 100644 --- a/pyiceberg/io/pyarrow.py +++ b/pyiceberg/io/pyarrow.py @@ -69,8 +69,8 @@ ) from sortedcontainers import SortedList -from pyiceberg.avro.resolver import ResolveError from pyiceberg.conversions import to_bytes +from pyiceberg.exceptions import ResolveError from pyiceberg.expressions import ( AlwaysTrue, BooleanExpression, diff --git a/pyiceberg/table/__init__.py b/pyiceberg/table/__init__.py index b244dfb16d..c5dd2e8e70 100644 --- a/pyiceberg/table/__init__.py +++ b/pyiceberg/table/__init__.py @@ -71,7 +71,6 @@ from pyiceberg.partitioning import ( INITIAL_PARTITION_SPEC_ID, PARTITION_FIELD_ID_START, - IdentityTransform, PartitionField, PartitionSpec, _PartitionNameGenerator, @@ -108,7 +107,7 @@ update_snapshot_summaries, ) from pyiceberg.table.sorting import SortOrder -from pyiceberg.transforms import TimeTransform, Transform, VoidTransform +from pyiceberg.transforms import IdentityTransform, TimeTransform, Transform, VoidTransform from pyiceberg.typedef import ( EMPTY_DICT, IcebergBaseModel, diff --git a/pyproject.toml b/pyproject.toml index dcb65f7c5d..4095f325da 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -149,6 +149,7 @@ warn_unreachable = true warn_unused_ignores = true disallow_any_generics = true disallow_untyped_defs = true +implicit_reexport = false [[tool.mypy.overrides]] module = "pyarrow.*" diff --git a/tests/avro/test_file.py b/tests/avro/test_file.py index 74458fd923..8a3aaf72cb 100644 --- a/tests/avro/test_file.py +++ b/tests/avro/test_file.py @@ -26,7 +26,7 @@ from fastavro import reader, writer import pyiceberg.avro.file as avro -from pyiceberg.avro.codecs import DeflateCodec +from pyiceberg.avro.codecs.deflate import DeflateCodec from pyiceberg.avro.file import META_SCHEMA, AvroFileHeader from pyiceberg.io.pyarrow import PyArrowFileIO from pyiceberg.manifest import ( diff --git a/tests/avro/test_resolver.py b/tests/avro/test_resolver.py index c08ca235fb..07d41491fa 100644 --- a/tests/avro/test_resolver.py +++ b/tests/avro/test_resolver.py @@ -32,7 +32,7 @@ StringReader, StructReader, ) -from pyiceberg.avro.resolver import ResolveError, resolve_reader, resolve_writer +from pyiceberg.avro.resolver import resolve_reader, resolve_writer from pyiceberg.avro.writer import ( BinaryWriter, DefaultWriter, @@ -44,6 +44,7 @@ StringWriter, StructWriter, ) +from pyiceberg.exceptions import ResolveError from pyiceberg.io.pyarrow import PyArrowFileIO from pyiceberg.manifest import MANIFEST_ENTRY_SCHEMAS from pyiceberg.schema import Schema diff --git a/tests/io/test_pyarrow.py b/tests/io/test_pyarrow.py index 2acffdfdf9..c2d8f7bd12 100644 --- a/tests/io/test_pyarrow.py +++ b/tests/io/test_pyarrow.py @@ -28,8 +28,8 @@ import pytest from pyarrow.fs import FileType, LocalFileSystem -from pyiceberg.avro.resolver import ResolveError from pyiceberg.catalog.noop import NoopCatalog +from pyiceberg.exceptions import ResolveError from 
pyiceberg.expressions import ( AlwaysFalse, AlwaysTrue, From 1fd85c8c00318edbd8718afab4301ebaa1455fee Mon Sep 17 00:00:00 2001 From: Fokko Driesprong Date: Tue, 12 Mar 2024 22:57:18 +0100 Subject: [PATCH 168/169] build: Move back to the mmh3 (#460) --- poetry.lock | 1023 ++++++++++++++++++++++++++---------------------- pyproject.toml | 2 +- 2 files changed, 565 insertions(+), 460 deletions(-) diff --git a/poetry.lock b/poetry.lock index 6cd3829a78..4908aff6b7 100644 --- a/poetry.lock +++ b/poetry.lock @@ -46,87 +46,87 @@ boto3 = ["boto3 (>=1.33.2,<1.34.35)"] [[package]] name = "aiohttp" -version = "3.9.2" +version = "3.9.3" description = "Async http client/server framework (asyncio)" optional = true python-versions = ">=3.8" files = [ - {file = "aiohttp-3.9.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:772fbe371788e61c58d6d3d904268e48a594ba866804d08c995ad71b144f94cb"}, - {file = "aiohttp-3.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:edd4f1af2253f227ae311ab3d403d0c506c9b4410c7fc8d9573dec6d9740369f"}, - {file = "aiohttp-3.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cfee9287778399fdef6f8a11c9e425e1cb13cc9920fd3a3df8f122500978292b"}, - {file = "aiohttp-3.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cc158466f6a980a6095ee55174d1de5730ad7dec251be655d9a6a9dd7ea1ff9"}, - {file = "aiohttp-3.9.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:54ec82f45d57c9a65a1ead3953b51c704f9587440e6682f689da97f3e8defa35"}, - {file = "aiohttp-3.9.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:abeb813a18eb387f0d835ef51f88568540ad0325807a77a6e501fed4610f864e"}, - {file = "aiohttp-3.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc91d07280d7d169f3a0f9179d8babd0ee05c79d4d891447629ff0d7d8089ec2"}, - {file = "aiohttp-3.9.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b65e861f4bebfb660f7f0f40fa3eb9f2ab9af10647d05dac824390e7af8f75b7"}, - {file = "aiohttp-3.9.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:04fd8ffd2be73d42bcf55fd78cde7958eeee6d4d8f73c3846b7cba491ecdb570"}, - {file = "aiohttp-3.9.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:3d8d962b439a859b3ded9a1e111a4615357b01620a546bc601f25b0211f2da81"}, - {file = "aiohttp-3.9.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:8ceb658afd12b27552597cf9a65d9807d58aef45adbb58616cdd5ad4c258c39e"}, - {file = "aiohttp-3.9.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:0e4ee4df741670560b1bc393672035418bf9063718fee05e1796bf867e995fad"}, - {file = "aiohttp-3.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2dec87a556f300d3211decf018bfd263424f0690fcca00de94a837949fbcea02"}, - {file = "aiohttp-3.9.2-cp310-cp310-win32.whl", hash = "sha256:3e1a800f988ce7c4917f34096f81585a73dbf65b5c39618b37926b1238cf9bc4"}, - {file = "aiohttp-3.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:ea510718a41b95c236c992b89fdfc3d04cc7ca60281f93aaada497c2b4e05c46"}, - {file = "aiohttp-3.9.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6aaa6f99256dd1b5756a50891a20f0d252bd7bdb0854c5d440edab4495c9f973"}, - {file = "aiohttp-3.9.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a27d8c70ad87bcfce2e97488652075a9bdd5b70093f50b10ae051dfe5e6baf37"}, - {file = "aiohttp-3.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:54287bcb74d21715ac8382e9de146d9442b5f133d9babb7e5d9e453faadd005e"}, - {file = 
"aiohttp-3.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bb3d05569aa83011fcb346b5266e00b04180105fcacc63743fc2e4a1862a891"}, - {file = "aiohttp-3.9.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c8534e7d69bb8e8d134fe2be9890d1b863518582f30c9874ed7ed12e48abe3c4"}, - {file = "aiohttp-3.9.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4bd9d5b989d57b41e4ff56ab250c5ddf259f32db17159cce630fd543376bd96b"}, - {file = "aiohttp-3.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa6904088e6642609981f919ba775838ebf7df7fe64998b1a954fb411ffb4663"}, - {file = "aiohttp-3.9.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bda42eb410be91b349fb4ee3a23a30ee301c391e503996a638d05659d76ea4c2"}, - {file = "aiohttp-3.9.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:193cc1ccd69d819562cc7f345c815a6fc51d223b2ef22f23c1a0f67a88de9a72"}, - {file = "aiohttp-3.9.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b9f1cb839b621f84a5b006848e336cf1496688059d2408e617af33e3470ba204"}, - {file = "aiohttp-3.9.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:d22a0931848b8c7a023c695fa2057c6aaac19085f257d48baa24455e67df97ec"}, - {file = "aiohttp-3.9.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4112d8ba61fbd0abd5d43a9cb312214565b446d926e282a6d7da3f5a5aa71d36"}, - {file = "aiohttp-3.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c4ad4241b52bb2eb7a4d2bde060d31c2b255b8c6597dd8deac2f039168d14fd7"}, - {file = "aiohttp-3.9.2-cp311-cp311-win32.whl", hash = "sha256:ee2661a3f5b529f4fc8a8ffee9f736ae054adfb353a0d2f78218be90617194b3"}, - {file = "aiohttp-3.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:4deae2c165a5db1ed97df2868ef31ca3cc999988812e82386d22937d9d6fed52"}, - {file = "aiohttp-3.9.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:6f4cdba12539215aaecf3c310ce9d067b0081a0795dd8a8805fdb67a65c0572a"}, - {file = "aiohttp-3.9.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:84e843b33d5460a5c501c05539809ff3aee07436296ff9fbc4d327e32aa3a326"}, - {file = "aiohttp-3.9.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8008d0f451d66140a5aa1c17e3eedc9d56e14207568cd42072c9d6b92bf19b52"}, - {file = "aiohttp-3.9.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:61c47ab8ef629793c086378b1df93d18438612d3ed60dca76c3422f4fbafa792"}, - {file = "aiohttp-3.9.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bc71f748e12284312f140eaa6599a520389273174b42c345d13c7e07792f4f57"}, - {file = "aiohttp-3.9.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a1c3a4d0ab2f75f22ec80bca62385db2e8810ee12efa8c9e92efea45c1849133"}, - {file = "aiohttp-3.9.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a87aa0b13bbee025faa59fa58861303c2b064b9855d4c0e45ec70182bbeba1b"}, - {file = "aiohttp-3.9.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2cc0d04688b9f4a7854c56c18aa7af9e5b0a87a28f934e2e596ba7e14783192"}, - {file = "aiohttp-3.9.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1956e3ac376b1711c1533266dec4efd485f821d84c13ce1217d53e42c9e65f08"}, - {file = "aiohttp-3.9.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:114da29f39eccd71b93a0fcacff178749a5c3559009b4a4498c2c173a6d74dff"}, - {file = "aiohttp-3.9.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = 
"sha256:3f17999ae3927d8a9a823a1283b201344a0627272f92d4f3e3a4efe276972fe8"}, - {file = "aiohttp-3.9.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:f31df6a32217a34ae2f813b152a6f348154f948c83213b690e59d9e84020925c"}, - {file = "aiohttp-3.9.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:7a75307ffe31329928a8d47eae0692192327c599113d41b278d4c12b54e1bd11"}, - {file = "aiohttp-3.9.2-cp312-cp312-win32.whl", hash = "sha256:972b63d589ff8f305463593050a31b5ce91638918da38139b9d8deaba9e0fed7"}, - {file = "aiohttp-3.9.2-cp312-cp312-win_amd64.whl", hash = "sha256:200dc0246f0cb5405c80d18ac905c8350179c063ea1587580e3335bfc243ba6a"}, - {file = "aiohttp-3.9.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:158564d0d1020e0d3fe919a81d97aadad35171e13e7b425b244ad4337fc6793a"}, - {file = "aiohttp-3.9.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:da1346cd0ccb395f0ed16b113ebb626fa43b7b07fd7344fce33e7a4f04a8897a"}, - {file = "aiohttp-3.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:eaa9256de26ea0334ffa25f1913ae15a51e35c529a1ed9af8e6286dd44312554"}, - {file = "aiohttp-3.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1543e7fb00214fb4ccead42e6a7d86f3bb7c34751ec7c605cca7388e525fd0b4"}, - {file = "aiohttp-3.9.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:186e94570433a004e05f31f632726ae0f2c9dee4762a9ce915769ce9c0a23d89"}, - {file = "aiohttp-3.9.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d52d20832ac1560f4510d68e7ba8befbc801a2b77df12bd0cd2bcf3b049e52a4"}, - {file = "aiohttp-3.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c45e4e815ac6af3b72ca2bde9b608d2571737bb1e2d42299fc1ffdf60f6f9a1"}, - {file = "aiohttp-3.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa906b9bdfd4a7972dd0628dbbd6413d2062df5b431194486a78f0d2ae87bd55"}, - {file = "aiohttp-3.9.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:68bbee9e17d66f17bb0010aa15a22c6eb28583edcc8b3212e2b8e3f77f3ebe2a"}, - {file = "aiohttp-3.9.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4c189b64bd6d9a403a1a3f86a3ab3acbc3dc41a68f73a268a4f683f89a4dec1f"}, - {file = "aiohttp-3.9.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:8a7876f794523123bca6d44bfecd89c9fec9ec897a25f3dd202ee7fc5c6525b7"}, - {file = "aiohttp-3.9.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:d23fba734e3dd7b1d679b9473129cd52e4ec0e65a4512b488981a56420e708db"}, - {file = "aiohttp-3.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b141753be581fab842a25cb319f79536d19c2a51995d7d8b29ee290169868eab"}, - {file = "aiohttp-3.9.2-cp38-cp38-win32.whl", hash = "sha256:103daf41ff3b53ba6fa09ad410793e2e76c9d0269151812e5aba4b9dd674a7e8"}, - {file = "aiohttp-3.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:328918a6c2835861ff7afa8c6d2c70c35fdaf996205d5932351bdd952f33fa2f"}, - {file = "aiohttp-3.9.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5264d7327c9464786f74e4ec9342afbbb6ee70dfbb2ec9e3dfce7a54c8043aa3"}, - {file = "aiohttp-3.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:07205ae0015e05c78b3288c1517afa000823a678a41594b3fdc870878d645305"}, - {file = "aiohttp-3.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ae0a1e638cffc3ec4d4784b8b4fd1cf28968febc4bd2718ffa25b99b96a741bd"}, - {file = "aiohttp-3.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d43302a30ba1166325974858e6ef31727a23bdd12db40e725bec0f759abce505"}, - {file = 
"aiohttp-3.9.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:16a967685907003765855999af11a79b24e70b34dc710f77a38d21cd9fc4f5fe"}, - {file = "aiohttp-3.9.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6fa3ee92cd441d5c2d07ca88d7a9cef50f7ec975f0117cd0c62018022a184308"}, - {file = "aiohttp-3.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b500c5ad9c07639d48615a770f49618130e61be36608fc9bc2d9bae31732b8f"}, - {file = "aiohttp-3.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c07327b368745b1ce2393ae9e1aafed7073d9199e1dcba14e035cc646c7941bf"}, - {file = "aiohttp-3.9.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:cc7d6502c23a0ec109687bf31909b3fb7b196faf198f8cff68c81b49eb316ea9"}, - {file = "aiohttp-3.9.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:07be2be7071723c3509ab5c08108d3a74f2181d4964e869f2504aaab68f8d3e8"}, - {file = "aiohttp-3.9.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:122468f6fee5fcbe67cb07014a08c195b3d4c41ff71e7b5160a7bcc41d585a5f"}, - {file = "aiohttp-3.9.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:00a9abcea793c81e7f8778ca195a1714a64f6d7436c4c0bb168ad2a212627000"}, - {file = "aiohttp-3.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7a9825fdd64ecac5c670234d80bb52bdcaa4139d1f839165f548208b3779c6c6"}, - {file = "aiohttp-3.9.2-cp39-cp39-win32.whl", hash = "sha256:5422cd9a4a00f24c7244e1b15aa9b87935c85fb6a00c8ac9b2527b38627a9211"}, - {file = "aiohttp-3.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:7d579dcd5d82a86a46f725458418458fa43686f6a7b252f2966d359033ffc8ab"}, - {file = "aiohttp-3.9.2.tar.gz", hash = "sha256:b0ad0a5e86ce73f5368a164c10ada10504bf91869c05ab75d982c6048217fbf7"}, + {file = "aiohttp-3.9.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:939677b61f9d72a4fa2a042a5eee2a99a24001a67c13da113b2e30396567db54"}, + {file = "aiohttp-3.9.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1f5cd333fcf7590a18334c90f8c9147c837a6ec8a178e88d90a9b96ea03194cc"}, + {file = "aiohttp-3.9.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:82e6aa28dd46374f72093eda8bcd142f7771ee1eb9d1e223ff0fa7177a96b4a5"}, + {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f56455b0c2c7cc3b0c584815264461d07b177f903a04481dfc33e08a89f0c26b"}, + {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bca77a198bb6e69795ef2f09a5f4c12758487f83f33d63acde5f0d4919815768"}, + {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e083c285857b78ee21a96ba1eb1b5339733c3563f72980728ca2b08b53826ca5"}, + {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab40e6251c3873d86ea9b30a1ac6d7478c09277b32e14745d0d3c6e76e3c7e29"}, + {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:df822ee7feaaeffb99c1a9e5e608800bd8eda6e5f18f5cfb0dc7eeb2eaa6bbec"}, + {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:acef0899fea7492145d2bbaaaec7b345c87753168589cc7faf0afec9afe9b747"}, + {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cd73265a9e5ea618014802ab01babf1940cecb90c9762d8b9e7d2cc1e1969ec6"}, + {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:a78ed8a53a1221393d9637c01870248a6f4ea5b214a59a92a36f18151739452c"}, + 
{file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:6b0e029353361f1746bac2e4cc19b32f972ec03f0f943b390c4ab3371840aabf"}, + {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7cf5c9458e1e90e3c390c2639f1017a0379a99a94fdfad3a1fd966a2874bba52"}, + {file = "aiohttp-3.9.3-cp310-cp310-win32.whl", hash = "sha256:3e59c23c52765951b69ec45ddbbc9403a8761ee6f57253250c6e1536cacc758b"}, + {file = "aiohttp-3.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:055ce4f74b82551678291473f66dc9fb9048a50d8324278751926ff0ae7715e5"}, + {file = "aiohttp-3.9.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6b88f9386ff1ad91ace19d2a1c0225896e28815ee09fc6a8932fded8cda97c3d"}, + {file = "aiohttp-3.9.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c46956ed82961e31557b6857a5ca153c67e5476972e5f7190015018760938da2"}, + {file = "aiohttp-3.9.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:07b837ef0d2f252f96009e9b8435ec1fef68ef8b1461933253d318748ec1acdc"}, + {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad46e6f620574b3b4801c68255492e0159d1712271cc99d8bdf35f2043ec266"}, + {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ed3e046ea7b14938112ccd53d91c1539af3e6679b222f9469981e3dac7ba1ce"}, + {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:039df344b45ae0b34ac885ab5b53940b174530d4dd8a14ed8b0e2155b9dddccb"}, + {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7943c414d3a8d9235f5f15c22ace69787c140c80b718dcd57caaade95f7cd93b"}, + {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84871a243359bb42c12728f04d181a389718710129b36b6aad0fc4655a7647d4"}, + {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5eafe2c065df5401ba06821b9a054d9cb2848867f3c59801b5d07a0be3a380ae"}, + {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:9d3c9b50f19704552f23b4eaea1fc082fdd82c63429a6506446cbd8737823da3"}, + {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:f033d80bc6283092613882dfe40419c6a6a1527e04fc69350e87a9df02bbc283"}, + {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:2c895a656dd7e061b2fd6bb77d971cc38f2afc277229ce7dd3552de8313a483e"}, + {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1f5a71d25cd8106eab05f8704cd9167b6e5187bcdf8f090a66c6d88b634802b4"}, + {file = "aiohttp-3.9.3-cp311-cp311-win32.whl", hash = "sha256:50fca156d718f8ced687a373f9e140c1bb765ca16e3d6f4fe116e3df7c05b2c5"}, + {file = "aiohttp-3.9.3-cp311-cp311-win_amd64.whl", hash = "sha256:5fe9ce6c09668063b8447f85d43b8d1c4e5d3d7e92c63173e6180b2ac5d46dd8"}, + {file = "aiohttp-3.9.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:38a19bc3b686ad55804ae931012f78f7a534cce165d089a2059f658f6c91fa60"}, + {file = "aiohttp-3.9.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:770d015888c2a598b377bd2f663adfd947d78c0124cfe7b959e1ef39f5b13869"}, + {file = "aiohttp-3.9.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ee43080e75fc92bf36219926c8e6de497f9b247301bbf88c5c7593d931426679"}, + {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52df73f14ed99cee84865b95a3d9e044f226320a87af208f068ecc33e0c35b96"}, + {file = 
"aiohttp-3.9.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc9b311743a78043b26ffaeeb9715dc360335e5517832f5a8e339f8a43581e4d"}, + {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b955ed993491f1a5da7f92e98d5dad3c1e14dc175f74517c4e610b1f2456fb11"}, + {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:504b6981675ace64c28bf4a05a508af5cde526e36492c98916127f5a02354d53"}, + {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a6fe5571784af92b6bc2fda8d1925cccdf24642d49546d3144948a6a1ed58ca5"}, + {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ba39e9c8627edc56544c8628cc180d88605df3892beeb2b94c9bc857774848ca"}, + {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e5e46b578c0e9db71d04c4b506a2121c0cb371dd89af17a0586ff6769d4c58c1"}, + {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:938a9653e1e0c592053f815f7028e41a3062e902095e5a7dc84617c87267ebd5"}, + {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:c3452ea726c76e92f3b9fae4b34a151981a9ec0a4847a627c43d71a15ac32aa6"}, + {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ff30218887e62209942f91ac1be902cc80cddb86bf00fbc6783b7a43b2bea26f"}, + {file = "aiohttp-3.9.3-cp312-cp312-win32.whl", hash = "sha256:38f307b41e0bea3294a9a2a87833191e4bcf89bb0365e83a8be3a58b31fb7f38"}, + {file = "aiohttp-3.9.3-cp312-cp312-win_amd64.whl", hash = "sha256:b791a3143681a520c0a17e26ae7465f1b6f99461a28019d1a2f425236e6eedb5"}, + {file = "aiohttp-3.9.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0ed621426d961df79aa3b963ac7af0d40392956ffa9be022024cd16297b30c8c"}, + {file = "aiohttp-3.9.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7f46acd6a194287b7e41e87957bfe2ad1ad88318d447caf5b090012f2c5bb528"}, + {file = "aiohttp-3.9.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:feeb18a801aacb098220e2c3eea59a512362eb408d4afd0c242044c33ad6d542"}, + {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f734e38fd8666f53da904c52a23ce517f1b07722118d750405af7e4123933511"}, + {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b40670ec7e2156d8e57f70aec34a7216407848dfe6c693ef131ddf6e76feb672"}, + {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fdd215b7b7fd4a53994f238d0f46b7ba4ac4c0adb12452beee724ddd0743ae5d"}, + {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:017a21b0df49039c8f46ca0971b3a7fdc1f56741ab1240cb90ca408049766168"}, + {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e99abf0bba688259a496f966211c49a514e65afa9b3073a1fcee08856e04425b"}, + {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:648056db9a9fa565d3fa851880f99f45e3f9a771dd3ff3bb0c048ea83fb28194"}, + {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8aacb477dc26797ee089721536a292a664846489c49d3ef9725f992449eda5a8"}, + {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:522a11c934ea660ff8953eda090dcd2154d367dec1ae3c540aff9f8a5c109ab4"}, + {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = 
"sha256:5bce0dc147ca85caa5d33debc4f4d65e8e8b5c97c7f9f660f215fa74fc49a321"}, + {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b4af9f25b49a7be47c0972139e59ec0e8285c371049df1a63b6ca81fdd216a2"}, + {file = "aiohttp-3.9.3-cp38-cp38-win32.whl", hash = "sha256:298abd678033b8571995650ccee753d9458dfa0377be4dba91e4491da3f2be63"}, + {file = "aiohttp-3.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:69361bfdca5468c0488d7017b9b1e5ce769d40b46a9f4a2eed26b78619e9396c"}, + {file = "aiohttp-3.9.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0fa43c32d1643f518491d9d3a730f85f5bbaedcbd7fbcae27435bb8b7a061b29"}, + {file = "aiohttp-3.9.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:835a55b7ca49468aaaac0b217092dfdff370e6c215c9224c52f30daaa735c1c1"}, + {file = "aiohttp-3.9.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:06a9b2c8837d9a94fae16c6223acc14b4dfdff216ab9b7202e07a9a09541168f"}, + {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abf151955990d23f84205286938796c55ff11bbfb4ccfada8c9c83ae6b3c89a3"}, + {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59c26c95975f26e662ca78fdf543d4eeaef70e533a672b4113dd888bd2423caa"}, + {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f95511dd5d0e05fd9728bac4096319f80615aaef4acbecb35a990afebe953b0e"}, + {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:595f105710293e76b9dc09f52e0dd896bd064a79346234b521f6b968ffdd8e58"}, + {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7c8b816c2b5af5c8a436df44ca08258fc1a13b449393a91484225fcb7545533"}, + {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f1088fa100bf46e7b398ffd9904f4808a0612e1d966b4aa43baa535d1b6341eb"}, + {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f59dfe57bb1ec82ac0698ebfcdb7bcd0e99c255bd637ff613760d5f33e7c81b3"}, + {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:361a1026c9dd4aba0109e4040e2aecf9884f5cfe1b1b1bd3d09419c205e2e53d"}, + {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:363afe77cfcbe3a36353d8ea133e904b108feea505aa4792dad6585a8192c55a"}, + {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e2c45c208c62e955e8256949eb225bd8b66a4c9b6865729a786f2aa79b72e9d"}, + {file = "aiohttp-3.9.3-cp39-cp39-win32.whl", hash = "sha256:f7217af2e14da0856e082e96ff637f14ae45c10a5714b63c77f26d8884cf1051"}, + {file = "aiohttp-3.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:27468897f628c627230dba07ec65dc8d0db566923c48f29e084ce382119802bc"}, + {file = "aiohttp-3.9.3.tar.gz", hash = "sha256:90842933e5d1ff760fae6caca4b2b3edba53ba8f4b71e95dacf2818a2aca06f7"}, ] [package.dependencies] @@ -214,13 +214,13 @@ tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "p [[package]] name = "aws-sam-translator" -version = "1.83.0" +version = "1.85.0" description = "AWS SAM Translator is a library that transform SAM templates into AWS CloudFormation templates" optional = false python-versions = ">=3.8, <=4.0, !=4.0" files = [ - {file = "aws-sam-translator-1.83.0.tar.gz", hash = "sha256:46025ca8894a56eacd87eb0e4f9af5c01c567c9a734b97fbba353bffd56ba5dc"}, - {file = "aws_sam_translator-1.83.0-py3-none-any.whl", hash = "sha256:022246b4745cc9067f88ab2b051f49898606bcf0222e22b9da41063e5619c6f9"}, + {file = 
"aws-sam-translator-1.85.0.tar.gz", hash = "sha256:e41938affa128fb5bde5e1989b260bf539a96369bba3faf316ce66651351df39"}, + {file = "aws_sam_translator-1.85.0-py3-none-any.whl", hash = "sha256:e8c69a4db7279421ff6c3579cd4d43395fe9b6781f50416528e984be68e25481"}, ] [package.dependencies] @@ -249,13 +249,13 @@ wrapt = "*" [[package]] name = "azure-core" -version = "1.29.7" +version = "1.30.0" description = "Microsoft Azure Core Library for Python" optional = true python-versions = ">=3.7" files = [ - {file = "azure-core-1.29.7.tar.gz", hash = "sha256:2944faf1a7ff1558b1f457cabf60f279869cabaeef86b353bed8eb032c7d8c5e"}, - {file = "azure_core-1.29.7-py3-none-any.whl", hash = "sha256:95a7b41b4af102e5fcdfac9500fcc82ff86e936c7145a099b7848b9ac0501250"}, + {file = "azure-core-1.30.0.tar.gz", hash = "sha256:6f3a7883ef184722f6bd997262eddaf80cfe7e5b3e0caaaf8db1695695893d35"}, + {file = "azure_core-1.30.0-py3-none-any.whl", hash = "sha256:3dae7962aad109610e68c9a7abb31d79720e1d982ddf61363038d175a5025e89"}, ] [package.dependencies] @@ -408,13 +408,13 @@ files = [ [[package]] name = "certifi" -version = "2023.11.17" +version = "2024.2.2" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"}, - {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"}, + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, ] [[package]] @@ -494,17 +494,17 @@ files = [ [[package]] name = "cfn-lint" -version = "0.84.0" +version = "0.85.2" description = "Checks CloudFormation templates for practices and behaviour that could potentially be improved" optional = false python-versions = ">=3.8, <=4.0, !=4.0" files = [ - {file = "cfn-lint-0.84.0.tar.gz", hash = "sha256:7a819ffa48ab23f775037ddb0d9330ba206d547439f69bc72f76f1183d8bc124"}, - {file = "cfn_lint-0.84.0-py3-none-any.whl", hash = "sha256:1bf18bca82c6666fd204a265a203bd307816e8d31fe476301b105c5370e306e5"}, + {file = "cfn-lint-0.85.2.tar.gz", hash = "sha256:f8a5cc55daeaaa747b8d776dcf62fe1b6bfb8cb46ae60950cbe627601facccd7"}, + {file = "cfn_lint-0.85.2-py3-none-any.whl", hash = "sha256:e7a0aafb9ad93dbe5db54cbefca92a94f2d173309218273ef997ecb048125d89"}, ] [package.dependencies] -aws-sam-translator = ">=1.83.0" +aws-sam-translator = ">=1.84.0" jschema-to-python = ">=1.2.3,<1.3.0" jsonpatch = "*" jsonschema = ">=3.0,<5" @@ -708,43 +708,43 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "42.0.2" +version = "42.0.4" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-42.0.2-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:701171f825dcab90969596ce2af253143b93b08f1a716d4b2a9d2db5084ef7be"}, - {file = "cryptography-42.0.2-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:61321672b3ac7aade25c40449ccedbc6db72c7f5f0fdf34def5e2f8b51ca530d"}, - {file = "cryptography-42.0.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea2c3ffb662fec8bbbfce5602e2c159ff097a4631d96235fcf0fb00e59e3ece4"}, - {file = "cryptography-42.0.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b15c678f27d66d247132cbf13df2f75255627bcc9b6a570f7d2fd08e8c081d2"}, - {file = "cryptography-42.0.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:8e88bb9eafbf6a4014d55fb222e7360eef53e613215085e65a13290577394529"}, - {file = "cryptography-42.0.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a047682d324ba56e61b7ea7c7299d51e61fd3bca7dad2ccc39b72bd0118d60a1"}, - {file = "cryptography-42.0.2-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:36d4b7c4be6411f58f60d9ce555a73df8406d484ba12a63549c88bd64f7967f1"}, - {file = "cryptography-42.0.2-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:a00aee5d1b6c20620161984f8ab2ab69134466c51f58c052c11b076715e72929"}, - {file = "cryptography-42.0.2-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:b97fe7d7991c25e6a31e5d5e795986b18fbbb3107b873d5f3ae6dc9a103278e9"}, - {file = "cryptography-42.0.2-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:5fa82a26f92871eca593b53359c12ad7949772462f887c35edaf36f87953c0e2"}, - {file = "cryptography-42.0.2-cp37-abi3-win32.whl", hash = "sha256:4b063d3413f853e056161eb0c7724822a9740ad3caa24b8424d776cebf98e7ee"}, - {file = "cryptography-42.0.2-cp37-abi3-win_amd64.whl", hash = "sha256:841ec8af7a8491ac76ec5a9522226e287187a3107e12b7d686ad354bb78facee"}, - {file = "cryptography-42.0.2-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:55d1580e2d7e17f45d19d3b12098e352f3a37fe86d380bf45846ef257054b242"}, - {file = "cryptography-42.0.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28cb2c41f131a5758d6ba6a0504150d644054fd9f3203a1e8e8d7ac3aea7f73a"}, - {file = "cryptography-42.0.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9097a208875fc7bbeb1286d0125d90bdfed961f61f214d3f5be62cd4ed8a446"}, - {file = "cryptography-42.0.2-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:44c95c0e96b3cb628e8452ec060413a49002a247b2b9938989e23a2c8291fc90"}, - {file = "cryptography-42.0.2-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:2f9f14185962e6a04ab32d1abe34eae8a9001569ee4edb64d2304bf0d65c53f3"}, - {file = "cryptography-42.0.2-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:09a77e5b2e8ca732a19a90c5bca2d124621a1edb5438c5daa2d2738bfeb02589"}, - {file = "cryptography-42.0.2-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:ad28cff53f60d99a928dfcf1e861e0b2ceb2bc1f08a074fdd601b314e1cc9e0a"}, - {file = "cryptography-42.0.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:130c0f77022b2b9c99d8cebcdd834d81705f61c68e91ddd614ce74c657f8b3ea"}, - {file = "cryptography-42.0.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:fa3dec4ba8fb6e662770b74f62f1a0c7d4e37e25b58b2bf2c1be4c95372b4a33"}, - {file = "cryptography-42.0.2-cp39-abi3-win32.whl", hash = "sha256:3dbd37e14ce795b4af61b89b037d4bc157f2cb23e676fa16932185a04dfbf635"}, - {file = "cryptography-42.0.2-cp39-abi3-win_amd64.whl", hash = 
"sha256:8a06641fb07d4e8f6c7dda4fc3f8871d327803ab6542e33831c7ccfdcb4d0ad6"}, - {file = "cryptography-42.0.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:087887e55e0b9c8724cf05361357875adb5c20dec27e5816b653492980d20380"}, - {file = "cryptography-42.0.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a7ef8dd0bf2e1d0a27042b231a3baac6883cdd5557036f5e8df7139255feaac6"}, - {file = "cryptography-42.0.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4383b47f45b14459cab66048d384614019965ba6c1a1a141f11b5a551cace1b2"}, - {file = "cryptography-42.0.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:fbeb725c9dc799a574518109336acccaf1303c30d45c075c665c0793c2f79a7f"}, - {file = "cryptography-42.0.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:320948ab49883557a256eab46149df79435a22d2fefd6a66fe6946f1b9d9d008"}, - {file = "cryptography-42.0.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5ef9bc3d046ce83c4bbf4c25e1e0547b9c441c01d30922d812e887dc5f125c12"}, - {file = "cryptography-42.0.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:52ed9ebf8ac602385126c9a2fe951db36f2cb0c2538d22971487f89d0de4065a"}, - {file = "cryptography-42.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:141e2aa5ba100d3788c0ad7919b288f89d1fe015878b9659b307c9ef867d3a65"}, - {file = "cryptography-42.0.2.tar.gz", hash = "sha256:e0ec52ba3c7f1b7d813cd52649a5b3ef1fc0d433219dc8c93827c57eab6cf888"}, + {file = "cryptography-42.0.4-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:ffc73996c4fca3d2b6c1c8c12bfd3ad00def8621da24f547626bf06441400449"}, + {file = "cryptography-42.0.4-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:db4b65b02f59035037fde0998974d84244a64c3265bdef32a827ab9b63d61b18"}, + {file = "cryptography-42.0.4-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad9c385ba8ee025bb0d856714f71d7840020fe176ae0229de618f14dae7a6e2"}, + {file = "cryptography-42.0.4-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69b22ab6506a3fe483d67d1ed878e1602bdd5912a134e6202c1ec672233241c1"}, + {file = "cryptography-42.0.4-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:e09469a2cec88fb7b078e16d4adec594414397e8879a4341c6ace96013463d5b"}, + {file = "cryptography-42.0.4-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3e970a2119507d0b104f0a8e281521ad28fc26f2820687b3436b8c9a5fcf20d1"}, + {file = "cryptography-42.0.4-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:e53dc41cda40b248ebc40b83b31516487f7db95ab8ceac1f042626bc43a2f992"}, + {file = "cryptography-42.0.4-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:c3a5cbc620e1e17009f30dd34cb0d85c987afd21c41a74352d1719be33380885"}, + {file = "cryptography-42.0.4-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:6bfadd884e7280df24d26f2186e4e07556a05d37393b0f220a840b083dc6a824"}, + {file = "cryptography-42.0.4-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:01911714117642a3f1792c7f376db572aadadbafcd8d75bb527166009c9f1d1b"}, + {file = "cryptography-42.0.4-cp37-abi3-win32.whl", hash = "sha256:fb0cef872d8193e487fc6bdb08559c3aa41b659a7d9be48b2e10747f47863925"}, + {file = "cryptography-42.0.4-cp37-abi3-win_amd64.whl", hash = "sha256:c1f25b252d2c87088abc8bbc4f1ecbf7c919e05508a7e8628e6875c40bc70923"}, + {file = "cryptography-42.0.4-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:15a1fb843c48b4a604663fa30af60818cd28f895572386e5f9b8a665874c26e7"}, + {file = "cryptography-42.0.4-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:a1327f280c824ff7885bdeef8578f74690e9079267c1c8bd7dc5cc5aa065ae52"}, + {file = "cryptography-42.0.4-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ffb03d419edcab93b4b19c22ee80c007fb2d708429cecebf1dd3258956a563a"}, + {file = "cryptography-42.0.4-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:1df6fcbf60560d2113b5ed90f072dc0b108d64750d4cbd46a21ec882c7aefce9"}, + {file = "cryptography-42.0.4-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:44a64043f743485925d3bcac548d05df0f9bb445c5fcca6681889c7c3ab12764"}, + {file = "cryptography-42.0.4-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:3c6048f217533d89f2f8f4f0fe3044bf0b2090453b7b73d0b77db47b80af8dff"}, + {file = "cryptography-42.0.4-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:6d0fbe73728c44ca3a241eff9aefe6496ab2656d6e7a4ea2459865f2e8613257"}, + {file = "cryptography-42.0.4-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:887623fe0d70f48ab3f5e4dbf234986b1329a64c066d719432d0698522749929"}, + {file = "cryptography-42.0.4-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:ce8613beaffc7c14f091497346ef117c1798c202b01153a8cc7b8e2ebaaf41c0"}, + {file = "cryptography-42.0.4-cp39-abi3-win32.whl", hash = "sha256:810bcf151caefc03e51a3d61e53335cd5c7316c0a105cc695f0959f2c638b129"}, + {file = "cryptography-42.0.4-cp39-abi3-win_amd64.whl", hash = "sha256:a0298bdc6e98ca21382afe914c642620370ce0470a01e1bef6dd9b5354c36854"}, + {file = "cryptography-42.0.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5f8907fcf57392cd917892ae83708761c6ff3c37a8e835d7246ff0ad251d9298"}, + {file = "cryptography-42.0.4-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:12d341bd42cdb7d4937b0cabbdf2a94f949413ac4504904d0cdbdce4a22cbf88"}, + {file = "cryptography-42.0.4-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1cdcdbd117681c88d717437ada72bdd5be9de117f96e3f4d50dab3f59fd9ab20"}, + {file = "cryptography-42.0.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0e89f7b84f421c56e7ff69f11c441ebda73b8a8e6488d322ef71746224c20fce"}, + {file = "cryptography-42.0.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f1e85a178384bf19e36779d91ff35c7617c885da487d689b05c1366f9933ad74"}, + {file = "cryptography-42.0.4-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d2a27aca5597c8a71abbe10209184e1a8e91c1fd470b5070a2ea60cafec35bcd"}, + {file = "cryptography-42.0.4-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4e36685cb634af55e0677d435d425043967ac2f3790ec652b2b88ad03b85c27b"}, + {file = "cryptography-42.0.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:f47be41843200f7faec0683ad751e5ef11b9a56a220d57f300376cd8aba81660"}, + {file = "cryptography-42.0.4.tar.gz", hash = "sha256:831a4b37accef30cccd34fcb916a5d7b5be3cbbe27268a02832c3e450aea39cb"}, ] [package.dependencies] @@ -1015,13 +1015,13 @@ typing = ["typing-extensions (>=4.8)"] [[package]] name = "flask" -version = "3.0.1" +version = "3.0.2" description = "A simple framework for building complex web applications." 
optional = false python-versions = ">=3.8" files = [ - {file = "flask-3.0.1-py3-none-any.whl", hash = "sha256:ca631a507f6dfe6c278ae20112cea3ff54ff2216390bf8880f6b035a5354af13"}, - {file = "flask-3.0.1.tar.gz", hash = "sha256:6489f51bb3666def6f314e15f19d50a1869a19ae0e8c9a3641ffe66c77d42403"}, + {file = "flask-3.0.2-py3-none-any.whl", hash = "sha256:3232e0e9c850d781933cf0207523d1ece087eb8d87b23777ae38456e2fbe7c6e"}, + {file = "flask-3.0.2.tar.gz", hash = "sha256:822c03f4b799204250a7ee84b1eddc40665395333973dfb9deebfe425fefcb7d"}, ] [package.dependencies] @@ -1233,13 +1233,13 @@ ray = ["packaging", "ray[client,data] (>=2.0.0)"] [[package]] name = "google-api-core" -version = "2.15.0" +version = "2.17.1" description = "Google API client core library" optional = true python-versions = ">=3.7" files = [ - {file = "google-api-core-2.15.0.tar.gz", hash = "sha256:abc978a72658f14a2df1e5e12532effe40f94f868f6e23d95133bd6abcca35ca"}, - {file = "google_api_core-2.15.0-py3-none-any.whl", hash = "sha256:2aa56d2be495551e66bbff7f729b790546f87d5c90e74781aa77233bcb395a8a"}, + {file = "google-api-core-2.17.1.tar.gz", hash = "sha256:9df18a1f87ee0df0bc4eea2770ebc4228392d8cc4066655b320e2cfccb15db95"}, + {file = "google_api_core-2.17.1-py3-none-any.whl", hash = "sha256:610c5b90092c360736baccf17bd3efbcb30dd380e7a6dc28a71059edb8bd0d8e"}, ] [package.dependencies] @@ -1255,13 +1255,13 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] [[package]] name = "google-auth" -version = "2.26.2" +version = "2.28.0" description = "Google Authentication Library" optional = true python-versions = ">=3.7" files = [ - {file = "google-auth-2.26.2.tar.gz", hash = "sha256:97327dbbf58cccb58fc5a1712bba403ae76668e64814eb30f7316f7e27126b81"}, - {file = "google_auth-2.26.2-py2.py3-none-any.whl", hash = "sha256:3f445c8ce9b61ed6459aad86d8ccdba4a9afed841b2d1451a11ef4db08957424"}, + {file = "google-auth-2.28.0.tar.gz", hash = "sha256:3cfc1b6e4e64797584fb53fc9bd0b7afa9b7c0dba2004fa7dcc9349e58cc3195"}, + {file = "google_auth-2.28.0-py2.py3-none-any.whl", hash = "sha256:7634d29dcd1e101f5226a23cbc4a0c6cda6394253bf80e281d9c5c6797869c53"}, ] [package.dependencies] @@ -1533,13 +1533,13 @@ test = ["objgraph", "psutil"] [[package]] name = "identify" -version = "2.5.33" +version = "2.5.35" description = "File identification library for Python" optional = false python-versions = ">=3.8" files = [ - {file = "identify-2.5.33-py2.py3-none-any.whl", hash = "sha256:d40ce5fcd762817627670da8a7d8d8e65f24342d14539c59488dc603bf662e34"}, - {file = "identify-2.5.33.tar.gz", hash = "sha256:161558f9fe4559e1557e1bff323e8631f6a0e4837f7497767c1782832f16b62d"}, + {file = "identify-2.5.35-py2.py3-none-any.whl", hash = "sha256:c4de0081837b211594f8e877a6b4fad7ca32bbfc1a9307fdd61c28bfe923f13e"}, + {file = "identify-2.5.35.tar.gz", hash = "sha256:10a7ca245cfcd756a554a7288159f72ff105ad233c7c4b9c6f0f4d108f5f6791"}, ] [package.extras] @@ -1717,19 +1717,18 @@ jsonpointer = ">=1.9" [[package]] name = "jsonpickle" -version = "3.0.2" +version = "3.0.3" description = "Python library for serializing any arbitrary object graph into JSON" optional = false python-versions = ">=3.7" files = [ - {file = "jsonpickle-3.0.2-py3-none-any.whl", hash = "sha256:4a8442d97ca3f77978afa58068768dba7bff2dbabe79a9647bc3cdafd4ef019f"}, - {file = "jsonpickle-3.0.2.tar.gz", hash = "sha256:e37abba4bfb3ca4a4647d28bb9f4706436f7b46c8a8333b4a718abafa8e46b37"}, + {file = "jsonpickle-3.0.3-py3-none-any.whl", hash = "sha256:e8d6dcc58f6722bea0321cd328fbda81c582461185688a535df02be0f699afb4"}, + {file = 
"jsonpickle-3.0.3.tar.gz", hash = "sha256:5691f44495327858ab3a95b9c440a79b41e35421be1a6e09a47b6c9b9421fd06"}, ] [package.extras] -docs = ["jaraco.packaging (>=3.2)", "rst.linker (>=1.9)", "sphinx"] -testing = ["ecdsa", "feedparser", "gmpy2", "numpy", "pandas", "pymongo", "pytest (>=3.5,!=3.7.3)", "pytest-black-multipy", "pytest-checkdocs (>=1.2.3)", "pytest-cov", "pytest-flake8 (>=1.1.1)", "scikit-learn", "sqlalchemy"] -testing-libs = ["simplejson", "ujson"] +docs = ["furo", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "sphinx"] +testing = ["ecdsa", "feedparser", "gmpy2", "numpy", "pandas", "pymongo", "pytest (>=3.5,!=3.7.3)", "pytest-checkdocs (>=1.2.3)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-ruff", "scikit-learn", "simplejson", "sqlalchemy", "ujson"] [[package]] name = "jsonpointer" @@ -1883,71 +1882,71 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] [[package]] name = "markupsafe" -version = "2.1.4" +version = "2.1.5" description = "Safely add untrusted strings to HTML/XML markup." optional = false python-versions = ">=3.7" files = [ - {file = "MarkupSafe-2.1.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:de8153a7aae3835484ac168a9a9bdaa0c5eee4e0bc595503c95d53b942879c84"}, - {file = "MarkupSafe-2.1.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e888ff76ceb39601c59e219f281466c6d7e66bd375b4ec1ce83bcdc68306796b"}, - {file = "MarkupSafe-2.1.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0b838c37ba596fcbfca71651a104a611543077156cb0a26fe0c475e1f152ee8"}, - {file = "MarkupSafe-2.1.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac1ebf6983148b45b5fa48593950f90ed6d1d26300604f321c74a9ca1609f8e"}, - {file = "MarkupSafe-2.1.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbad3d346df8f9d72622ac71b69565e621ada2ce6572f37c2eae8dacd60385d"}, - {file = "MarkupSafe-2.1.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d5291d98cd3ad9a562883468c690a2a238c4a6388ab3bd155b0c75dd55ece858"}, - {file = "MarkupSafe-2.1.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a7cc49ef48a3c7a0005a949f3c04f8baa5409d3f663a1b36f0eba9bfe2a0396e"}, - {file = "MarkupSafe-2.1.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b83041cda633871572f0d3c41dddd5582ad7d22f65a72eacd8d3d6d00291df26"}, - {file = "MarkupSafe-2.1.4-cp310-cp310-win32.whl", hash = "sha256:0c26f67b3fe27302d3a412b85ef696792c4a2386293c53ba683a89562f9399b0"}, - {file = "MarkupSafe-2.1.4-cp310-cp310-win_amd64.whl", hash = "sha256:a76055d5cb1c23485d7ddae533229039b850db711c554a12ea64a0fd8a0129e2"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9e9e3c4020aa2dc62d5dd6743a69e399ce3de58320522948af6140ac959ab863"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0042d6a9880b38e1dd9ff83146cc3c9c18a059b9360ceae207805567aacccc69"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55d03fea4c4e9fd0ad75dc2e7e2b6757b80c152c032ea1d1de487461d8140efc"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ab3a886a237f6e9c9f4f7d272067e712cdb4efa774bef494dccad08f39d8ae6"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abf5ebbec056817057bfafc0445916bb688a255a5146f900445d081db08cbabb"}, - {file = 
"MarkupSafe-2.1.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e1a0d1924a5013d4f294087e00024ad25668234569289650929ab871231668e7"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e7902211afd0af05fbadcc9a312e4cf10f27b779cf1323e78d52377ae4b72bea"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c669391319973e49a7c6230c218a1e3044710bc1ce4c8e6eb71f7e6d43a2c131"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-win32.whl", hash = "sha256:31f57d64c336b8ccb1966d156932f3daa4fee74176b0fdc48ef580be774aae74"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-win_amd64.whl", hash = "sha256:54a7e1380dfece8847c71bf7e33da5d084e9b889c75eca19100ef98027bd9f56"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:a76cd37d229fc385738bd1ce4cba2a121cf26b53864c1772694ad0ad348e509e"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:987d13fe1d23e12a66ca2073b8d2e2a75cec2ecb8eab43ff5624ba0ad42764bc"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5244324676254697fe5c181fc762284e2c5fceeb1c4e3e7f6aca2b6f107e60dc"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78bc995e004681246e85e28e068111a4c3f35f34e6c62da1471e844ee1446250"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a4d176cfdfde84f732c4a53109b293d05883e952bbba68b857ae446fa3119b4f"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:f9917691f410a2e0897d1ef99619fd3f7dd503647c8ff2475bf90c3cf222ad74"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:f06e5a9e99b7df44640767842f414ed5d7bedaaa78cd817ce04bbd6fd86e2dd6"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:396549cea79e8ca4ba65525470d534e8a41070e6b3500ce2414921099cb73e8d"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-win32.whl", hash = "sha256:f6be2d708a9d0e9b0054856f07ac7070fbe1754be40ca8525d5adccdbda8f475"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-win_amd64.whl", hash = "sha256:5045e892cfdaecc5b4c01822f353cf2c8feb88a6ec1c0adef2a2e705eef0f656"}, - {file = "MarkupSafe-2.1.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7a07f40ef8f0fbc5ef1000d0c78771f4d5ca03b4953fc162749772916b298fc4"}, - {file = "MarkupSafe-2.1.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d18b66fe626ac412d96c2ab536306c736c66cf2a31c243a45025156cc190dc8a"}, - {file = "MarkupSafe-2.1.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:698e84142f3f884114ea8cf83e7a67ca8f4ace8454e78fe960646c6c91c63bfa"}, - {file = "MarkupSafe-2.1.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49a3b78a5af63ec10d8604180380c13dcd870aba7928c1fe04e881d5c792dc4e"}, - {file = "MarkupSafe-2.1.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:15866d7f2dc60cfdde12ebb4e75e41be862348b4728300c36cdf405e258415ec"}, - {file = "MarkupSafe-2.1.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:6aa5e2e7fc9bc042ae82d8b79d795b9a62bd8f15ba1e7594e3db243f158b5565"}, - {file = "MarkupSafe-2.1.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:54635102ba3cf5da26eb6f96c4b8c53af8a9c0d97b64bdcb592596a6255d8518"}, - {file = "MarkupSafe-2.1.4-cp37-cp37m-win32.whl", hash = 
"sha256:3583a3a3ab7958e354dc1d25be74aee6228938312ee875a22330c4dc2e41beb0"}, - {file = "MarkupSafe-2.1.4-cp37-cp37m-win_amd64.whl", hash = "sha256:d6e427c7378c7f1b2bef6a344c925b8b63623d3321c09a237b7cc0e77dd98ceb"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:bf1196dcc239e608605b716e7b166eb5faf4bc192f8a44b81e85251e62584bd2"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4df98d4a9cd6a88d6a585852f56f2155c9cdb6aec78361a19f938810aa020954"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b835aba863195269ea358cecc21b400276747cc977492319fd7682b8cd2c253d"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23984d1bdae01bee794267424af55eef4dfc038dc5d1272860669b2aa025c9e3"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c98c33ffe20e9a489145d97070a435ea0679fddaabcafe19982fe9c971987d5"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9896fca4a8eb246defc8b2a7ac77ef7553b638e04fbf170bff78a40fa8a91474"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b0fe73bac2fed83839dbdbe6da84ae2a31c11cfc1c777a40dbd8ac8a6ed1560f"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c7556bafeaa0a50e2fe7dc86e0382dea349ebcad8f010d5a7dc6ba568eaaa789"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-win32.whl", hash = "sha256:fc1a75aa8f11b87910ffd98de62b29d6520b6d6e8a3de69a70ca34dea85d2a8a"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-win_amd64.whl", hash = "sha256:3a66c36a3864df95e4f62f9167c734b3b1192cb0851b43d7cc08040c074c6279"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:765f036a3d00395a326df2835d8f86b637dbaf9832f90f5d196c3b8a7a5080cb"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:21e7af8091007bf4bebf4521184f4880a6acab8df0df52ef9e513d8e5db23411"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5c31fe855c77cad679b302aabc42d724ed87c043b1432d457f4976add1c2c3e"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7653fa39578957bc42e5ebc15cf4361d9e0ee4b702d7d5ec96cdac860953c5b4"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:47bb5f0142b8b64ed1399b6b60f700a580335c8e1c57f2f15587bd072012decc"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:fe8512ed897d5daf089e5bd010c3dc03bb1bdae00b35588c49b98268d4a01e00"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:36d7626a8cca4d34216875aee5a1d3d654bb3dac201c1c003d182283e3205949"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b6f14a9cd50c3cb100eb94b3273131c80d102e19bb20253ac7bd7336118a673a"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-win32.whl", hash = "sha256:c8f253a84dbd2c63c19590fa86a032ef3d8cc18923b8049d91bcdeeb2581fbf6"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-win_amd64.whl", hash = "sha256:8b570a1537367b52396e53325769608f2a687ec9a4363647af1cded8928af959"}, - {file = "MarkupSafe-2.1.4.tar.gz", hash = "sha256:3aae9af4cac263007fd6309c64c6ab4506dd2b79382d9d19a1994f9240b8db4f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = 
"sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = 
"MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash 
= "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, ] [[package]] @@ -1962,48 +1961,97 @@ files = [ ] [[package]] -name = "mmhash3" -version = "3.0.1" -description = "Python wrapper for MurmurHash (MurmurHash3), a set of fast and robust hash functions." +name = "mmh3" +version = "4.1.0" +description = "Python extension for MurmurHash (MurmurHash3), a set of fast and robust hash functions." 
optional = false python-versions = "*" files = [ - {file = "mmhash3-3.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:47deea30cd8d3d5cd52dc740902a4c70383bfe8248eac29d0877fe63e03c2713"}, - {file = "mmhash3-3.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ecdaf4d1de617818bf05cd526ca558db6010beeba7ea9e19f695f2bdcac0e0a4"}, - {file = "mmhash3-3.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4675585617584e9e1aafa3a90ac0ac9a437257c507748d97de8b21977e9d6745"}, - {file = "mmhash3-3.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ebfd0c2af09b41f0fe1dd260799bda90a0fc7eba4477ccaeb3951527700cd58f"}, - {file = "mmhash3-3.0.1-cp310-cp310-win32.whl", hash = "sha256:68587dec7b8acdb7528fd511e295d8b5ccfe26022923a69867e1822f0fdb4c44"}, - {file = "mmhash3-3.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:54954ebe3a614f19ab4cfe24fa66ea093c493d9fac0533d002dd64c2540a0c99"}, - {file = "mmhash3-3.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b172f3bd3220b0bf56fd7cc760fd8a9033def47103611d63fe867003079a1256"}, - {file = "mmhash3-3.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:de7895eafabc32f7c0c09a81a624921f536768de6e438e3de69e3e954a4d7072"}, - {file = "mmhash3-3.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4b0914effe4ddd8d33149e3508564c17719465b0cc81691c4fa50d5e0e14f80"}, - {file = "mmhash3-3.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0575050ac691475938df1ad03d8738c5bd1eadef62093e76157ebb7f2df0946"}, - {file = "mmhash3-3.0.1-cp311-cp311-win32.whl", hash = "sha256:22f92f0f88f28b215357acd346362fa9f7c9fffb436beb42cc4b442b676dbaa3"}, - {file = "mmhash3-3.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:538240ab7936bf71b18304e5a7e7fd3c4c2fab103330ea99584bb4f777299a2b"}, - {file = "mmhash3-3.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:ca791bfb311e36ce13998e4632262ed4b95da9d3461941e18b6690760171a045"}, - {file = "mmhash3-3.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b41708f72c6aa2a49ada1f0b61e85c05cd8389ad31d463fd5bca29999a4d5f9c"}, - {file = "mmhash3-3.0.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a3ce9b4533ddc0a88ba045a27309714c3b127bd05e57fd252d1d5a71d4247ea7"}, - {file = "mmhash3-3.0.1-cp36-cp36m-win32.whl", hash = "sha256:bfafeb96fdeb10db8767d06e1f07b6fdcddba4aaa0dd15058561a49f7ae45345"}, - {file = "mmhash3-3.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:97fe077b24c948709ed2afc749bf6285d407bc54ff12c63d2dc86678c38a0b8e"}, - {file = "mmhash3-3.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0cfd91ccd5fca1ba7ee925297131a15dfb94c714bfe6ba0fb3b1ca78b12bbfec"}, - {file = "mmhash3-3.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d51b1005233141ce7394531af40a3f0fc1f274467bf8dff44dcf7987924fe58"}, - {file = "mmhash3-3.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:855c67b100e37df166acf79cdff58fa8f9f6c48be0d1e1b6e9ad0fa34a9661ef"}, - {file = "mmhash3-3.0.1-cp37-cp37m-win32.whl", hash = "sha256:bb3030df1334fd665427f8be8e8ce4f04aeab7f6010ce4f2c128f0099bdab96f"}, - {file = "mmhash3-3.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:1545e1177294afe4912d5a5e401c7fa9b799dd109b30289e7af74d5b07e7c474"}, - {file = "mmhash3-3.0.1-cp38-cp38-macosx_10_9_x86_64.whl", 
hash = "sha256:2479899e7dda834a671991a1098a691ab1c2eaa20c3e939d691ca4a19361cfe0"}, - {file = "mmhash3-3.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9056196d5e3d3d844433a63d806a683f710ab3aaf1c910550c7746464bc43ae"}, - {file = "mmhash3-3.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0d4c307af0bf70207305f70f131898be071d1b19a89f462b13487f5c25e8d4e"}, - {file = "mmhash3-3.0.1-cp38-cp38-win32.whl", hash = "sha256:5f885f65e329fd14bc38debac4a79eacf381e856965d9c65c4d1c6946ea190d0"}, - {file = "mmhash3-3.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:3b42d0bda5e1cd22c18b16887b0521060fb59d0aaaaf033feacbc0a2492d20fe"}, - {file = "mmhash3-3.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d3f333286bb87aa9dc6bd8e7960a55a27b011a401f24b889a50e6d219f65e7c9"}, - {file = "mmhash3-3.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6b7ef2eb95a18bcd02ce0d3e047adde3a025afd96c1d266a8a0d44574f44a307"}, - {file = "mmhash3-3.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6ac8a5f511c60f341bf9cae462bb4941abb149d98464ba5f4f4548875b601c6"}, - {file = "mmhash3-3.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efef9e632e6e248e46f52d108a5ebd1f28edaf427b7fd47ebf97dbff4b2cab81"}, - {file = "mmhash3-3.0.1-cp39-cp39-win32.whl", hash = "sha256:bdac06d72e448c67afb12e758b203d875e29d4097bb279a38a5649d44b518ba7"}, - {file = "mmhash3-3.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:0baeaa20cac5f75ed14f28056826bd9d9c8b2354b382073f3fd5190708992a0d"}, - {file = "mmhash3-3.0.1.tar.gz", hash = "sha256:a00d68f4a1cc434b9501513c8a29e18ed1ddad383677d72b41d71d0d862348af"}, + {file = "mmh3-4.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:be5ac76a8b0cd8095784e51e4c1c9c318c19edcd1709a06eb14979c8d850c31a"}, + {file = "mmh3-4.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:98a49121afdfab67cd80e912b36404139d7deceb6773a83620137aaa0da5714c"}, + {file = "mmh3-4.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5259ac0535874366e7d1a5423ef746e0d36a9e3c14509ce6511614bdc5a7ef5b"}, + {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5950827ca0453a2be357696da509ab39646044e3fa15cad364eb65d78797437"}, + {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1dd0f652ae99585b9dd26de458e5f08571522f0402155809fd1dc8852a613a39"}, + {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:99d25548070942fab1e4a6f04d1626d67e66d0b81ed6571ecfca511f3edf07e6"}, + {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53db8d9bad3cb66c8f35cbc894f336273f63489ce4ac416634932e3cbe79eb5b"}, + {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75da0f615eb55295a437264cc0b736753f830b09d102aa4c2a7d719bc445ec05"}, + {file = "mmh3-4.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b926b07fd678ea84b3a2afc1fa22ce50aeb627839c44382f3d0291e945621e1a"}, + {file = "mmh3-4.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c5b053334f9b0af8559d6da9dc72cef0a65b325ebb3e630c680012323c950bb6"}, + {file = "mmh3-4.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:5bf33dc43cd6de2cb86e0aa73a1cc6530f557854bbbe5d59f41ef6de2e353d7b"}, + {file = 
"mmh3-4.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fa7eacd2b830727ba3dd65a365bed8a5c992ecd0c8348cf39a05cc77d22f4970"}, + {file = "mmh3-4.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:42dfd6742b9e3eec599f85270617debfa0bbb913c545bb980c8a4fa7b2d047da"}, + {file = "mmh3-4.1.0-cp310-cp310-win32.whl", hash = "sha256:2974ad343f0d39dcc88e93ee6afa96cedc35a9883bc067febd7ff736e207fa47"}, + {file = "mmh3-4.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:74699a8984ded645c1a24d6078351a056f5a5f1fe5838870412a68ac5e28d865"}, + {file = "mmh3-4.1.0-cp310-cp310-win_arm64.whl", hash = "sha256:f0dc874cedc23d46fc488a987faa6ad08ffa79e44fb08e3cd4d4cf2877c00a00"}, + {file = "mmh3-4.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3280a463855b0eae64b681cd5b9ddd9464b73f81151e87bb7c91a811d25619e6"}, + {file = "mmh3-4.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:97ac57c6c3301769e757d444fa7c973ceb002cb66534b39cbab5e38de61cd896"}, + {file = "mmh3-4.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a7b6502cdb4dbd880244818ab363c8770a48cdccecf6d729ade0241b736b5ec0"}, + {file = "mmh3-4.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52ba2da04671a9621580ddabf72f06f0e72c1c9c3b7b608849b58b11080d8f14"}, + {file = "mmh3-4.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5a5fef4c4ecc782e6e43fbeab09cff1bac82c998a1773d3a5ee6a3605cde343e"}, + {file = "mmh3-4.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5135358a7e00991f73b88cdc8eda5203bf9de22120d10a834c5761dbeb07dd13"}, + {file = "mmh3-4.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cff9ae76a54f7c6fe0167c9c4028c12c1f6de52d68a31d11b6790bb2ae685560"}, + {file = "mmh3-4.1.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6f02576a4d106d7830ca90278868bf0983554dd69183b7bbe09f2fcd51cf54f"}, + {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:073d57425a23721730d3ff5485e2da489dd3c90b04e86243dd7211f889898106"}, + {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:71e32ddec7f573a1a0feb8d2cf2af474c50ec21e7a8263026e8d3b4b629805db"}, + {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7cbb20b29d57e76a58b40fd8b13a9130db495a12d678d651b459bf61c0714cea"}, + {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:a42ad267e131d7847076bb7e31050f6c4378cd38e8f1bf7a0edd32f30224d5c9"}, + {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4a013979fc9390abadc445ea2527426a0e7a4495c19b74589204f9b71bcaafeb"}, + {file = "mmh3-4.1.0-cp311-cp311-win32.whl", hash = "sha256:1d3b1cdad7c71b7b88966301789a478af142bddcb3a2bee563f7a7d40519a00f"}, + {file = "mmh3-4.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:0dc6dc32eb03727467da8e17deffe004fbb65e8b5ee2b502d36250d7a3f4e2ec"}, + {file = "mmh3-4.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:9ae3a5c1b32dda121c7dc26f9597ef7b01b4c56a98319a7fe86c35b8bc459ae6"}, + {file = "mmh3-4.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0033d60c7939168ef65ddc396611077a7268bde024f2c23bdc283a19123f9e9c"}, + {file = "mmh3-4.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d6af3e2287644b2b08b5924ed3a88c97b87b44ad08e79ca9f93d3470a54a41c5"}, + {file = "mmh3-4.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d82eb4defa245e02bb0b0dc4f1e7ee284f8d212633389c91f7fba99ba993f0a2"}, + {file 
= "mmh3-4.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba245e94b8d54765e14c2d7b6214e832557e7856d5183bc522e17884cab2f45d"}, + {file = "mmh3-4.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb04e2feeabaad6231e89cd43b3d01a4403579aa792c9ab6fdeef45cc58d4ec0"}, + {file = "mmh3-4.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1e3b1a27def545ce11e36158ba5d5390cdbc300cfe456a942cc89d649cf7e3b2"}, + {file = "mmh3-4.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce0ab79ff736d7044e5e9b3bfe73958a55f79a4ae672e6213e92492ad5e734d5"}, + {file = "mmh3-4.1.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b02268be6e0a8eeb8a924d7db85f28e47344f35c438c1e149878bb1c47b1cd3"}, + {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:deb887f5fcdaf57cf646b1e062d56b06ef2f23421c80885fce18b37143cba828"}, + {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:99dd564e9e2b512eb117bd0cbf0f79a50c45d961c2a02402787d581cec5448d5"}, + {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:08373082dfaa38fe97aa78753d1efd21a1969e51079056ff552e687764eafdfe"}, + {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:54b9c6a2ea571b714e4fe28d3e4e2db37abfd03c787a58074ea21ee9a8fd1740"}, + {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a7b1edf24c69e3513f879722b97ca85e52f9032f24a52284746877f6a7304086"}, + {file = "mmh3-4.1.0-cp312-cp312-win32.whl", hash = "sha256:411da64b951f635e1e2284b71d81a5a83580cea24994b328f8910d40bed67276"}, + {file = "mmh3-4.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:bebc3ecb6ba18292e3d40c8712482b4477abd6981c2ebf0e60869bd90f8ac3a9"}, + {file = "mmh3-4.1.0-cp312-cp312-win_arm64.whl", hash = "sha256:168473dd608ade6a8d2ba069600b35199a9af837d96177d3088ca91f2b3798e3"}, + {file = "mmh3-4.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:372f4b7e1dcde175507640679a2a8790185bb71f3640fc28a4690f73da986a3b"}, + {file = "mmh3-4.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:438584b97f6fe13e944faf590c90fc127682b57ae969f73334040d9fa1c7ffa5"}, + {file = "mmh3-4.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6e27931b232fc676675fac8641c6ec6b596daa64d82170e8597f5a5b8bdcd3b6"}, + {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:571a92bad859d7b0330e47cfd1850b76c39b615a8d8e7aa5853c1f971fd0c4b1"}, + {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a69d6afe3190fa08f9e3a58e5145549f71f1f3fff27bd0800313426929c7068"}, + {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:afb127be0be946b7630220908dbea0cee0d9d3c583fa9114a07156f98566dc28"}, + {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:940d86522f36348ef1a494cbf7248ab3f4a1638b84b59e6c9e90408bd11ad729"}, + {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3dcccc4935686619a8e3d1f7b6e97e3bd89a4a796247930ee97d35ea1a39341"}, + {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:01bb9b90d61854dfc2407c5e5192bfb47222d74f29d140cb2dd2a69f2353f7cc"}, + {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = 
"sha256:bcb1b8b951a2c0b0fb8a5426c62a22557e2ffc52539e0a7cc46eb667b5d606a9"}, + {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6477a05d5e5ab3168e82e8b106e316210ac954134f46ec529356607900aea82a"}, + {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:da5892287e5bea6977364b15712a2573c16d134bc5fdcdd4cf460006cf849278"}, + {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:99180d7fd2327a6fffbaff270f760576839dc6ee66d045fa3a450f3490fda7f5"}, + {file = "mmh3-4.1.0-cp38-cp38-win32.whl", hash = "sha256:9b0d4f3949913a9f9a8fb1bb4cc6ecd52879730aab5ff8c5a3d8f5b593594b73"}, + {file = "mmh3-4.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:598c352da1d945108aee0c3c3cfdd0e9b3edef74108f53b49d481d3990402169"}, + {file = "mmh3-4.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:475d6d1445dd080f18f0f766277e1237fa2914e5fe3307a3b2a3044f30892103"}, + {file = "mmh3-4.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5ca07c41e6a2880991431ac717c2a049056fff497651a76e26fc22224e8b5732"}, + {file = "mmh3-4.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0ebe052fef4bbe30c0548d12ee46d09f1b69035ca5208a7075e55adfe091be44"}, + {file = "mmh3-4.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eaefd42e85afb70f2b855a011f7b4d8a3c7e19c3f2681fa13118e4d8627378c5"}, + {file = "mmh3-4.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0ae43caae5a47afe1b63a1ae3f0986dde54b5fb2d6c29786adbfb8edc9edfb"}, + {file = "mmh3-4.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6218666f74c8c013c221e7f5f8a693ac9cf68e5ac9a03f2373b32d77c48904de"}, + {file = "mmh3-4.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ac59294a536ba447b5037f62d8367d7d93b696f80671c2c45645fa9f1109413c"}, + {file = "mmh3-4.1.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:086844830fcd1e5c84fec7017ea1ee8491487cfc877847d96f86f68881569d2e"}, + {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e42b38fad664f56f77f6fbca22d08450f2464baa68acdbf24841bf900eb98e87"}, + {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d08b790a63a9a1cde3b5d7d733ed97d4eb884bfbc92f075a091652d6bfd7709a"}, + {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:73ea4cc55e8aea28c86799ecacebca09e5f86500414870a8abaedfcbaf74d288"}, + {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:f90938ff137130e47bcec8dc1f4ceb02f10178c766e2ef58a9f657ff1f62d124"}, + {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:aa1f13e94b8631c8cd53259250556edcf1de71738936b60febba95750d9632bd"}, + {file = "mmh3-4.1.0-cp39-cp39-win32.whl", hash = "sha256:a3b680b471c181490cf82da2142029edb4298e1bdfcb67c76922dedef789868d"}, + {file = "mmh3-4.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:fefef92e9c544a8dbc08f77a8d1b6d48006a750c4375bbcd5ff8199d761e263b"}, + {file = "mmh3-4.1.0-cp39-cp39-win_arm64.whl", hash = "sha256:8e2c1f6a2b41723a4f82bd5a762a777836d29d664fc0095f17910bea0adfd4a6"}, + {file = "mmh3-4.1.0.tar.gz", hash = "sha256:a1cf25348b9acd229dda464a094d6170f47d2850a1fcb762a3b6172d2ce6ca4a"}, ] +[package.extras] +test = ["mypy (>=1.0)", "pytest (>=7.0.0)"] + [[package]] name = "moto" version = "5.0.2" @@ -2182,85 +2230,101 @@ files = [ [[package]] name = "multidict" -version = "6.0.4" +version = "6.0.5" description = "multidict implementation" optional = 
true python-versions = ">=3.7" files = [ - {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8"}, - {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeb6dcc05e911516ae3d1f207d4b0520d07f54484c49dfc294d6e7d63b734171"}, - {file = "multidict-6.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d6d635d5209b82a3492508cf5b365f3446afb65ae7ebd755e70e18f287b0adf7"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c048099e4c9e9d615545e2001d3d8a4380bd403e1a0578734e0d31703d1b0c0b"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea20853c6dbbb53ed34cb4d080382169b6f4554d394015f1bef35e881bf83547"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16d232d4e5396c2efbbf4f6d4df89bfa905eb0d4dc5b3549d872ab898451f569"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36c63aaa167f6c6b04ef2c85704e93af16c11d20de1d133e39de6a0e84582a93"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:64bdf1086b6043bf519869678f5f2757f473dee970d7abf6da91ec00acb9cb98"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:43644e38f42e3af682690876cff722d301ac585c5b9e1eacc013b7a3f7b696a0"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7582a1d1030e15422262de9f58711774e02fa80df0d1578995c76214f6954988"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ddff9c4e225a63a5afab9dd15590432c22e8057e1a9a13d28ed128ecf047bbdc"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ee2a1ece51b9b9e7752e742cfb661d2a29e7bcdba2d27e66e28a99f1890e4fa0"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a2e4369eb3d47d2034032a26c7a80fcb21a2cb22e1173d761a162f11e562caa5"}, - {file = "multidict-6.0.4-cp310-cp310-win32.whl", hash = "sha256:574b7eae1ab267e5f8285f0fe881f17efe4b98c39a40858247720935b893bba8"}, - {file = "multidict-6.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:4dcbb0906e38440fa3e325df2359ac6cb043df8e58c965bb45f4e406ecb162cc"}, - {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0dfad7a5a1e39c53ed00d2dd0c2e36aed4650936dc18fd9a1826a5ae1cad6f03"}, - {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:64da238a09d6039e3bd39bb3aee9c21a5e34f28bfa5aa22518581f910ff94af3"}, - {file = "multidict-6.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ff959bee35038c4624250473988b24f846cbeb2c6639de3602c073f10410ceba"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01a3a55bd90018c9c080fbb0b9f4891db37d148a0a18722b42f94694f8b6d4c9"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5cb09abb18c1ea940fb99360ea0396f34d46566f157122c92dfa069d3e0e982"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666daae833559deb2d609afa4490b85830ab0dfca811a98b70a205621a6109fe"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11bdf3f5e1518b24530b8241529d2050014c884cf18b6fc69c0c2b30ca248710"}, - {file = 
"multidict-6.0.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d18748f2d30f94f498e852c67d61261c643b349b9d2a581131725595c45ec6c"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:458f37be2d9e4c95e2d8866a851663cbc76e865b78395090786f6cd9b3bbf4f4"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b1a2eeedcead3a41694130495593a559a668f382eee0727352b9a41e1c45759a"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7d6ae9d593ef8641544d6263c7fa6408cc90370c8cb2bbb65f8d43e5b0351d9c"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5979b5632c3e3534e42ca6ff856bb24b2e3071b37861c2c727ce220d80eee9ed"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dcfe792765fab89c365123c81046ad4103fcabbc4f56d1c1997e6715e8015461"}, - {file = "multidict-6.0.4-cp311-cp311-win32.whl", hash = "sha256:3601a3cece3819534b11d4efc1eb76047488fddd0c85a3948099d5da4d504636"}, - {file = "multidict-6.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:81a4f0b34bd92df3da93315c6a59034df95866014ac08535fc819f043bfd51f0"}, - {file = "multidict-6.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:67040058f37a2a51ed8ea8f6b0e6ee5bd78ca67f169ce6122f3e2ec80dfe9b78"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:853888594621e6604c978ce2a0444a1e6e70c8d253ab65ba11657659dcc9100f"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:39ff62e7d0f26c248b15e364517a72932a611a9b75f35b45be078d81bdb86603"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af048912e045a2dc732847d33821a9d84ba553f5c5f028adbd364dd4765092ac"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e8b901e607795ec06c9e42530788c45ac21ef3aaa11dbd0c69de543bfb79a9"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62501642008a8b9871ddfccbf83e4222cf8ac0d5aeedf73da36153ef2ec222d2"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:99b76c052e9f1bc0721f7541e5e8c05db3941eb9ebe7b8553c625ef88d6eefde"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:509eac6cf09c794aa27bcacfd4d62c885cce62bef7b2c3e8b2e49d365b5003fe"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:21a12c4eb6ddc9952c415f24eef97e3e55ba3af61f67c7bc388dcdec1404a067"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:5cad9430ab3e2e4fa4a2ef4450f548768400a2ac635841bc2a56a2052cdbeb87"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab55edc2e84460694295f401215f4a58597f8f7c9466faec545093045476327d"}, - {file = "multidict-6.0.4-cp37-cp37m-win32.whl", hash = "sha256:5a4dcf02b908c3b8b17a45fb0f15b695bf117a67b76b7ad18b73cf8e92608775"}, - {file = "multidict-6.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6ed5f161328b7df384d71b07317f4d8656434e34591f20552c7bcef27b0ab88e"}, - {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5fc1b16f586f049820c5c5b17bb4ee7583092fa0d1c4e28b5239181ff9532e0c"}, - {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1502e24330eb681bdaa3eb70d6358e818e8e8f908a22a1851dfd4e15bc2f8161"}, - {file = 
"multidict-6.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b692f419760c0e65d060959df05f2a531945af31fda0c8a3b3195d4efd06de11"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45e1ecb0379bfaab5eef059f50115b54571acfbe422a14f668fc8c27ba410e7e"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddd3915998d93fbcd2566ddf9cf62cdb35c9e093075f862935573d265cf8f65d"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:59d43b61c59d82f2effb39a93c48b845efe23a3852d201ed2d24ba830d0b4cf2"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc8e1d0c705233c5dd0c5e6460fbad7827d5d36f310a0fadfd45cc3029762258"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6aa0418fcc838522256761b3415822626f866758ee0bc6632c9486b179d0b52"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6748717bb10339c4760c1e63da040f5f29f5ed6e59d76daee30305894069a660"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4d1a3d7ef5e96b1c9e92f973e43aa5e5b96c659c9bc3124acbbd81b0b9c8a951"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4372381634485bec7e46718edc71528024fcdc6f835baefe517b34a33c731d60"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:fc35cb4676846ef752816d5be2193a1e8367b4c1397b74a565a9d0389c433a1d"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b9d9e4e2b37daddb5c23ea33a3417901fa7c7b3dee2d855f63ee67a0b21e5b1"}, - {file = "multidict-6.0.4-cp38-cp38-win32.whl", hash = "sha256:e41b7e2b59679edfa309e8db64fdf22399eec4b0b24694e1b2104fb789207779"}, - {file = "multidict-6.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:d6c254ba6e45d8e72739281ebc46ea5eb5f101234f3ce171f0e9f5cc86991480"}, - {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16ab77bbeb596e14212e7bab8429f24c1579234a3a462105cda4a66904998664"}, - {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc779e9e6f7fda81b3f9aa58e3a6091d49ad528b11ed19f6621408806204ad35"}, - {file = "multidict-6.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ceef517eca3e03c1cceb22030a3e39cb399ac86bff4e426d4fc6ae49052cc60"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:281af09f488903fde97923c7744bb001a9b23b039a909460d0f14edc7bf59706"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52f2dffc8acaba9a2f27174c41c9e57f60b907bb9f096b36b1a1f3be71c6284d"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b41156839806aecb3641f3208c0dafd3ac7775b9c4c422d82ee2a45c34ba81ca"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3fc56f88cc98ef8139255cf8cd63eb2c586531e43310ff859d6bb3a6b51f1"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8316a77808c501004802f9beebde51c9f857054a0c871bd6da8280e718444449"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f70b98cd94886b49d91170ef23ec5c0e8ebb6f242d734ed7ed677b24d50c82cf"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:bf6774e60d67a9efe02b3616fee22441d86fab4c6d335f9d2051d19d90a40063"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e69924bfcdda39b722ef4d9aa762b2dd38e4632b3641b1d9a57ca9cd18f2f83a"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:6b181d8c23da913d4ff585afd1155a0e1194c0b50c54fcfe286f70cdaf2b7176"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:52509b5be062d9eafc8170e53026fbc54cf3b32759a23d07fd935fb04fc22d95"}, - {file = "multidict-6.0.4-cp39-cp39-win32.whl", hash = "sha256:27c523fbfbdfd19c6867af7346332b62b586eed663887392cff78d614f9ec313"}, - {file = "multidict-6.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:33029f5734336aa0d4c0384525da0387ef89148dc7191aae00ca5fb23d7aafc2"}, - {file = "multidict-6.0.4.tar.gz", hash = "sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49"}, + {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"}, + {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"}, + {file = "multidict-6.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc"}, + {file = "multidict-6.0.5-cp310-cp310-win32.whl", hash = "sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319"}, + {file = "multidict-6.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e"}, + 
{file = "multidict-6.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85f67aed7bb647f93e7520633d8f51d3cbc6ab96957c71272b286b2f30dc70ed"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425bf820055005bfc8aa9a0b99ccb52cc2f4070153e34b701acc98d201693733"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3eb1ceec286eba8220c26f3b0096cf189aea7057b6e7b7a2e60ed36b373b77f"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7901c05ead4b3fb75113fb1dd33eb1253c6d3ee37ce93305acd9d38e0b5f21a4"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e0e79d91e71b9867c73323a3444724d496c037e578a0e1755ae159ba14f4f3d1"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:29bfeb0dff5cb5fdab2023a7a9947b3b4af63e9c47cae2a10ad58394b517fddc"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e030047e85cbcedbfc073f71836d62dd5dadfbe7531cae27789ff66bc551bd5e"}, + {file = "multidict-6.0.5-cp311-cp311-win32.whl", hash = "sha256:2f4848aa3baa109e6ab81fe2006c77ed4d3cd1e0ac2c1fbddb7b1277c168788c"}, + {file = "multidict-6.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:2faa5ae9376faba05f630d7e5e6be05be22913782b927b19d12b8145968a85ea"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:51d035609b86722963404f711db441cf7134f1889107fb171a970c9701f92e1e"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cbebcd5bcaf1eaf302617c114aa67569dd3f090dd0ce8ba9e35e9985b41ac35b"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ffc42c922dbfddb4a4c3b438eb056828719f07608af27d163191cb3e3aa6cc5"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ceb3b7e6a0135e092de86110c5a74e46bda4bd4fbfeeb3a3bcec79c0f861e450"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79660376075cfd4b2c80f295528aa6beb2058fd289f4c9252f986751a4cd0496"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4428b29611e989719874670fd152b6625500ad6c686d464e99f5aaeeaca175a"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d84a5c3a5f7ce6db1f999fb9438f686bc2e09d38143f2d93d8406ed2dd6b9226"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c0de87358b192de7ea9649beb392f107dcad9ad27276324c24c91774ca5271"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:79a6d2ba910adb2cbafc95dad936f8b9386e77c84c35bc0add315b856d7c3abb"}, + {file = 
"multidict-6.0.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:92d16a3e275e38293623ebf639c471d3e03bb20b8ebb845237e0d3664914caef"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fb616be3538599e797a2017cccca78e354c767165e8858ab5116813146041a24"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:14c2976aa9038c2629efa2c148022ed5eb4cb939e15ec7aace7ca932f48f9ba6"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:435a0984199d81ca178b9ae2c26ec3d49692d20ee29bc4c11a2a8d4514c67eda"}, + {file = "multidict-6.0.5-cp312-cp312-win32.whl", hash = "sha256:9fe7b0653ba3d9d65cbe7698cca585bf0f8c83dbbcc710db9c90f478e175f2d5"}, + {file = "multidict-6.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:01265f5e40f5a17f8241d52656ed27192be03bfa8764d88e8220141d1e4b3556"}, + {file = "multidict-6.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:19fe01cea168585ba0f678cad6f58133db2aa14eccaf22f88e4a6dccadfad8b3"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf7a982604375a8d49b6cc1b781c1747f243d91b81035a9b43a2126c04766f5"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:107c0cdefe028703fb5dafe640a409cb146d44a6ae201e55b35a4af8e95457dd"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:403c0911cd5d5791605808b942c88a8155c2592e05332d2bf78f18697a5fa15e"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aeaf541ddbad8311a87dd695ed9642401131ea39ad7bc8cf3ef3967fd093b626"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4972624066095e52b569e02b5ca97dbd7a7ddd4294bf4e7247d52635630dd83"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d946b0a9eb8aaa590df1fe082cee553ceab173e6cb5b03239716338629c50c7a"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b55358304d7a73d7bdf5de62494aaf70bd33015831ffd98bc498b433dfe5b10c"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:a3145cb08d8625b2d3fee1b2d596a8766352979c9bffe5d7833e0503d0f0b5e5"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d65f25da8e248202bd47445cec78e0025c0fe7582b23ec69c3b27a640dd7a8e3"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c9bf56195c6bbd293340ea82eafd0071cb3d450c703d2c93afb89f93b8386ccc"}, + {file = "multidict-6.0.5-cp37-cp37m-win32.whl", hash = "sha256:69db76c09796b313331bb7048229e3bee7928eb62bab5e071e9f7fcc4879caee"}, + {file = "multidict-6.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:fce28b3c8a81b6b36dfac9feb1de115bab619b3c13905b419ec71d03a3fc1423"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76f067f5121dcecf0d63a67f29080b26c43c71a98b10c701b0677e4a065fbd54"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b82cc8ace10ab5bd93235dfaab2021c70637005e1ac787031f4d1da63d493c1d"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cb241881eefd96b46f89b1a056187ea8e9ba14ab88ba632e68d7a2ecb7aadf7"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e94e6912639a02ce173341ff62cc1201232ab86b8a8fcc05572741a5dc7d93"}, + {file = 
"multidict-6.0.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09a892e4a9fb47331da06948690ae38eaa2426de97b4ccbfafbdcbe5c8f37ff8"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55205d03e8a598cfc688c71ca8ea5f66447164efff8869517f175ea632c7cb7b"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37b15024f864916b4951adb95d3a80c9431299080341ab9544ed148091b53f50"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2a1dee728b52b33eebff5072817176c172050d44d67befd681609b4746e1c2e"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:edd08e6f2f1a390bf137080507e44ccc086353c8e98c657e666c017718561b89"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:60d698e8179a42ec85172d12f50b1668254628425a6bd611aba022257cac1386"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3d25f19500588cbc47dc19081d78131c32637c25804df8414463ec908631e453"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4cc0ef8b962ac7a5e62b9e826bd0cd5040e7d401bc45a6835910ed699037a461"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:eca2e9d0cc5a889850e9bbd68e98314ada174ff6ccd1129500103df7a94a7a44"}, + {file = "multidict-6.0.5-cp38-cp38-win32.whl", hash = "sha256:4a6a4f196f08c58c59e0b8ef8ec441d12aee4125a7d4f4fef000ccb22f8d7241"}, + {file = "multidict-6.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:0275e35209c27a3f7951e1ce7aaf93ce0d163b28948444bec61dd7badc6d3f8c"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e7be68734bd8c9a513f2b0cfd508802d6609da068f40dc57d4e3494cefc92929"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1d9ea7a7e779d7a3561aade7d596649fbecfa5c08a7674b11b423783217933f9"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ea1456df2a27c73ce51120fa2f519f1bea2f4a03a917f4a43c8707cf4cbbae1a"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf590b134eb70629e350691ecca88eac3e3b8b3c86992042fb82e3cb1830d5e1"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c0631926c4f58e9a5ccce555ad7747d9a9f8b10619621f22f9635f069f6233e"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dce1c6912ab9ff5f179eaf6efe7365c1f425ed690b03341911bf4939ef2f3046"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0868d64af83169e4d4152ec612637a543f7a336e4a307b119e98042e852ad9c"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:141b43360bfd3bdd75f15ed811850763555a251e38b2405967f8e25fb43f7d40"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7df704ca8cf4a073334e0427ae2345323613e4df18cc224f647f251e5e75a527"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6214c5a5571802c33f80e6c84713b2c79e024995b9c5897f794b43e714daeec9"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd6c8fca38178e12c00418de737aef1261576bd1b6e8c6134d3e729a4e858b38"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = 
"sha256:e02021f87a5b6932fa6ce916ca004c4d441509d33bbdbeca70d05dff5e9d2479"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ebd8d160f91a764652d3e51ce0d2956b38efe37c9231cd82cfc0bed2e40b581c"}, + {file = "multidict-6.0.5-cp39-cp39-win32.whl", hash = "sha256:04da1bb8c8dbadf2a18a452639771951c662c5ad03aefe4884775454be322c9b"}, + {file = "multidict-6.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:d6f6d4f185481c9669b9447bf9d9cf3b95a0e9df9d169bbc17e363b7d5487755"}, + {file = "multidict-6.0.5-py3-none-any.whl", hash = "sha256:0d63c74e3d7ab26de115c49bffc92cc77ed23395303d496eae515d4204a625e7"}, + {file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"}, ] [[package]] @@ -2346,6 +2410,51 @@ files = [ {file = "numpy-1.24.4.tar.gz", hash = "sha256:80f5e3a4e498641401868df4208b74581206afbee7cf7b8329daae82676d9463"}, ] +[[package]] +name = "numpy" +version = "1.26.4" +description = "Fundamental package for array computing in Python" +optional = true +python-versions = ">=3.9" +files = [ + {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, + {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, + {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, + {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, + {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, + {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, + {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, + {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, + {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, + {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, + {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, +] + [[package]] name = "oauthlib" version = "3.2.2" @@ -2446,8 +2555,8 @@ files = [ [package.dependencies] numpy = [ {version = ">=1.20.3", markers = "python_version < \"3.10\""}, - {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, {version = ">=1.21.0", markers = "python_version >= \"3.10\" and python_version < \"3.11\""}, + {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, ] python-dateutil = ">=2.8.2" pytz = ">=2020.1" @@ -2511,28 +2620,28 @@ files = [ [[package]] name = "platformdirs" -version = "4.1.0" +version = "4.2.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.1.0-py3-none-any.whl", hash = "sha256:11c8f37bcca40db96d8144522d925583bdb7a31f7b0e37e3ed4318400a8e2380"}, - {file = "platformdirs-4.1.0.tar.gz", hash = "sha256:906d548203468492d432bcb294d4bc2fff751bf84971fbb2c10918cc206ee420"}, + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, ] [package.extras] -docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] [[package]] name = "pluggy" -version = "1.3.0" +version = "1.4.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" files = [ - {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, - {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, ] [package.extras] @@ -2578,22 +2687,22 @@ virtualenv = ">=20.10.0" [[package]] name = "protobuf" -version = "4.25.2" +version = "4.25.3" description = "" optional = true python-versions = ">=3.8" files = [ - {file = "protobuf-4.25.2-cp310-abi3-win32.whl", hash = "sha256:b50c949608682b12efb0b2717f53256f03636af5f60ac0c1d900df6213910fd6"}, - {file = "protobuf-4.25.2-cp310-abi3-win_amd64.whl", hash = "sha256:8f62574857ee1de9f770baf04dde4165e30b15ad97ba03ceac65f760ff018ac9"}, - {file = "protobuf-4.25.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:2db9f8fa64fbdcdc93767d3cf81e0f2aef176284071507e3ede160811502fd3d"}, - {file = "protobuf-4.25.2-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:10894a2885b7175d3984f2be8d9850712c57d5e7587a2410720af8be56cdaf62"}, - {file = "protobuf-4.25.2-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:fc381d1dd0516343f1440019cedf08a7405f791cd49eef4ae1ea06520bc1c020"}, - {file = "protobuf-4.25.2-cp38-cp38-win32.whl", hash = "sha256:33a1aeef4b1927431d1be780e87b641e322b88d654203a9e9d93f218ee359e61"}, - {file = "protobuf-4.25.2-cp38-cp38-win_amd64.whl", hash = "sha256:47f3de503fe7c1245f6f03bea7e8d3ec11c6c4a2ea9ef910e3221c8a15516d62"}, - {file = "protobuf-4.25.2-cp39-cp39-win32.whl", hash = "sha256:5e5c933b4c30a988b52e0b7c02641760a5ba046edc5e43d3b94a74c9fc57c1b3"}, - {file = "protobuf-4.25.2-cp39-cp39-win_amd64.whl", hash = "sha256:d66a769b8d687df9024f2985d5137a337f957a0916cf5464d1513eee96a63ff0"}, - {file = "protobuf-4.25.2-py3-none-any.whl", hash = "sha256:a8b7a98d4ce823303145bf3c1a8bdb0f2f4642a414b196f04ad9853ed0c8f830"}, - {file = "protobuf-4.25.2.tar.gz", hash = "sha256:fe599e175cb347efc8ee524bcd4b902d11f7262c0e569ececcb89995c15f0a5e"}, + {file = "protobuf-4.25.3-cp310-abi3-win32.whl", hash = "sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa"}, + {file = 
"protobuf-4.25.3-cp310-abi3-win_amd64.whl", hash = "sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8"}, + {file = "protobuf-4.25.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c"}, + {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019"}, + {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d"}, + {file = "protobuf-4.25.3-cp38-cp38-win32.whl", hash = "sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2"}, + {file = "protobuf-4.25.3-cp38-cp38-win_amd64.whl", hash = "sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4"}, + {file = "protobuf-4.25.3-cp39-cp39-win32.whl", hash = "sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4"}, + {file = "protobuf-4.25.3-cp39-cp39-win_amd64.whl", hash = "sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c"}, + {file = "protobuf-4.25.3-py3-none-any.whl", hash = "sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9"}, + {file = "protobuf-4.25.3.tar.gz", hash = "sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c"}, ] [[package]] @@ -2657,7 +2766,6 @@ files = [ {file = "psycopg2_binary-2.9.9-cp311-cp311-win32.whl", hash = "sha256:dc4926288b2a3e9fd7b50dc6a1909a13bbdadfc67d93f3374d984e56f885579d"}, {file = "psycopg2_binary-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:b76bedd166805480ab069612119ea636f5ab8f8771e640ae103e05a4aae3e417"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:8532fd6e6e2dc57bcb3bc90b079c60de896d2128c5d9d6f24a63875a95a088cf"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b0605eaed3eb239e87df0d5e3c6489daae3f7388d455d0c0b4df899519c6a38d"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f8544b092a29a6ddd72f3556a9fcf249ec412e10ad28be6a0c0d948924f2212"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2d423c8d8a3c82d08fe8af900ad5b613ce3632a1249fd6a223941d0735fce493"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e5afae772c00980525f6d6ecf7cbca55676296b580c0e6abb407f15f3706996"}, @@ -2666,8 +2774,6 @@ files = [ {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:cb16c65dcb648d0a43a2521f2f0a2300f40639f6f8c1ecbc662141e4e3e1ee07"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:911dda9c487075abd54e644ccdf5e5c16773470a6a5d3826fda76699410066fb"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:57fede879f08d23c85140a360c6a77709113efd1c993923c59fde17aa27599fe"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-win32.whl", hash = "sha256:64cf30263844fa208851ebb13b0732ce674d8ec6a0c86a4e160495d299ba3c93"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-win_amd64.whl", hash = "sha256:81ff62668af011f9a48787564ab7eded4e9fb17a4a6a74af5ffa6a457400d2ab"}, {file = "psycopg2_binary-2.9.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2293b001e319ab0d869d660a704942c9e2cce19745262a8aba2115ef41a0a42a"}, {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:03ef7df18daf2c4c07e2695e8cfd5ee7f748a1d54d802330985a78d2a5a6dca9"}, {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a602ea5aff39bb9fac6308e9c9d82b9a35c2bf288e184a816002c9fae930b77"}, @@ -3153,13 +3259,13 @@ files = [ [[package]] name = "pytz" -version = "2023.3.post1" +version = "2024.1" description = "World timezone definitions, modern and historical" optional = true python-versions = "*" files = [ - {file = "pytz-2023.3.post1-py2.py3-none-any.whl", hash = "sha256:ce42d816b81b68506614c11e8937d3aa9e41007ceb50bfdcb0749b921bf646c7"}, - {file = "pytz-2023.3.post1.tar.gz", hash = "sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b"}, + {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, + {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, ] [[package]] @@ -3210,7 +3316,6 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, @@ -3302,13 +3407,13 @@ tune = ["fsspec", "pandas", "pyarrow (>=6.0.1)", "requests", "tensorboardX (>=1. [[package]] name = "referencing" -version = "0.32.1" +version = "0.33.0" description = "JSON Referencing + Python" optional = false python-versions = ">=3.8" files = [ - {file = "referencing-0.32.1-py3-none-any.whl", hash = "sha256:7e4dc12271d8e15612bfe35792f5ea1c40970dadf8624602e33db2758f7ee554"}, - {file = "referencing-0.32.1.tar.gz", hash = "sha256:3c57da0513e9563eb7e203ebe9bb3a1b509b042016433bd1e45a2853466c3dd3"}, + {file = "referencing-0.33.0-py3-none-any.whl", hash = "sha256:39240f2ecc770258f28b642dd47fd74bc8b02484de54e1882b74b35ebd779bd5"}, + {file = "referencing-0.33.0.tar.gz", hash = "sha256:c775fedf74bc0f9189c2a3be1c12fd03e8c23f4d371dce795df44e06c5b412f7"}, ] [package.dependencies] @@ -3477,13 +3582,13 @@ rsa = ["oauthlib[signedtoken] (>=3.0.0)"] [[package]] name = "responses" -version = "0.24.1" +version = "0.25.0" description = "A utility library for mocking out the `requests` Python library." 
optional = false python-versions = ">=3.8" files = [ - {file = "responses-0.24.1-py3-none-any.whl", hash = "sha256:a2b43f4c08bfb9c9bd242568328c65a34b318741d3fab884ac843c5ceeb543f9"}, - {file = "responses-0.24.1.tar.gz", hash = "sha256:b127c6ca3f8df0eb9cc82fd93109a3007a86acb24871834c47b77765152ecf8c"}, + {file = "responses-0.25.0-py3-none-any.whl", hash = "sha256:2f0b9c2b6437db4b528619a77e5d565e4ec2a9532162ac1a131a83529db7be1a"}, + {file = "responses-0.25.0.tar.gz", hash = "sha256:01ae6a02b4f34e39bffceb0fc6786b67a25eae919c6368d05eabc8d9576c2a66"}, ] [package.dependencies] @@ -3529,110 +3634,110 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "rpds-py" -version = "0.17.1" +version = "0.18.0" description = "Python bindings to Rust's persistent data structures (rpds)" optional = false python-versions = ">=3.8" files = [ - {file = "rpds_py-0.17.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:4128980a14ed805e1b91a7ed551250282a8ddf8201a4e9f8f5b7e6225f54170d"}, - {file = "rpds_py-0.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ff1dcb8e8bc2261a088821b2595ef031c91d499a0c1b031c152d43fe0a6ecec8"}, - {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d65e6b4f1443048eb7e833c2accb4fa7ee67cc7d54f31b4f0555b474758bee55"}, - {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a71169d505af63bb4d20d23a8fbd4c6ce272e7bce6cc31f617152aa784436f29"}, - {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:436474f17733c7dca0fbf096d36ae65277e8645039df12a0fa52445ca494729d"}, - {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:10162fe3f5f47c37ebf6d8ff5a2368508fe22007e3077bf25b9c7d803454d921"}, - {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:720215373a280f78a1814becb1312d4e4d1077b1202a56d2b0815e95ccb99ce9"}, - {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:70fcc6c2906cfa5c6a552ba7ae2ce64b6c32f437d8f3f8eea49925b278a61453"}, - {file = "rpds_py-0.17.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:91e5a8200e65aaac342a791272c564dffcf1281abd635d304d6c4e6b495f29dc"}, - {file = "rpds_py-0.17.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:99f567dae93e10be2daaa896e07513dd4bf9c2ecf0576e0533ac36ba3b1d5394"}, - {file = "rpds_py-0.17.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:24e4900a6643f87058a27320f81336d527ccfe503984528edde4bb660c8c8d59"}, - {file = "rpds_py-0.17.1-cp310-none-win32.whl", hash = "sha256:0bfb09bf41fe7c51413f563373e5f537eaa653d7adc4830399d4e9bdc199959d"}, - {file = "rpds_py-0.17.1-cp310-none-win_amd64.whl", hash = "sha256:20de7b7179e2031a04042e85dc463a93a82bc177eeba5ddd13ff746325558aa6"}, - {file = "rpds_py-0.17.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:65dcf105c1943cba45d19207ef51b8bc46d232a381e94dd38719d52d3980015b"}, - {file = "rpds_py-0.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:01f58a7306b64e0a4fe042047dd2b7d411ee82e54240284bab63e325762c1147"}, - {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:071bc28c589b86bc6351a339114fb7a029f5cddbaca34103aa573eba7b482382"}, - {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ae35e8e6801c5ab071b992cb2da958eee76340e6926ec693b5ff7d6381441745"}, - {file = 
"rpds_py-0.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149c5cd24f729e3567b56e1795f74577aa3126c14c11e457bec1b1c90d212e38"}, - {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e796051f2070f47230c745d0a77a91088fbee2cc0502e9b796b9c6471983718c"}, - {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60e820ee1004327609b28db8307acc27f5f2e9a0b185b2064c5f23e815f248f8"}, - {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1957a2ab607f9added64478a6982742eb29f109d89d065fa44e01691a20fc20a"}, - {file = "rpds_py-0.17.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8587fd64c2a91c33cdc39d0cebdaf30e79491cc029a37fcd458ba863f8815383"}, - {file = "rpds_py-0.17.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4dc889a9d8a34758d0fcc9ac86adb97bab3fb7f0c4d29794357eb147536483fd"}, - {file = "rpds_py-0.17.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:2953937f83820376b5979318840f3ee47477d94c17b940fe31d9458d79ae7eea"}, - {file = "rpds_py-0.17.1-cp311-none-win32.whl", hash = "sha256:1bfcad3109c1e5ba3cbe2f421614e70439f72897515a96c462ea657261b96518"}, - {file = "rpds_py-0.17.1-cp311-none-win_amd64.whl", hash = "sha256:99da0a4686ada4ed0f778120a0ea8d066de1a0a92ab0d13ae68492a437db78bf"}, - {file = "rpds_py-0.17.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:1dc29db3900cb1bb40353772417800f29c3d078dbc8024fd64655a04ee3c4bdf"}, - {file = "rpds_py-0.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:82ada4a8ed9e82e443fcef87e22a3eed3654dd3adf6e3b3a0deb70f03e86142a"}, - {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d36b2b59e8cc6e576f8f7b671e32f2ff43153f0ad6d0201250a7c07f25d570e"}, - {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3677fcca7fb728c86a78660c7fb1b07b69b281964673f486ae72860e13f512ad"}, - {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:516fb8c77805159e97a689e2f1c80655c7658f5af601c34ffdb916605598cda2"}, - {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df3b6f45ba4515632c5064e35ca7f31d51d13d1479673185ba8f9fefbbed58b9"}, - {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a967dd6afda7715d911c25a6ba1517975acd8d1092b2f326718725461a3d33f9"}, - {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dbbb95e6fc91ea3102505d111b327004d1c4ce98d56a4a02e82cd451f9f57140"}, - {file = "rpds_py-0.17.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:02866e060219514940342a1f84303a1ef7a1dad0ac311792fbbe19b521b489d2"}, - {file = "rpds_py-0.17.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:2528ff96d09f12e638695f3a2e0c609c7b84c6df7c5ae9bfeb9252b6fa686253"}, - {file = "rpds_py-0.17.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:bd345a13ce06e94c753dab52f8e71e5252aec1e4f8022d24d56decd31e1b9b23"}, - {file = "rpds_py-0.17.1-cp312-none-win32.whl", hash = "sha256:2a792b2e1d3038daa83fa474d559acfd6dc1e3650ee93b2662ddc17dbff20ad1"}, - {file = "rpds_py-0.17.1-cp312-none-win_amd64.whl", hash = "sha256:292f7344a3301802e7c25c53792fae7d1593cb0e50964e7bcdcc5cf533d634e3"}, - {file = "rpds_py-0.17.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:8ffe53e1d8ef2520ebcf0c9fec15bb721da59e8ef283b6ff3079613b1e30513d"}, - {file = 
"rpds_py-0.17.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4341bd7579611cf50e7b20bb8c2e23512a3dc79de987a1f411cb458ab670eb90"}, - {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f4eb548daf4836e3b2c662033bfbfc551db58d30fd8fe660314f86bf8510b93"}, - {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b686f25377f9c006acbac63f61614416a6317133ab7fafe5de5f7dc8a06d42eb"}, - {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4e21b76075c01d65d0f0f34302b5a7457d95721d5e0667aea65e5bb3ab415c25"}, - {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b86b21b348f7e5485fae740d845c65a880f5d1eda1e063bc59bef92d1f7d0c55"}, - {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f175e95a197f6a4059b50757a3dca33b32b61691bdbd22c29e8a8d21d3914cae"}, - {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1701fc54460ae2e5efc1dd6350eafd7a760f516df8dbe51d4a1c79d69472fbd4"}, - {file = "rpds_py-0.17.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:9051e3d2af8f55b42061603e29e744724cb5f65b128a491446cc029b3e2ea896"}, - {file = "rpds_py-0.17.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:7450dbd659fed6dd41d1a7d47ed767e893ba402af8ae664c157c255ec6067fde"}, - {file = "rpds_py-0.17.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:5a024fa96d541fd7edaa0e9d904601c6445e95a729a2900c5aec6555fe921ed6"}, - {file = "rpds_py-0.17.1-cp38-none-win32.whl", hash = "sha256:da1ead63368c04a9bded7904757dfcae01eba0e0f9bc41d3d7f57ebf1c04015a"}, - {file = "rpds_py-0.17.1-cp38-none-win_amd64.whl", hash = "sha256:841320e1841bb53fada91c9725e766bb25009cfd4144e92298db296fb6c894fb"}, - {file = "rpds_py-0.17.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:f6c43b6f97209e370124baf2bf40bb1e8edc25311a158867eb1c3a5d449ebc7a"}, - {file = "rpds_py-0.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5e7d63ec01fe7c76c2dbb7e972fece45acbb8836e72682bde138e7e039906e2c"}, - {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81038ff87a4e04c22e1d81f947c6ac46f122e0c80460b9006e6517c4d842a6ec"}, - {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:810685321f4a304b2b55577c915bece4c4a06dfe38f6e62d9cc1d6ca8ee86b99"}, - {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:25f071737dae674ca8937a73d0f43f5a52e92c2d178330b4c0bb6ab05586ffa6"}, - {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa5bfb13f1e89151ade0eb812f7b0d7a4d643406caaad65ce1cbabe0a66d695f"}, - {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfe07308b311a8293a0d5ef4e61411c5c20f682db6b5e73de6c7c8824272c256"}, - {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a000133a90eea274a6f28adc3084643263b1e7c1a5a66eb0a0a7a36aa757ed74"}, - {file = "rpds_py-0.17.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d0e8a6434a3fbf77d11448c9c25b2f25244226cfbec1a5159947cac5b8c5fa4"}, - {file = "rpds_py-0.17.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:efa767c220d94aa4ac3a6dd3aeb986e9f229eaf5bce92d8b1b3018d06bed3772"}, - {file = "rpds_py-0.17.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = 
"sha256:dbc56680ecf585a384fbd93cd42bc82668b77cb525343170a2d86dafaed2a84b"}, - {file = "rpds_py-0.17.1-cp39-none-win32.whl", hash = "sha256:270987bc22e7e5a962b1094953ae901395e8c1e1e83ad016c5cfcfff75a15a3f"}, - {file = "rpds_py-0.17.1-cp39-none-win_amd64.whl", hash = "sha256:2a7b2f2f56a16a6d62e55354dd329d929560442bd92e87397b7a9586a32e3e76"}, - {file = "rpds_py-0.17.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a3264e3e858de4fc601741498215835ff324ff2482fd4e4af61b46512dd7fc83"}, - {file = "rpds_py-0.17.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:f2f3b28b40fddcb6c1f1f6c88c6f3769cd933fa493ceb79da45968a21dccc920"}, - {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9584f8f52010295a4a417221861df9bea4c72d9632562b6e59b3c7b87a1522b7"}, - {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c64602e8be701c6cfe42064b71c84ce62ce66ddc6422c15463fd8127db3d8066"}, - {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:060f412230d5f19fc8c8b75f315931b408d8ebf56aec33ef4168d1b9e54200b1"}, - {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b9412abdf0ba70faa6e2ee6c0cc62a8defb772e78860cef419865917d86c7342"}, - {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9737bdaa0ad33d34c0efc718741abaafce62fadae72c8b251df9b0c823c63b22"}, - {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9f0e4dc0f17dcea4ab9d13ac5c666b6b5337042b4d8f27e01b70fae41dd65c57"}, - {file = "rpds_py-0.17.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:1db228102ab9d1ff4c64148c96320d0be7044fa28bd865a9ce628ce98da5973d"}, - {file = "rpds_py-0.17.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:d8bbd8e56f3ba25a7d0cf980fc42b34028848a53a0e36c9918550e0280b9d0b6"}, - {file = "rpds_py-0.17.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:be22ae34d68544df293152b7e50895ba70d2a833ad9566932d750d3625918b82"}, - {file = "rpds_py-0.17.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:bf046179d011e6114daf12a534d874958b039342b347348a78b7cdf0dd9d6041"}, - {file = "rpds_py-0.17.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:1a746a6d49665058a5896000e8d9d2f1a6acba8a03b389c1e4c06e11e0b7f40d"}, - {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0b8bf5b8db49d8fd40f54772a1dcf262e8be0ad2ab0206b5a2ec109c176c0a4"}, - {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f7f4cb1f173385e8a39c29510dd11a78bf44e360fb75610594973f5ea141028b"}, - {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7fbd70cb8b54fe745301921b0816c08b6d917593429dfc437fd024b5ba713c58"}, - {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9bdf1303df671179eaf2cb41e8515a07fc78d9d00f111eadbe3e14262f59c3d0"}, - {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fad059a4bd14c45776600d223ec194e77db6c20255578bb5bcdd7c18fd169361"}, - {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3664d126d3388a887db44c2e293f87d500c4184ec43d5d14d2d2babdb4c64cad"}, - {file = 
"rpds_py-0.17.1-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:698ea95a60c8b16b58be9d854c9f993c639f5c214cf9ba782eca53a8789d6b19"}, - {file = "rpds_py-0.17.1-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:c3d2010656999b63e628a3c694f23020322b4178c450dc478558a2b6ef3cb9bb"}, - {file = "rpds_py-0.17.1-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:938eab7323a736533f015e6069a7d53ef2dcc841e4e533b782c2bfb9fb12d84b"}, - {file = "rpds_py-0.17.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1e626b365293a2142a62b9a614e1f8e331b28f3ca57b9f05ebbf4cf2a0f0bdc5"}, - {file = "rpds_py-0.17.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:380e0df2e9d5d5d339803cfc6d183a5442ad7ab3c63c2a0982e8c824566c5ccc"}, - {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b760a56e080a826c2e5af09002c1a037382ed21d03134eb6294812dda268c811"}, - {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5576ee2f3a309d2bb403ec292d5958ce03953b0e57a11d224c1f134feaf8c40f"}, - {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f3c3461ebb4c4f1bbc70b15d20b565759f97a5aaf13af811fcefc892e9197ba"}, - {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:637b802f3f069a64436d432117a7e58fab414b4e27a7e81049817ae94de45d8d"}, - {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffee088ea9b593cc6160518ba9bd319b5475e5f3e578e4552d63818773c6f56a"}, - {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3ac732390d529d8469b831949c78085b034bff67f584559340008d0f6041a049"}, - {file = "rpds_py-0.17.1-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:93432e747fb07fa567ad9cc7aaadd6e29710e515aabf939dfbed8046041346c6"}, - {file = "rpds_py-0.17.1-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:7b7d9ca34542099b4e185b3c2a2b2eda2e318a7dbde0b0d83357a6d4421b5296"}, - {file = "rpds_py-0.17.1-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:0387ce69ba06e43df54e43968090f3626e231e4bc9150e4c3246947567695f68"}, - {file = "rpds_py-0.17.1.tar.gz", hash = "sha256:0210b2668f24c078307260bf88bdac9d6f1093635df5123789bfee4d8d7fc8e7"}, + {file = "rpds_py-0.18.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:5b4e7d8d6c9b2e8ee2d55c90b59c707ca59bc30058269b3db7b1f8df5763557e"}, + {file = "rpds_py-0.18.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c463ed05f9dfb9baebef68048aed8dcdc94411e4bf3d33a39ba97e271624f8f7"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01e36a39af54a30f28b73096dd39b6802eddd04c90dbe161c1b8dbe22353189f"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d62dec4976954a23d7f91f2f4530852b0c7608116c257833922a896101336c51"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dd18772815d5f008fa03d2b9a681ae38d5ae9f0e599f7dda233c439fcaa00d40"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:923d39efa3cfb7279a0327e337a7958bff00cc447fd07a25cddb0a1cc9a6d2da"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39514da80f971362f9267c600b6d459bfbbc549cffc2cef8e47474fddc9b45b1"}, + {file = 
"rpds_py-0.18.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a34d557a42aa28bd5c48a023c570219ba2593bcbbb8dc1b98d8cf5d529ab1434"}, + {file = "rpds_py-0.18.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:93df1de2f7f7239dc9cc5a4a12408ee1598725036bd2dedadc14d94525192fc3"}, + {file = "rpds_py-0.18.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:34b18ba135c687f4dac449aa5157d36e2cbb7c03cbea4ddbd88604e076aa836e"}, + {file = "rpds_py-0.18.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c0b5dcf9193625afd8ecc92312d6ed78781c46ecbf39af9ad4681fc9f464af88"}, + {file = "rpds_py-0.18.0-cp310-none-win32.whl", hash = "sha256:c4325ff0442a12113a6379af66978c3fe562f846763287ef66bdc1d57925d337"}, + {file = "rpds_py-0.18.0-cp310-none-win_amd64.whl", hash = "sha256:7223a2a5fe0d217e60a60cdae28d6949140dde9c3bcc714063c5b463065e3d66"}, + {file = "rpds_py-0.18.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:3a96e0c6a41dcdba3a0a581bbf6c44bb863f27c541547fb4b9711fd8cf0ffad4"}, + {file = "rpds_py-0.18.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30f43887bbae0d49113cbaab729a112251a940e9b274536613097ab8b4899cf6"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fcb25daa9219b4cf3a0ab24b0eb9a5cc8949ed4dc72acb8fa16b7e1681aa3c58"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d68c93e381010662ab873fea609bf6c0f428b6d0bb00f2c6939782e0818d37bf"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b34b7aa8b261c1dbf7720b5d6f01f38243e9b9daf7e6b8bc1fd4657000062f2c"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2e6d75ab12b0bbab7215e5d40f1e5b738aa539598db27ef83b2ec46747df90e1"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b8612cd233543a3781bc659c731b9d607de65890085098986dfd573fc2befe5"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:aec493917dd45e3c69d00a8874e7cbed844efd935595ef78a0f25f14312e33c6"}, + {file = "rpds_py-0.18.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:661d25cbffaf8cc42e971dd570d87cb29a665f49f4abe1f9e76be9a5182c4688"}, + {file = "rpds_py-0.18.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1df3659d26f539ac74fb3b0c481cdf9d725386e3552c6fa2974f4d33d78e544b"}, + {file = "rpds_py-0.18.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a1ce3ba137ed54f83e56fb983a5859a27d43a40188ba798993812fed73c70836"}, + {file = "rpds_py-0.18.0-cp311-none-win32.whl", hash = "sha256:69e64831e22a6b377772e7fb337533c365085b31619005802a79242fee620bc1"}, + {file = "rpds_py-0.18.0-cp311-none-win_amd64.whl", hash = "sha256:998e33ad22dc7ec7e030b3df701c43630b5bc0d8fbc2267653577e3fec279afa"}, + {file = "rpds_py-0.18.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:7f2facbd386dd60cbbf1a794181e6aa0bd429bd78bfdf775436020172e2a23f0"}, + {file = "rpds_py-0.18.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1d9a5be316c15ffb2b3c405c4ff14448c36b4435be062a7f578ccd8b01f0c4d8"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd5bf1af8efe569654bbef5a3e0a56eca45f87cfcffab31dd8dde70da5982475"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5417558f6887e9b6b65b4527232553c139b57ec42c64570569b155262ac0754f"}, + {file = 
"rpds_py-0.18.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:56a737287efecafc16f6d067c2ea0117abadcd078d58721f967952db329a3e5c"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8f03bccbd8586e9dd37219bce4d4e0d3ab492e6b3b533e973fa08a112cb2ffc9"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4457a94da0d5c53dc4b3e4de1158bdab077db23c53232f37a3cb7afdb053a4e3"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0ab39c1ba9023914297dd88ec3b3b3c3f33671baeb6acf82ad7ce883f6e8e157"}, + {file = "rpds_py-0.18.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9d54553c1136b50fd12cc17e5b11ad07374c316df307e4cfd6441bea5fb68496"}, + {file = "rpds_py-0.18.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0af039631b6de0397ab2ba16eaf2872e9f8fca391b44d3d8cac317860a700a3f"}, + {file = "rpds_py-0.18.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:84ffab12db93b5f6bad84c712c92060a2d321b35c3c9960b43d08d0f639d60d7"}, + {file = "rpds_py-0.18.0-cp312-none-win32.whl", hash = "sha256:685537e07897f173abcf67258bee3c05c374fa6fff89d4c7e42fb391b0605e98"}, + {file = "rpds_py-0.18.0-cp312-none-win_amd64.whl", hash = "sha256:e003b002ec72c8d5a3e3da2989c7d6065b47d9eaa70cd8808b5384fbb970f4ec"}, + {file = "rpds_py-0.18.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:08f9ad53c3f31dfb4baa00da22f1e862900f45908383c062c27628754af2e88e"}, + {file = "rpds_py-0.18.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c0013fe6b46aa496a6749c77e00a3eb07952832ad6166bd481c74bda0dcb6d58"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e32a92116d4f2a80b629778280103d2a510a5b3f6314ceccd6e38006b5e92dcb"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e541ec6f2ec456934fd279a3120f856cd0aedd209fc3852eca563f81738f6861"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bed88b9a458e354014d662d47e7a5baafd7ff81c780fd91584a10d6ec842cb73"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2644e47de560eb7bd55c20fc59f6daa04682655c58d08185a9b95c1970fa1e07"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e8916ae4c720529e18afa0b879473049e95949bf97042e938530e072fde061d"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:465a3eb5659338cf2a9243e50ad9b2296fa15061736d6e26240e713522b6235c"}, + {file = "rpds_py-0.18.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ea7d4a99f3b38c37eac212dbd6ec42b7a5ec51e2c74b5d3223e43c811609e65f"}, + {file = "rpds_py-0.18.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:67071a6171e92b6da534b8ae326505f7c18022c6f19072a81dcf40db2638767c"}, + {file = "rpds_py-0.18.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:41ef53e7c58aa4ef281da975f62c258950f54b76ec8e45941e93a3d1d8580594"}, + {file = "rpds_py-0.18.0-cp38-none-win32.whl", hash = "sha256:fdea4952db2793c4ad0bdccd27c1d8fdd1423a92f04598bc39425bcc2b8ee46e"}, + {file = "rpds_py-0.18.0-cp38-none-win_amd64.whl", hash = "sha256:7cd863afe7336c62ec78d7d1349a2f34c007a3cc6c2369d667c65aeec412a5b1"}, + {file = "rpds_py-0.18.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:5307def11a35f5ae4581a0b658b0af8178c65c530e94893345bebf41cc139d33"}, + {file = 
"rpds_py-0.18.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:77f195baa60a54ef9d2de16fbbfd3ff8b04edc0c0140a761b56c267ac11aa467"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39f5441553f1c2aed4de4377178ad8ff8f9d733723d6c66d983d75341de265ab"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9a00312dea9310d4cb7dbd7787e722d2e86a95c2db92fbd7d0155f97127bcb40"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8f2fc11e8fe034ee3c34d316d0ad8808f45bc3b9ce5857ff29d513f3ff2923a1"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:586f8204935b9ec884500498ccc91aa869fc652c40c093bd9e1471fbcc25c022"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ddc2f4dfd396c7bfa18e6ce371cba60e4cf9d2e5cdb71376aa2da264605b60b9"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5ddcba87675b6d509139d1b521e0c8250e967e63b5909a7e8f8944d0f90ff36f"}, + {file = "rpds_py-0.18.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7bd339195d84439cbe5771546fe8a4e8a7a045417d8f9de9a368c434e42a721e"}, + {file = "rpds_py-0.18.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:d7c36232a90d4755b720fbd76739d8891732b18cf240a9c645d75f00639a9024"}, + {file = "rpds_py-0.18.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6b0817e34942b2ca527b0e9298373e7cc75f429e8da2055607f4931fded23e20"}, + {file = "rpds_py-0.18.0-cp39-none-win32.whl", hash = "sha256:99f70b740dc04d09e6b2699b675874367885217a2e9f782bdf5395632ac663b7"}, + {file = "rpds_py-0.18.0-cp39-none-win_amd64.whl", hash = "sha256:6ef687afab047554a2d366e112dd187b62d261d49eb79b77e386f94644363294"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ad36cfb355e24f1bd37cac88c112cd7730873f20fb0bdaf8ba59eedf8216079f"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:36b3ee798c58ace201289024b52788161e1ea133e4ac93fba7d49da5fec0ef9e"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8a2f084546cc59ea99fda8e070be2fd140c3092dc11524a71aa8f0f3d5a55ca"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e4461d0f003a0aa9be2bdd1b798a041f177189c1a0f7619fe8c95ad08d9a45d7"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8db715ebe3bb7d86d77ac1826f7d67ec11a70dbd2376b7cc214199360517b641"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:793968759cd0d96cac1e367afd70c235867831983f876a53389ad869b043c948"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66e6a3af5a75363d2c9a48b07cb27c4ea542938b1a2e93b15a503cdfa8490795"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6ef0befbb5d79cf32d0266f5cff01545602344eda89480e1dd88aca964260b18"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:1d4acf42190d449d5e89654d5c1ed3a4f17925eec71f05e2a41414689cda02d1"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:a5f446dd5055667aabaee78487f2b5ab72e244f9bc0b2ffebfeec79051679984"}, + {file = 
"rpds_py-0.18.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:9dbbeb27f4e70bfd9eec1be5477517365afe05a9b2c441a0b21929ee61048124"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:22806714311a69fd0af9b35b7be97c18a0fc2826e6827dbb3a8c94eac6cf7eeb"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:b34ae4636dfc4e76a438ab826a0d1eed2589ca7d9a1b2d5bb546978ac6485461"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c8370641f1a7f0e0669ddccca22f1da893cef7628396431eb445d46d893e5cd"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c8362467a0fdeccd47935f22c256bec5e6abe543bf0d66e3d3d57a8fb5731863"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:11a8c85ef4a07a7638180bf04fe189d12757c696eb41f310d2426895356dcf05"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b316144e85316da2723f9d8dc75bada12fa58489a527091fa1d5a612643d1a0e"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf1ea2e34868f6fbf070e1af291c8180480310173de0b0c43fc38a02929fc0e3"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e546e768d08ad55b20b11dbb78a745151acbd938f8f00d0cfbabe8b0199b9880"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:4901165d170a5fde6f589acb90a6b33629ad1ec976d4529e769c6f3d885e3e80"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:618a3d6cae6ef8ec88bb76dd80b83cfe415ad4f1d942ca2a903bf6b6ff97a2da"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:ed4eb745efbff0a8e9587d22a84be94a5eb7d2d99c02dacf7bd0911713ed14dd"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:6c81e5f372cd0dc5dc4809553d34f832f60a46034a5f187756d9b90586c2c307"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:43fbac5f22e25bee1d482c97474f930a353542855f05c1161fd804c9dc74a09d"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d7faa6f14017c0b1e69f5e2c357b998731ea75a442ab3841c0dbbbfe902d2c4"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:08231ac30a842bd04daabc4d71fddd7e6d26189406d5a69535638e4dcb88fe76"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:044a3e61a7c2dafacae99d1e722cc2d4c05280790ec5a05031b3876809d89a5c"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3f26b5bd1079acdb0c7a5645e350fe54d16b17bfc5e71f371c449383d3342e17"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:482103aed1dfe2f3b71a58eff35ba105289b8d862551ea576bd15479aba01f66"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1374f4129f9bcca53a1bba0bb86bf78325a0374577cf7e9e4cd046b1e6f20e24"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:635dc434ff724b178cb192c70016cc0ad25a275228f749ee0daf0eddbc8183b1"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = 
"sha256:bc362ee4e314870a70f4ae88772d72d877246537d9f8cb8f7eacf10884862432"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:4832d7d380477521a8c1644bbab6588dfedea5e30a7d967b5fb75977c45fd77f"}, + {file = "rpds_py-0.18.0.tar.gz", hash = "sha256:42821446ee7a76f5d9f71f9e33a4fb2ffd724bb3e7f93386150b61a43115788d"}, ] [[package]] @@ -3703,18 +3808,18 @@ pbr = "*" [[package]] name = "setuptools" -version = "69.0.3" +version = "69.1.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.0.3-py3-none-any.whl", hash = "sha256:385eb4edd9c9d5c17540511303e39a147ce2fc04bc55289c322b9e5904fe2c05"}, - {file = "setuptools-69.0.3.tar.gz", hash = "sha256:be1af57fc409f93647f2e8e4573a142ed38724b8cdd389706a867bb4efcf1e78"}, + {file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"}, + {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] @@ -3899,13 +4004,13 @@ files = [ [[package]] name = "tqdm" -version = "4.66.1" +version = "4.66.2" description = "Fast, Extensible Progress Meter" optional = true python-versions = ">=3.7" files = [ - {file = "tqdm-4.66.1-py3-none-any.whl", hash = "sha256:d302b3c5b53d47bce91fea46679d9c3c6508cf6332229aa1e7d8653723793386"}, - {file = "tqdm-4.66.1.tar.gz", hash = "sha256:d88e651f9db8d8551a62556d3cff9e3034274ca5d66e93197cf2490e2dcb69c7"}, + {file = "tqdm-4.66.2-py3-none-any.whl", hash = "sha256:1ee4f8a893eb9bef51c6e35730cebf234d5d0b6bd112b0271e10ed7c24a02bd9"}, + {file = "tqdm-4.66.2.tar.gz", hash = "sha256:6cd52cdf0fef0e0f543299cfc96fec90d7b8a7e88745f411ec33eb44d5ed3531"}, ] [package.dependencies] @@ -3930,13 +4035,13 @@ files = [ [[package]] name = "tzdata" -version = "2023.4" +version = "2024.1" description = "Provider of IANA time zone data" optional = true python-versions = ">=2" files = [ - {file = "tzdata-2023.4-py2.py3-none-any.whl", hash = 
"sha256:aa3ace4329eeacda5b7beb7ea08ece826c28d761cda36e747cfbf97996d39bf3"}, - {file = "tzdata-2023.4.tar.gz", hash = "sha256:dd54c94f294765522c77399649b4fefd95522479a664a0cec87f41bebc6148c9"}, + {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, + {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, ] [[package]] @@ -4298,4 +4403,4 @@ zstandard = ["zstandard"] [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "5553acdf7ad32ec9dbc74523b9f7ff241907b0d8129a40e3052750c3182e7539" +content-hash = "e56883bd2961705dffa548c4a32f2d4215a646de149e12aa519df336ede48076" diff --git a/pyproject.toml b/pyproject.toml index 4095f325da..79e6eef525 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -47,7 +47,7 @@ include = [ [tool.poetry.dependencies] python = "^3.8" -mmhash3 = ">=3.0.0,<4.0.0" +mmh3 = ">=4.0.0,<5.0.0" requests = ">=2.20.0,<3.0.0" click = ">=7.1.1,<9.0.0" rich = ">=10.11.0,<14.0.0" From 36a505f7741f814c00b8babf6f26e89efde5b688 Mon Sep 17 00:00:00 2001 From: Kevin Liu Date: Wed, 13 Mar 2024 01:20:02 -0700 Subject: [PATCH 169/169] Improve the InMemory Catalog Implementation (#289) * extract InMemoryCatalog out of test * generalize InMemoryCatalog * make write work * write to temporary location * can override table location * memory.py -> in_memory.py * fix test_commit_table * rebase from main * revert fs changes * fix tests * add docs and comments * comma * comment * order * fix test * add license * `create_table` write metadata file * move InMemoryCatalog back to test_base * remove unused references * Update mkdocs/docs/configuration.md Co-authored-by: Fokko Driesprong * Update mkdocs/docs/configuration.md Co-authored-by: Fokko Driesprong * Update tests/catalog/test_base.py Co-authored-by: Fokko Driesprong * remove schema_id --------- Co-authored-by: Fokko Driesprong --- pyiceberg/cli/output.py | 2 +- tests/catalog/test_base.py | 104 +++++++++++++++++++++++++------------ tests/cli/test_console.py | 68 +++++++++++------------- 3 files changed, 101 insertions(+), 73 deletions(-) diff --git a/pyiceberg/cli/output.py b/pyiceberg/cli/output.py index 67081fbd8d..56b544c99f 100644 --- a/pyiceberg/cli/output.py +++ b/pyiceberg/cli/output.py @@ -158,7 +158,7 @@ def describe_properties(self, properties: Properties) -> None: Console().print(output_table) def text(self, response: str) -> None: - Console().print(response) + Console(soft_wrap=True).print(response) def schema(self, schema: Schema) -> None: output_table = self._table diff --git a/tests/catalog/test_base.py b/tests/catalog/test_base.py index 1f0060780e..44c36a7d2d 100644 --- a/tests/catalog/test_base.py +++ b/tests/catalog/test_base.py @@ -16,6 +16,9 @@ # under the License. 
diff --git a/tests/catalog/test_base.py b/tests/catalog/test_base.py
index 1f0060780e..44c36a7d2d 100644
--- a/tests/catalog/test_base.py
+++ b/tests/catalog/test_base.py
@@ -16,6 +16,9 @@
 # under the License.
 # pylint:disable=redefined-outer-name
+
+import uuid
+from pathlib import PosixPath
 from typing import (
     Dict,
     List,
@@ -40,7 +43,7 @@
     NoSuchTableError,
     TableAlreadyExistsError,
 )
-from pyiceberg.io import load_file_io
+from pyiceberg.io import WAREHOUSE, load_file_io
 from pyiceberg.partitioning import UNPARTITIONED_PARTITION_SPEC, PartitionField, PartitionSpec
 from pyiceberg.schema import Schema
 from pyiceberg.table import (
@@ -53,15 +56,21 @@
     TableIdentifier,
     update_table_metadata,
 )
-from pyiceberg.table.metadata import TableMetadataV1
+from pyiceberg.table.metadata import new_table_metadata
 from pyiceberg.table.sorting import UNSORTED_SORT_ORDER, SortOrder
 from pyiceberg.transforms import IdentityTransform
 from pyiceberg.typedef import EMPTY_DICT, Identifier, Properties
 from pyiceberg.types import IntegerType, LongType, NestedField

+DEFAULT_WAREHOUSE_LOCATION = "file:///tmp/warehouse"
+

 class InMemoryCatalog(Catalog):
-    """An in-memory catalog implementation for testing purposes."""
+    """
+    An in-memory catalog implementation that uses in-memory data-structures to store the namespaces and tables.
+
+    This is useful for test, demo, and playground but not in production as data is not persisted.
+    """

     __tables: Dict[Identifier, Table]
     __namespaces: Dict[Identifier, Properties]
@@ -70,6 +79,7 @@ def __init__(self, name: str, **properties: str) -> None:
         super().__init__(name, **properties)
         self.__tables = {}
         self.__namespaces = {}
+        self._warehouse_location = properties.get(WAREHOUSE, DEFAULT_WAREHOUSE_LOCATION)

     def create_table(
         self,
@@ -79,6 +89,7 @@ def create_table(
         partition_spec: PartitionSpec = UNPARTITIONED_PARTITION_SPEC,
         sort_order: SortOrder = UNSORTED_SORT_ORDER,
         properties: Properties = EMPTY_DICT,
+        table_uuid: Optional[uuid.UUID] = None,
     ) -> Table:
         schema: Schema = self._convert_schema_if_needed(schema)  # type: ignore

@@ -91,24 +102,26 @@
         if namespace not in self.__namespaces:
             self.__namespaces[namespace] = {}

-        new_location = location or f's3://warehouse/{"/".join(identifier)}/data'
-        metadata = TableMetadataV1(**{
-            "format-version": 1,
-            "table-uuid": "d20125c8-7284-442c-9aea-15fee620737c",
-            "location": new_location,
-            "last-updated-ms": 1602638573874,
-            "last-column-id": schema.highest_field_id,
-            "schema": schema.model_dump(),
-            "partition-spec": partition_spec.model_dump()["fields"],
-            "properties": properties,
-            "current-snapshot-id": -1,
-            "snapshots": [{"snapshot-id": 1925, "timestamp-ms": 1602638573822}],
-        })
+        if not location:
+            location = f'{self._warehouse_location}/{"/".join(identifier)}'
+
+        metadata_location = self._get_metadata_location(location=location)
+        metadata = new_table_metadata(
+            schema=schema,
+            partition_spec=partition_spec,
+            sort_order=sort_order,
+            location=location,
+            properties=properties,
+            table_uuid=table_uuid,
+        )
+        io = load_file_io({**self.properties, **properties}, location=location)
+        self._write_metadata(metadata, io, metadata_location)
+
         table = Table(
             identifier=identifier,
             metadata=metadata,
-            metadata_location=f's3://warehouse/{"/".join(identifier)}/metadata/metadata.json',
-            io=load_file_io(),
+            metadata_location=metadata_location,
+            io=io,
             catalog=self,
         )
         self.__tables[identifier] = table
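With `create_table` now writing a real metadata file through `load_file_io`, the catalog round-trips tables through the warehouse directory instead of fabricating a static `TableMetadataV1`. A rough usage sketch, assuming the `InMemoryCatalog` above is importable and a local temporary directory is an acceptable warehouse (the catalog name and identifier are illustrative, not part of the patch):

import tempfile

from pyiceberg.schema import Schema
from pyiceberg.types import LongType, NestedField

catalog = InMemoryCatalog("demo", warehouse=tempfile.mkdtemp())
table = catalog.create_table(
    identifier=("default", "events"),
    schema=Schema(NestedField(1, "id", LongType(), required=True)),
)
# The metadata JSON now exists on disk under <warehouse>/default/events/metadata/.
print(table.metadata_location)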
@@ -118,14 +131,29 @@ def register_table(self, identifier: Union[str, Identifier], metadata_location:
         raise NotImplementedError

     def _commit_table(self, table_request: CommitTableRequest) -> CommitTableResponse:
-        identifier = tuple(table_request.identifier.namespace.root) + (table_request.identifier.name,)
-        table = self.__tables[identifier]
-        table.metadata = update_table_metadata(base_metadata=table.metadata, updates=table_request.updates)
-
-        return CommitTableResponse(
-            metadata=table.metadata.model_dump(),
-            metadata_location=table.location(),
-        )
+        identifier_tuple = self.identifier_to_tuple_without_catalog(
+            tuple(table_request.identifier.namespace.root + [table_request.identifier.name])
+        )
+        current_table = self.load_table(identifier_tuple)
+        base_metadata = current_table.metadata
+
+        for requirement in table_request.requirements:
+            requirement.validate(base_metadata)
+
+        updated_metadata = update_table_metadata(base_metadata, table_request.updates)
+        if updated_metadata == base_metadata:
+            # no changes, do nothing
+            return CommitTableResponse(metadata=base_metadata, metadata_location=current_table.metadata_location)
+
+        # write new metadata
+        new_metadata_version = self._parse_metadata_version(current_table.metadata_location) + 1
+        new_metadata_location = self._get_metadata_location(current_table.metadata.location, new_metadata_version)
+        self._write_metadata(updated_metadata, current_table.io, new_metadata_location)
+
+        # update table state
+        current_table.metadata = updated_metadata
+
+        return CommitTableResponse(metadata=updated_metadata, metadata_location=new_metadata_location)

     def load_table(self, identifier: Union[str, Identifier]) -> Table:
         identifier = self.identifier_to_tuple_without_catalog(identifier)
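The commit path is optimistic concurrency in miniature: every requirement is validated against the metadata currently on disk, and a successful commit writes a fresh, monotonically numbered metadata file rather than mutating the old one. A toy sketch of the version-numbered naming this relies on (the `<%05d>-<uuid>.metadata.json` layout is an assumption about pyiceberg's convention; the real logic lives in `_parse_metadata_version` and `_get_metadata_location` on the base `Catalog`):

import uuid

def next_metadata_location(table_location: str, current_version: int) -> str:
    # Illustrative only: bump the version and mint a new uniquely named metadata file.
    new_version = current_version + 1
    return f"{table_location}/metadata/{new_version:05d}-{uuid.uuid4()}.metadata.json"

print(next_metadata_location("file:///tmp/warehouse/default/events", 0))
# -> .../metadata/00001-<random uuid>.metadata.json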
@@ -160,7 +188,7 @@ def rename_table(self, from_identifier: Union[str, Identifier], to_identifier: U
             identifier=to_identifier,
             metadata=table.metadata,
             metadata_location=table.metadata_location,
-            io=load_file_io(),
+            io=self._load_file_io(properties=table.metadata.properties, location=table.metadata_location),
             catalog=self,
         )
         return self.__tables[to_identifier]
@@ -232,8 +260,8 @@ def update_namespace_properties(

 @pytest.fixture
-def catalog() -> InMemoryCatalog:
-    return InMemoryCatalog("test.in.memory.catalog", **{"test.key": "test.value"})
+def catalog(tmp_path: PosixPath) -> InMemoryCatalog:
+    return InMemoryCatalog("test.in_memory.catalog", **{WAREHOUSE: tmp_path.absolute().as_posix(), "test.key": "test.value"})


 TEST_TABLE_IDENTIFIER = ("com", "organization", "department", "my_table")
@@ -244,7 +272,6 @@
     NestedField(2, "y", LongType(), doc="comment"),
     NestedField(3, "z", LongType()),
 )
-TEST_TABLE_LOCATION = "protocol://some/location"
 TEST_TABLE_PARTITION_SPEC = PartitionSpec(PartitionField(name="x", transform=IdentityTransform(), source_id=1, field_id=1000))
 TEST_TABLE_PROPERTIES = {"key1": "value1", "key2": "value2"}
 NO_SUCH_TABLE_ERROR = "Table does not exist: \\('com', 'organization', 'department', 'my_table'\\)"
@@ -261,7 +288,6 @@ def given_catalog_has_a_table(
     return catalog.create_table(
         identifier=TEST_TABLE_IDENTIFIER,
         schema=TEST_TABLE_SCHEMA,
-        location=TEST_TABLE_LOCATION,
         partition_spec=TEST_TABLE_PARTITION_SPEC,
         properties=properties or TEST_TABLE_PROPERTIES,
     )
@@ -307,13 +333,25 @@ def test_create_table(catalog: InMemoryCatalog) -> None:
     table = catalog.create_table(
         identifier=TEST_TABLE_IDENTIFIER,
         schema=TEST_TABLE_SCHEMA,
-        location=TEST_TABLE_LOCATION,
         partition_spec=TEST_TABLE_PARTITION_SPEC,
         properties=TEST_TABLE_PROPERTIES,
     )
     assert catalog.load_table(TEST_TABLE_IDENTIFIER) == table


+def test_create_table_location_override(catalog: InMemoryCatalog) -> None:
+    new_location = f"{catalog._warehouse_location}/new_location"
+    table = catalog.create_table(
+        identifier=TEST_TABLE_IDENTIFIER,
+        schema=TEST_TABLE_SCHEMA,
+        location=new_location,
+        partition_spec=TEST_TABLE_PARTITION_SPEC,
+        properties=TEST_TABLE_PROPERTIES,
+    )
+    assert catalog.load_table(TEST_TABLE_IDENTIFIER) == table
+    assert table.location() == new_location
+
+
 @pytest.mark.parametrize(
     "schema,expected",
     [
@@ -335,8 +373,6 @@ def test_create_table_pyarrow_schema(catalog: InMemoryCatalog, pyarrow_schema_si
     table = catalog.create_table(
         identifier=TEST_TABLE_IDENTIFIER,
         schema=pyarrow_schema_simple_without_ids,
-        location=TEST_TABLE_LOCATION,
-        partition_spec=TEST_TABLE_PARTITION_SPEC,
         properties=TEST_TABLE_PROPERTIES,
     )
     assert catalog.load_table(TEST_TABLE_IDENTIFIER) == table
@@ -662,7 +698,7 @@ def test_add_column_with_statement(catalog: InMemoryCatalog) -> None:

 def test_catalog_repr(catalog: InMemoryCatalog) -> None:
     s = repr(catalog)
-    assert s == "test.in.memory.catalog ()"
+    assert s == "test.in_memory.catalog ()"


 def test_table_properties_int_value(catalog: InMemoryCatalog) -> None:
diff --git a/tests/cli/test_console.py b/tests/cli/test_console.py
index d77b290ec6..3c208c0ab1 100644
--- a/tests/cli/test_console.py
+++ b/tests/cli/test_console.py
@@ -14,13 +14,18 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
+import datetime
 import os
+import uuid
+from pathlib import PosixPath
+from unittest.mock import MagicMock

 import pytest
 from click.testing import CliRunner
 from pytest_mock import MockFixture

 from pyiceberg.cli.console import run
+from pyiceberg.io import WAREHOUSE
 from pyiceberg.partitioning import PartitionField, PartitionSpec
 from pyiceberg.schema import Schema
 from pyiceberg.transforms import IdentityTransform
@@ -48,8 +53,10 @@ def env_vars(mocker: MockFixture) -> None:

 @pytest.fixture(name="catalog")
-def fixture_catalog(mocker: MockFixture) -> InMemoryCatalog:
-    in_memory_catalog = InMemoryCatalog("test.in.memory.catalog", **{"test.key": "test.value"})
+def fixture_catalog(mocker: MockFixture, tmp_path: PosixPath) -> InMemoryCatalog:
+    in_memory_catalog = InMemoryCatalog(
+        "test.in_memory.catalog", **{WAREHOUSE: tmp_path.absolute().as_posix(), "test.key": "test.value"}
+    )
     mocker.patch("pyiceberg.cli.console.load_catalog", return_value=in_memory_catalog)
     return in_memory_catalog

@@ -59,6 +66,13 @@ def fixture_namespace_properties() -> Properties:
     return TEST_NAMESPACE_PROPERTIES.copy()


+@pytest.fixture()
+def mock_datetime_now(monkeypatch: pytest.MonkeyPatch) -> None:
+    datetime_mock = MagicMock(wraps=datetime.datetime)
+    datetime_mock.now.return_value = datetime.datetime.fromtimestamp(TEST_TIMESTAMP / 1000.0).astimezone()
+    monkeypatch.setattr(datetime, "datetime", datetime_mock)
+
+
 TEST_TABLE_IDENTIFIER = ("default", "my_table")
 TEST_TABLE_NAMESPACE = "default"
 TEST_NAMESPACE_PROPERTIES = {"location": "s3://warehouse/database/location"}
 TEST_TABLE_SCHEMA = Schema(
     NestedField(1, "x", LongType()),
     NestedField(2, "y", LongType(), doc="comment"),
     NestedField(3, "z", LongType()),
 )
-TEST_TABLE_LOCATION = "s3://bucket/test/location"
 TEST_TABLE_PARTITION_SPEC = PartitionSpec(PartitionField(name="x", transform=IdentityTransform(), source_id=1, field_id=1000))
 TEST_TABLE_PROPERTIES = {"read.split.target.size": "134217728"}
+TEST_TABLE_UUID = uuid.UUID("d20125c8-7284-442c-9aea-15fee620737c")
+TEST_TIMESTAMP = 1602638573874
 MOCK_ENVIRONMENT = {"PYICEBERG_CATALOG__PRODUCTION__URI": "test://doesnotexist"}
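The `mock_datetime_now` fixture pins `datetime.datetime.now()` so that metadata written during a test carries a deterministic `last-updated-ms`; `MagicMock(wraps=...)` overrides only `now` while passing every other use of the class through to the real `datetime`. The same pattern in a self-contained test (the frozen timestamp mirrors the one used above):

import datetime
from unittest.mock import MagicMock

import pytest

FROZEN_MS = 1602638573874

def test_now_is_frozen(monkeypatch: pytest.MonkeyPatch) -> None:
    frozen = datetime.datetime.fromtimestamp(FROZEN_MS / 1000.0).astimezone()
    datetime_mock = MagicMock(wraps=datetime.datetime)
    datetime_mock.now.return_value = frozen
    monkeypatch.setattr(datetime, "datetime", datetime_mock)

    # Any code that calls datetime.datetime.now() sees the frozen instant...
    assert datetime.datetime.now() is frozen
    # ...while ordinary construction still works, because the mock wraps the real class.
    assert datetime.datetime(2024, 1, 1).year == 2024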
@@ -88,7 +103,6 @@ def test_list_namespace(catalog: InMemoryCatalog) -> None:
     catalog.create_table(
         identifier=TEST_TABLE_IDENTIFIER,
         schema=TEST_TABLE_SCHEMA,
-        location=TEST_TABLE_LOCATION,
         partition_spec=TEST_TABLE_PARTITION_SPEC,
         properties=TEST_TABLE_PROPERTIES,
     )
@@ -120,12 +134,13 @@ def test_describe_namespace_does_not_exists(catalog: InMemoryCatalog) -> None:
     assert result.output == "Namespace does not exist: ('doesnotexist',)\n"

-def test_describe_table(catalog: InMemoryCatalog) -> None:
+def test_describe_table(catalog: InMemoryCatalog, mock_datetime_now: None) -> None:
     catalog.create_table(
         identifier=TEST_TABLE_IDENTIFIER,
         schema=TEST_TABLE_SCHEMA,
-        location=TEST_TABLE_LOCATION,
         partition_spec=TEST_TABLE_PARTITION_SPEC,
+        table_uuid=TEST_TABLE_UUID,
     )

     runner = CliRunner()
@@ -134,7 +149,7 @@ def test_describe_table(catalog: InMemoryCatalog) -> None:
     assert (
         # Strip the whitespace on the end
         "\n".join([line.rstrip() for line in result.output.split("\n")])
-        == """Table format version 1
+        == """Table format version 2
 Metadata location     s3://warehouse/default/my_table/metadata/metadata.json
 Table UUID            d20125c8-7284-442c-9aea-15fee620737c
 Last Updated          1602638573874
@@ -167,7 +182,7 @@

 def test_schema(catalog: InMemoryCatalog) -> None:
     catalog.create_table(
         identifier=TEST_TABLE_IDENTIFIER,
         schema=TEST_TABLE_SCHEMA,
-        location=TEST_TABLE_LOCATION,
         partition_spec=TEST_TABLE_PARTITION_SPEC,
     )
@@ -196,7 +210,7 @@

 def test_spec(catalog: InMemoryCatalog) -> None:
     catalog.create_table(
         identifier=TEST_TABLE_IDENTIFIER,
         schema=TEST_TABLE_SCHEMA,
-        location=TEST_TABLE_LOCATION,
         partition_spec=TEST_TABLE_PARTITION_SPEC,
     )
@@ -225,8 +238,8 @@

 def test_uuid(catalog: InMemoryCatalog) -> None:
     catalog.create_table(
         identifier=TEST_TABLE_IDENTIFIER,
         schema=TEST_TABLE_SCHEMA,
-        location=TEST_TABLE_LOCATION,
         partition_spec=TEST_TABLE_PARTITION_SPEC,
+        table_uuid=TEST_TABLE_UUID,
     )

     runner = CliRunner()
@@ -248,14 +261,12 @@

 def test_location(catalog: InMemoryCatalog) -> None:
     catalog.create_table(
         identifier=TEST_TABLE_IDENTIFIER,
         schema=TEST_TABLE_SCHEMA,
-        location=TEST_TABLE_LOCATION,
         partition_spec=TEST_TABLE_PARTITION_SPEC,
     )
-
     runner = CliRunner()
     result = runner.invoke(run, ["location", "default.my_table"])
     assert result.exit_code == 0
-    assert result.output == """s3://bucket/test/location\n"""
+    assert result.output == f"""{catalog._warehouse_location}/default/my_table\n"""

 def test_location_does_not_exists(catalog: InMemoryCatalog) -> None:
@@ -271,7 +282,6 @@ def test_drop_table(catalog: InMemoryCatalog) -> None:
     catalog.create_table(
         identifier=TEST_TABLE_IDENTIFIER,
         schema=TEST_TABLE_SCHEMA,
-        location=TEST_TABLE_LOCATION,
         partition_spec=TEST_TABLE_PARTITION_SPEC,
     )
@@ -328,7 +338,6 @@ def test_rename_table(catalog: InMemoryCatalog) -> None:
     catalog.create_table(
         identifier=TEST_TABLE_IDENTIFIER,
         schema=TEST_TABLE_SCHEMA,
-        location=TEST_TABLE_LOCATION,
         partition_spec=TEST_TABLE_PARTITION_SPEC,
     )
@@ -351,7 +360,6 @@ def test_properties_get_table(catalog: InMemoryCatalog) -> None:
     catalog.create_table(
         identifier=TEST_TABLE_IDENTIFIER,
         schema=TEST_TABLE_SCHEMA,
-        location=TEST_TABLE_LOCATION,
         partition_spec=TEST_TABLE_PARTITION_SPEC,
         properties=TEST_TABLE_PROPERTIES,
     )
@@ -366,7 +374,6 @@ def test_properties_get_table_specific_property(catalog: InMemoryCatalog) -> Non
     catalog.create_table(
         identifier=TEST_TABLE_IDENTIFIER,
         schema=TEST_TABLE_SCHEMA,
-        location=TEST_TABLE_LOCATION,
         partition_spec=TEST_TABLE_PARTITION_SPEC,
         properties=TEST_TABLE_PROPERTIES,
     )
@@ -381,7 +388,6 @@ def test_properties_get_table_specific_property_that_doesnt_exist(catalog: InMem
     catalog.create_table(
         identifier=TEST_TABLE_IDENTIFIER,
         schema=TEST_TABLE_SCHEMA,
-        location=TEST_TABLE_LOCATION,
         partition_spec=TEST_TABLE_PARTITION_SPEC,
         properties=TEST_TABLE_PROPERTIES,
     )
@@ -450,7 +456,6 @@ def test_properties_set_table(catalog: InMemoryCatalog) -> None:
     catalog.create_table(
         identifier=TEST_TABLE_IDENTIFIER,
         schema=TEST_TABLE_SCHEMA,
-        location=TEST_TABLE_LOCATION,
         partition_spec=TEST_TABLE_PARTITION_SPEC,
     )
@@ -491,7 +496,6 @@ def test_properties_remove_table(catalog: InMemoryCatalog) -> None:
     catalog.create_table(
         identifier=TEST_TABLE_IDENTIFIER,
         schema=TEST_TABLE_SCHEMA,
-        location=TEST_TABLE_LOCATION,
         partition_spec=TEST_TABLE_PARTITION_SPEC,
         properties=TEST_TABLE_PROPERTIES,
     )
@@ -506,7 +510,6 @@ def test_properties_remove_table_property_does_not_exists(catalog: InMemoryCatal
     catalog.create_table(
         identifier=TEST_TABLE_IDENTIFIER,
         schema=TEST_TABLE_SCHEMA,
-        location=TEST_TABLE_LOCATION,
         partition_spec=TEST_TABLE_PARTITION_SPEC,
     )
@@ -538,7 +541,6 @@ def test_json_list_namespace(catalog: InMemoryCatalog) -> None:
     catalog.create_table(
         identifier=TEST_TABLE_IDENTIFIER,
         schema=TEST_TABLE_SCHEMA,
-        location=TEST_TABLE_LOCATION,
         partition_spec=TEST_TABLE_PARTITION_SPEC,
     )
@@ -566,12 +568,13 @@ def test_json_describe_namespace_does_not_exists(catalog: InMemoryCatalog) -> No
     assert result.output == """{"type": "NoSuchNamespaceError", "message": "Namespace does not exist: ('doesnotexist',)"}\n"""

-def test_json_describe_table(catalog: InMemoryCatalog) -> None:
+def test_json_describe_table(catalog: InMemoryCatalog, mock_datetime_now: None) -> None:
     catalog.create_table(
         identifier=TEST_TABLE_IDENTIFIER,
         schema=TEST_TABLE_SCHEMA,
-        location=TEST_TABLE_LOCATION,
         partition_spec=TEST_TABLE_PARTITION_SPEC,
+        table_uuid=TEST_TABLE_UUID,
     )

     runner = CliRunner()
@@ -579,7 +582,7 @@ def test_json_describe_table(catalog: InMemoryCatalog) -> None:
     assert result.exit_code == 0
     assert (
         result.output
-        == """{"identifier":["default","my_table"],"metadata_location":"s3://warehouse/default/my_table/metadata/metadata.json","metadata":{"location":"s3://bucket/test/location","table-uuid":"d20125c8-7284-442c-9aea-15fee620737c","last-updated-ms":1602638573874,"last-column-id":3,"schemas":[{"type":"struct","fields":[{"id":1,"name":"x","type":"long","required":true},{"id":2,"name":"y","type":"long","required":true,"doc":"comment"},{"id":3,"name":"z","type":"long","required":true}],"schema-id":0,"identifier-field-ids":[]}],"current-schema-id":0,"partition-specs":[{"spec-id":0,"fields":[{"source-id":1,"field-id":1000,"transform":"identity","name":"x"}]}],"default-spec-id":0,"last-partition-id":1000,"properties":{},"snapshots":[{"snapshot-id":1925,"timestamp-ms":1602638573822}],"snapshot-log":[],"metadata-log":[],"sort-orders":[{"order-id":0,"fields":[]}],"default-sort-order-id":0,"refs":{},"format-version":1,"schema":{"type":"struct","fields":[{"id":1,"name":"x","type":"long","required":true},{"id":2,"name":"y","type":"long","required":true,"doc":"comment"},{"id":3,"name":"z","type":"long","required":true}],"schema-id":0,"identifier-field-ids":[]},"partition-spec":[{"source-id":1,"field-id":1000,"transform":"identity","name":"x"}]}}\n"""
"""{"identifier":["default","my_table"],"metadata_location":"s3://warehouse/default/my_table/metadata/metadata.json","metadata":{"location":"s3://bucket/test/location","table-uuid":"d20125c8-7284-442c-9aea-15fee620737c","last-updated-ms":1602638573874,"last-column-id":3,"schemas":[{"type":"struct","fields":[{"id":1,"name":"x","type":"long","required":true},{"id":2,"name":"y","type":"long","required":true,"doc":"comment"},{"id":3,"name":"z","type":"long","required":true}],"schema-id":0,"identifier-field-ids":[]}],"current-schema-id":0,"partition-specs":[{"spec-id":0,"fields":[{"source-id":1,"field-id":1000,"transform":"identity","name":"x"}]}],"default-spec-id":0,"last-partition-id":1000,"properties":{},"snapshots":[{"snapshot-id":1925,"timestamp-ms":1602638573822}],"snapshot-log":[],"metadata-log":[],"sort-orders":[{"order-id":0,"fields":[]}],"default-sort-order-id":0,"refs":{},"format-version":2,"schema":{"type":"struct","fields":[{"id":1,"name":"x","type":"long","required":true},{"id":2,"name":"y","type":"long","required":true,"doc":"comment"},{"id":3,"name":"z","type":"long","required":true}],"schema-id":0,"identifier-field-ids":[]},"partition-spec":[{"source-id":1,"field-id":1000,"transform":"identity","name":"x"}]}}\n""" ) @@ -599,7 +602,6 @@ def test_json_schema(catalog: InMemoryCatalog) -> None: catalog.create_table( identifier=TEST_TABLE_IDENTIFIER, schema=TEST_TABLE_SCHEMA, - location=TEST_TABLE_LOCATION, partition_spec=TEST_TABLE_PARTITION_SPEC, ) @@ -625,7 +627,6 @@ def test_json_spec(catalog: InMemoryCatalog) -> None: catalog.create_table( identifier=TEST_TABLE_IDENTIFIER, schema=TEST_TABLE_SCHEMA, - location=TEST_TABLE_LOCATION, partition_spec=TEST_TABLE_PARTITION_SPEC, ) @@ -648,8 +649,8 @@ def test_json_uuid(catalog: InMemoryCatalog) -> None: catalog.create_table( identifier=TEST_TABLE_IDENTIFIER, schema=TEST_TABLE_SCHEMA, - location=TEST_TABLE_LOCATION, partition_spec=TEST_TABLE_PARTITION_SPEC, + table_uuid=TEST_TABLE_UUID, ) runner = CliRunner() @@ -671,14 +672,13 @@ def test_json_location(catalog: InMemoryCatalog) -> None: catalog.create_table( identifier=TEST_TABLE_IDENTIFIER, schema=TEST_TABLE_SCHEMA, - location=TEST_TABLE_LOCATION, partition_spec=TEST_TABLE_PARTITION_SPEC, ) runner = CliRunner() result = runner.invoke(run, ["--output=json", "location", "default.my_table"]) assert result.exit_code == 0 - assert result.output == """"s3://bucket/test/location"\n""" + assert result.output == f'"{catalog._warehouse_location}/default/my_table"\n' def test_json_location_does_not_exists(catalog: InMemoryCatalog) -> None: @@ -694,7 +694,6 @@ def test_json_drop_table(catalog: InMemoryCatalog) -> None: catalog.create_table( identifier=TEST_TABLE_IDENTIFIER, schema=TEST_TABLE_SCHEMA, - location=TEST_TABLE_LOCATION, partition_spec=TEST_TABLE_PARTITION_SPEC, ) @@ -735,7 +734,6 @@ def test_json_rename_table(catalog: InMemoryCatalog) -> None: catalog.create_table( identifier=TEST_TABLE_IDENTIFIER, schema=TEST_TABLE_SCHEMA, - location=TEST_TABLE_LOCATION, partition_spec=TEST_TABLE_PARTITION_SPEC, ) @@ -758,7 +756,6 @@ def test_json_properties_get_table(catalog: InMemoryCatalog) -> None: catalog.create_table( identifier=TEST_TABLE_IDENTIFIER, schema=TEST_TABLE_SCHEMA, - location=TEST_TABLE_LOCATION, partition_spec=TEST_TABLE_PARTITION_SPEC, properties=TEST_TABLE_PROPERTIES, ) @@ -773,7 +770,6 @@ def test_json_properties_get_table_specific_property(catalog: InMemoryCatalog) - catalog.create_table( identifier=TEST_TABLE_IDENTIFIER, schema=TEST_TABLE_SCHEMA, - location=TEST_TABLE_LOCATION, 
partition_spec=TEST_TABLE_PARTITION_SPEC, properties=TEST_TABLE_PROPERTIES, ) @@ -788,7 +784,6 @@ def test_json_properties_get_table_specific_property_that_doesnt_exist(catalog: catalog.create_table( identifier=TEST_TABLE_IDENTIFIER, schema=TEST_TABLE_SCHEMA, - location=TEST_TABLE_LOCATION, partition_spec=TEST_TABLE_PARTITION_SPEC, properties=TEST_TABLE_PROPERTIES, ) @@ -862,7 +857,6 @@ def test_json_properties_set_table(catalog: InMemoryCatalog) -> None: catalog.create_table( identifier=TEST_TABLE_IDENTIFIER, schema=TEST_TABLE_SCHEMA, - location=TEST_TABLE_LOCATION, partition_spec=TEST_TABLE_PARTITION_SPEC, properties=TEST_TABLE_PROPERTIES, ) @@ -908,7 +902,6 @@ def test_json_properties_remove_table(catalog: InMemoryCatalog) -> None: catalog.create_table( identifier=TEST_TABLE_IDENTIFIER, schema=TEST_TABLE_SCHEMA, - location=TEST_TABLE_LOCATION, partition_spec=TEST_TABLE_PARTITION_SPEC, properties=TEST_TABLE_PROPERTIES, ) @@ -923,7 +916,6 @@ def test_json_properties_remove_table_property_does_not_exists(catalog: InMemory catalog.create_table( identifier=TEST_TABLE_IDENTIFIER, schema=TEST_TABLE_SCHEMA, - location=TEST_TABLE_LOCATION, partition_spec=TEST_TABLE_PARTITION_SPEC, properties=TEST_TABLE_PROPERTIES, )
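All of the console tests above share one shape: construct a `CliRunner`, invoke the `run` entry point with CLI arguments, then assert on `exit_code` and `output`. A minimal self-contained version of that pattern with a toy click command (`greet` is invented for illustration and is not part of pyiceberg):

import click
from click.testing import CliRunner

@click.command()
@click.argument("name")
def greet(name: str) -> None:
    # Stand-in for pyiceberg's `run` entry point.
    click.echo(f"hello {name}")

def test_greet() -> None:
    runner = CliRunner()
    result = runner.invoke(greet, ["iceberg"])
    assert result.exit_code == 0
    assert result.output == "hello iceberg\n"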