From 3aa2589e0ad7c200d3bc0efe5a0ff2096b691a58 Mon Sep 17 00:00:00 2001 From: fern-api <115122769+fern-api[bot]@users.noreply.github.com> Date: Tue, 11 Nov 2025 21:19:05 +0000 Subject: [PATCH] SDK Generation --- .fern/metadata.json | 9 + .github/workflows/ci.yml | 4 - LICENSE | 2 +- README.md | 3 +- poetry.lock | 103 +++++----- pyproject.toml | 7 +- requirements.txt | 1 - src/anduril/core/client_wrapper.py | 4 +- src/anduril/core/http_sse/__init__.py | 42 ++++ src/anduril/core/http_sse/_api.py | 112 +++++++++++ src/anduril/core/http_sse/_decoders.py | 61 ++++++ src/anduril/core/http_sse/_exceptions.py | 7 + src/anduril/core/http_sse/_models.py | 17 ++ src/anduril/core/pydantic_utilities.py | 4 +- src/anduril/entities/raw_client.py | 180 ++++++++++-------- .../types/stream_entities_response.py | 4 +- src/anduril/errors/bad_request_error.py | 2 +- src/anduril/errors/content_too_large_error.py | 2 +- .../errors/insufficient_storage_error.py | 2 +- src/anduril/errors/internal_server_error.py | 2 +- src/anduril/errors/not_found_error.py | 2 +- src/anduril/errors/request_timeout_error.py | 2 +- src/anduril/errors/too_many_requests_error.py | 2 +- src/anduril/errors/unauthorized_error.py | 2 +- src/anduril/objects/raw_client.py | 152 +++++++-------- src/anduril/tasks/raw_client.py | 104 +++++----- src/anduril/types/pose.py | 6 +- src/anduril/types/status.py | 21 +- 28 files changed, 559 insertions(+), 300 deletions(-) create mode 100644 .fern/metadata.json create mode 100644 src/anduril/core/http_sse/__init__.py create mode 100644 src/anduril/core/http_sse/_api.py create mode 100644 src/anduril/core/http_sse/_decoders.py create mode 100644 src/anduril/core/http_sse/_exceptions.py create mode 100644 src/anduril/core/http_sse/_models.py diff --git a/.fern/metadata.json b/.fern/metadata.json new file mode 100644 index 0000000..1801f3e --- /dev/null +++ b/.fern/metadata.json @@ -0,0 +1,9 @@ +{ + "cliVersion": "0.0.0", + "generatorName": "fernapi/fern-python-sdk", + "generatorVersion": "99.99.99", + "generatorConfig": { + "client_class_name": "Lattice", + "package_name": "anduril" + } +} \ No newline at end of file diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 9818c03..0455708 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1,8 +1,6 @@ name: ci - on: [push] jobs: - compile: runs-on: ubuntu-latest steps: - name: Checkout repo @@ -10,8 +8,6 @@ jobs: - name: Set up python uses: actions/setup-python@v4 with: - python-version: 3.8 - - name: Bootstrap poetry run: | curl -sSL https://install.python-poetry.org | python - -y --version 1.5.1 - name: Install dependencies diff --git a/LICENSE b/LICENSE index 9623e9f..87b1a69 100644 --- a/LICENSE +++ b/LICENSE @@ -186,4 +186,4 @@ of any court action, you agree to submit to the exclusive jurisdiction of the co Notwithstanding this, you agree that Anduril shall still be allowed to apply for injunctive remedies (or an equivalent type of urgent legal relief) in any jurisdiction. 
-**April 14, 2025** \ No newline at end of file +**April 14, 2025** diff --git a/README.md b/README.md index b7a692a..da22b31 100644 --- a/README.md +++ b/README.md @@ -2,7 +2,6 @@ ![](https://www.anduril.com/lattice-sdk/) -[![fern shield](https://img.shields.io/badge/%F0%9F%8C%BF-Built%20with%20Fern-brightgreen)](https://buildwithfern.com?utm_source=github&utm_medium=github&utm_campaign=readme&utm_source=https%3A%2F%2Fgithub.com%2Fanduril%2Flattice-sdk-python) [![pypi](https://img.shields.io/pypi/v/anduril-lattice-sdk)](https://pypi.python.org/pypi/anduril-lattice-sdk) The Lattice SDK Python library provides convenient access to the Lattice SDK APIs from Python. @@ -29,7 +28,7 @@ For support with this library please reach out to your Anduril representative. ## Reference -A full reference for this library is available [here](https://github.com/anduril/lattice-sdk-python/blob/HEAD/./reference.md). +A full reference for this library is available [here](https://github.com/fern-api/lattice-sdk-python/blob/HEAD/./reference.md). ## Usage diff --git a/poetry.lock b/poetry.lock index 0985d99..fef3795 100644 --- a/poetry.lock +++ b/poetry.lock @@ -38,13 +38,13 @@ trio = ["trio (>=0.26.1)"] [[package]] name = "certifi" -version = "2025.8.3" +version = "2025.10.5" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.7" files = [ - {file = "certifi-2025.8.3-py3-none-any.whl", hash = "sha256:f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5"}, - {file = "certifi-2025.8.3.tar.gz", hash = "sha256:e564105f78ded564e3ae7c923924435e1daa7463faeab5bb932bc53ffae63407"}, + {file = "certifi-2025.10.5-py3-none-any.whl", hash = "sha256:0f212c2744a9bb6de0c56639a6f68afe01ecd92d91f14ae897c4fe7bbeeef0de"}, + {file = "certifi-2025.10.5.tar.gz", hash = "sha256:47c09d31ccf2acf0be3f701ea53595ee7e0b8fa08801c6624be771df09ae7b43"}, ] [[package]] @@ -131,26 +131,15 @@ http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] zstd = ["zstandard (>=0.18.0)"] -[[package]] -name = "httpx-sse" -version = "0.4.0" -description = "Consume Server-Sent Event (SSE) messages with HTTPX." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "httpx-sse-0.4.0.tar.gz", hash = "sha256:1e81a3a3070ce322add1d3529ed42eb5f70817f45ed6ec915ab753f961139721"}, - {file = "httpx_sse-0.4.0-py3-none-any.whl", hash = "sha256:f329af6eae57eaa2bdfd962b42524764af68075ea87370a2de920af5341e318f"}, -] - [[package]] name = "idna" -version = "3.10" +version = "3.11" description = "Internationalized Domain Names in Applications (IDNA)" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, - {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, + {file = "idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea"}, + {file = "idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902"}, ] [package.extras] @@ -494,43 +483,53 @@ files = [ [[package]] name = "tomli" -version = "2.2.1" +version = "2.3.0" description = "A lil' TOML parser" optional = false python-versions = ">=3.8" files = [ - {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, - {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, - {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}, - {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}, - {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}, - {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}, - {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}, - {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}, - {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}, - {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}, - {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}, - {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}, - {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}, - {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}, - {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}, - {file = 
"tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}, - {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}, - {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}, - {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}, - {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}, - {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}, - {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}, - {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}, - {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}, - {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}, - {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}, - {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}, - {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}, - {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}, - {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}, - {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, - {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, + {file = "tomli-2.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:88bd15eb972f3664f5ed4b57c1634a97153b4bac4479dcb6a495f41921eb7f45"}, + {file = "tomli-2.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:883b1c0d6398a6a9d29b508c331fa56adbcdff647f6ace4dfca0f50e90dfd0ba"}, + {file = "tomli-2.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d1381caf13ab9f300e30dd8feadb3de072aeb86f1d34a8569453ff32a7dea4bf"}, + {file = "tomli-2.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a0e285d2649b78c0d9027570d4da3425bdb49830a6156121360b3f8511ea3441"}, + {file = "tomli-2.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0a154a9ae14bfcf5d8917a59b51ffd5a3ac1fd149b71b47a3a104ca4edcfa845"}, + {file = "tomli-2.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:74bf8464ff93e413514fefd2be591c3b0b23231a77f901db1eb30d6f712fc42c"}, + {file = "tomli-2.3.0-cp311-cp311-win32.whl", hash = "sha256:00b5f5d95bbfc7d12f91ad8c593a1659b6387b43f054104cda404be6bda62456"}, + {file = 
"tomli-2.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:4dc4ce8483a5d429ab602f111a93a6ab1ed425eae3122032db7e9acf449451be"}, + {file = "tomli-2.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d7d86942e56ded512a594786a5ba0a5e521d02529b3826e7761a05138341a2ac"}, + {file = "tomli-2.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:73ee0b47d4dad1c5e996e3cd33b8a76a50167ae5f96a2607cbe8cc773506ab22"}, + {file = "tomli-2.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:792262b94d5d0a466afb5bc63c7daa9d75520110971ee269152083270998316f"}, + {file = "tomli-2.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4f195fe57ecceac95a66a75ac24d9d5fbc98ef0962e09b2eddec5d39375aae52"}, + {file = "tomli-2.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e31d432427dcbf4d86958c184b9bfd1e96b5b71f8eb17e6d02531f434fd335b8"}, + {file = "tomli-2.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b0882799624980785240ab732537fcfc372601015c00f7fc367c55308c186f6"}, + {file = "tomli-2.3.0-cp312-cp312-win32.whl", hash = "sha256:ff72b71b5d10d22ecb084d345fc26f42b5143c5533db5e2eaba7d2d335358876"}, + {file = "tomli-2.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:1cb4ed918939151a03f33d4242ccd0aa5f11b3547d0cf30f7c74a408a5b99878"}, + {file = "tomli-2.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5192f562738228945d7b13d4930baffda67b69425a7f0da96d360b0a3888136b"}, + {file = "tomli-2.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:be71c93a63d738597996be9528f4abe628d1adf5e6eb11607bc8fe1a510b5dae"}, + {file = "tomli-2.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4665508bcbac83a31ff8ab08f424b665200c0e1e645d2bd9ab3d3e557b6185b"}, + {file = "tomli-2.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4021923f97266babc6ccab9f5068642a0095faa0a51a246a6a02fccbb3514eaf"}, + {file = "tomli-2.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4ea38c40145a357d513bffad0ed869f13c1773716cf71ccaa83b0fa0cc4e42f"}, + {file = "tomli-2.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ad805ea85eda330dbad64c7ea7a4556259665bdf9d2672f5dccc740eb9d3ca05"}, + {file = "tomli-2.3.0-cp313-cp313-win32.whl", hash = "sha256:97d5eec30149fd3294270e889b4234023f2c69747e555a27bd708828353ab606"}, + {file = "tomli-2.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:0c95ca56fbe89e065c6ead5b593ee64b84a26fca063b5d71a1122bf26e533999"}, + {file = "tomli-2.3.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:cebc6fe843e0733ee827a282aca4999b596241195f43b4cc371d64fc6639da9e"}, + {file = "tomli-2.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4c2ef0244c75aba9355561272009d934953817c49f47d768070c3c94355c2aa3"}, + {file = "tomli-2.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c22a8bf253bacc0cf11f35ad9808b6cb75ada2631c2d97c971122583b129afbc"}, + {file = "tomli-2.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0eea8cc5c5e9f89c9b90c4896a8deefc74f518db5927d0e0e8d4a80953d774d0"}, + {file = "tomli-2.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b74a0e59ec5d15127acdabd75ea17726ac4c5178ae51b85bfe39c4f8a278e879"}, + {file = "tomli-2.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:b5870b50c9db823c595983571d1296a6ff3e1b88f734a4c8f6fc6188397de005"}, + {file = 
"tomli-2.3.0-cp314-cp314-win32.whl", hash = "sha256:feb0dacc61170ed7ab602d3d972a58f14ee3ee60494292d384649a3dc38ef463"}, + {file = "tomli-2.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:b273fcbd7fc64dc3600c098e39136522650c49bca95df2d11cf3b626422392c8"}, + {file = "tomli-2.3.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:940d56ee0410fa17ee1f12b817b37a4d4e4dc4d27340863cc67236c74f582e77"}, + {file = "tomli-2.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f85209946d1fe94416debbb88d00eb92ce9cd5266775424ff81bc959e001acaf"}, + {file = "tomli-2.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a56212bdcce682e56b0aaf79e869ba5d15a6163f88d5451cbde388d48b13f530"}, + {file = "tomli-2.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c5f3ffd1e098dfc032d4d3af5c0ac64f6d286d98bc148698356847b80fa4de1b"}, + {file = "tomli-2.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5e01decd096b1530d97d5d85cb4dff4af2d8347bd35686654a004f8dea20fc67"}, + {file = "tomli-2.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:8a35dd0e643bb2610f156cca8db95d213a90015c11fee76c946aa62b7ae7e02f"}, + {file = "tomli-2.3.0-cp314-cp314t-win32.whl", hash = "sha256:a1f7f282fe248311650081faafa5f4732bdbfef5d45fe3f2e702fbc6f2d496e0"}, + {file = "tomli-2.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:70a251f8d4ba2d9ac2542eecf008b3c8a9fc5c3f9f02c56a9d7952612be2fdba"}, + {file = "tomli-2.3.0-py3-none-any.whl", hash = "sha256:e95b1af3c5b07d9e643909b5abbec77cd9f1217e6d0bca72b0234736b9fb1f1b"}, + {file = "tomli-2.3.0.tar.gz", hash = "sha256:64be704a875d2a59753d80ee8a533c3fe183e3f06807ff7dc2232938ccb01549"}, ] [[package]] @@ -558,4 +557,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "3c4bf0b75d27d2ce3738ca3d6b64dfb03909c56fb8e280a98890abd4619134d8" +content-hash = "8551b871abee465e23fb0966d51f2c155fd257b55bdcb0c02d095de19f92f358" diff --git a/pyproject.toml b/pyproject.toml index 68c4a4b..633c40c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -3,7 +3,7 @@ name = "anduril-lattice-sdk" [tool.poetry] name = "anduril-lattice-sdk" -version = "2.3.0" +version = "3.0.1" description = "HTTP clients for the Anduril Lattice SDK" readme = "README.md" authors = [ @@ -35,15 +35,14 @@ packages = [ { include = "anduril", from = "src"} ] -[project.urls] +[tool.poetry.urls] Documentation = 'https://developer.anduril.com' Homepage = 'https://www.anduril.com/lattice-sdk/' -Repository = 'https://github.com/anduril/lattice-sdk-python' +Repository = 'https://github.com/fern-api/lattice-sdk-python' [tool.poetry.dependencies] python = "^3.8" httpx = ">=0.21.2" -httpx-sse = "0.4.0" pydantic = ">= 1.9.2" pydantic-core = ">=2.18.2" typing_extensions = ">= 4.0.0" diff --git a/requirements.txt b/requirements.txt index f129cb3..e80f640 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,4 @@ httpx>=0.21.2 -httpx-sse==0.4.0 pydantic>= 1.9.2 pydantic-core>=2.18.2 typing_extensions>= 4.0.0 diff --git a/src/anduril/core/client_wrapper.py b/src/anduril/core/client_wrapper.py index b258a72..979c49d 100644 --- a/src/anduril/core/client_wrapper.py +++ b/src/anduril/core/client_wrapper.py @@ -22,10 +22,10 @@ def __init__( def get_headers(self) -> typing.Dict[str, str]: headers: typing.Dict[str, str] = { - "User-Agent": "anduril-lattice-sdk/2.3.0", + "User-Agent": "anduril-lattice-sdk/3.0.1", "X-Fern-Language": "Python", "X-Fern-SDK-Name": "anduril-lattice-sdk", - "X-Fern-SDK-Version": 
"2.3.0", + "X-Fern-SDK-Version": "3.0.1", **(self.get_custom_headers() or {}), } token = self._get_token() diff --git a/src/anduril/core/http_sse/__init__.py b/src/anduril/core/http_sse/__init__.py new file mode 100644 index 0000000..730e5a3 --- /dev/null +++ b/src/anduril/core/http_sse/__init__.py @@ -0,0 +1,42 @@ +# This file was auto-generated by Fern from our API Definition. + +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from ._api import EventSource, aconnect_sse, connect_sse + from ._exceptions import SSEError + from ._models import ServerSentEvent +_dynamic_imports: typing.Dict[str, str] = { + "EventSource": "._api", + "SSEError": "._exceptions", + "ServerSentEvent": "._models", + "aconnect_sse": "._api", + "connect_sse": "._api", +} + + +def __getattr__(attr_name: str) -> typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError(f"No {attr_name} found in _dynamic_imports for module name -> {__name__}") + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + return module + else: + return getattr(module, attr_name) + except ImportError as e: + raise ImportError(f"Failed to import {attr_name} from {module_name}: {e}") from e + except AttributeError as e: + raise AttributeError(f"Failed to get {attr_name} from {module_name}: {e}") from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return sorted(lazy_attrs) + + +__all__ = ["EventSource", "SSEError", "ServerSentEvent", "aconnect_sse", "connect_sse"] diff --git a/src/anduril/core/http_sse/_api.py b/src/anduril/core/http_sse/_api.py new file mode 100644 index 0000000..f900b3b --- /dev/null +++ b/src/anduril/core/http_sse/_api.py @@ -0,0 +1,112 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import re +from contextlib import asynccontextmanager, contextmanager +from typing import Any, AsyncGenerator, AsyncIterator, Iterator, cast + +import httpx +from ._decoders import SSEDecoder +from ._exceptions import SSEError +from ._models import ServerSentEvent + + +class EventSource: + def __init__(self, response: httpx.Response) -> None: + self._response = response + + def _check_content_type(self) -> None: + content_type = self._response.headers.get("content-type", "").partition(";")[0] + if "text/event-stream" not in content_type: + raise SSEError( + f"Expected response header Content-Type to contain 'text/event-stream', got {content_type!r}" + ) + + def _get_charset(self) -> str: + """Extract charset from Content-Type header, fallback to UTF-8.""" + content_type = self._response.headers.get("content-type", "") + + # Parse charset parameter using regex + charset_match = re.search(r"charset=([^;\s]+)", content_type, re.IGNORECASE) + if charset_match: + charset = charset_match.group(1).strip("\"'") + # Validate that it's a known encoding + try: + # Test if the charset is valid by trying to encode/decode + "test".encode(charset).decode(charset) + return charset + except (LookupError, UnicodeError): + # If charset is invalid, fall back to UTF-8 + pass + + # Default to UTF-8 if no charset specified or invalid charset + return "utf-8" + + @property + def response(self) -> httpx.Response: + return self._response + + def iter_sse(self) -> Iterator[ServerSentEvent]: + self._check_content_type() + decoder = SSEDecoder() + charset = self._get_charset() + + buffer = "" + for chunk in self._response.iter_bytes(): + # Decode chunk using detected charset + text_chunk = chunk.decode(charset, errors="replace") + buffer += text_chunk + + # Process complete lines + while "\n" in buffer: + line, buffer = buffer.split("\n", 1) + line = line.rstrip("\r") + sse = decoder.decode(line) + # when we reach a "\n\n" => line = '' + # => decoder will attempt to return an SSE Event + if sse is not None: + yield sse + + # Process any remaining data in buffer + if buffer.strip(): + line = buffer.rstrip("\r") + sse = decoder.decode(line) + if sse is not None: + yield sse + + async def aiter_sse(self) -> AsyncGenerator[ServerSentEvent, None]: + self._check_content_type() + decoder = SSEDecoder() + lines = cast(AsyncGenerator[str, None], self._response.aiter_lines()) + try: + async for line in lines: + line = line.rstrip("\n") + sse = decoder.decode(line) + if sse is not None: + yield sse + finally: + await lines.aclose() + + +@contextmanager +def connect_sse(client: httpx.Client, method: str, url: str, **kwargs: Any) -> Iterator[EventSource]: + headers = kwargs.pop("headers", {}) + headers["Accept"] = "text/event-stream" + headers["Cache-Control"] = "no-store" + + with client.stream(method, url, headers=headers, **kwargs) as response: + yield EventSource(response) + + +@asynccontextmanager +async def aconnect_sse( + client: httpx.AsyncClient, + method: str, + url: str, + **kwargs: Any, +) -> AsyncIterator[EventSource]: + headers = kwargs.pop("headers", {}) + headers["Accept"] = "text/event-stream" + headers["Cache-Control"] = "no-store" + + async with client.stream(method, url, headers=headers, **kwargs) as response: + yield EventSource(response) diff --git a/src/anduril/core/http_sse/_decoders.py b/src/anduril/core/http_sse/_decoders.py new file mode 100644 index 0000000..339b089 --- /dev/null +++ b/src/anduril/core/http_sse/_decoders.py @@ -0,0 +1,61 @@ +# This file was auto-generated by Fern from our API 
Definition. + +from typing import List, Optional + +from ._models import ServerSentEvent + + +class SSEDecoder: + def __init__(self) -> None: + self._event = "" + self._data: List[str] = [] + self._last_event_id = "" + self._retry: Optional[int] = None + + def decode(self, line: str) -> Optional[ServerSentEvent]: + # See: https://html.spec.whatwg.org/multipage/server-sent-events.html#event-stream-interpretation # noqa: E501 + + if not line: + if not self._event and not self._data and not self._last_event_id and self._retry is None: + return None + + sse = ServerSentEvent( + event=self._event, + data="\n".join(self._data), + id=self._last_event_id, + retry=self._retry, + ) + + # NOTE: as per the SSE spec, do not reset last_event_id. + self._event = "" + self._data = [] + self._retry = None + + return sse + + if line.startswith(":"): + return None + + fieldname, _, value = line.partition(":") + + if value.startswith(" "): + value = value[1:] + + if fieldname == "event": + self._event = value + elif fieldname == "data": + self._data.append(value) + elif fieldname == "id": + if "\0" in value: + pass + else: + self._last_event_id = value + elif fieldname == "retry": + try: + self._retry = int(value) + except (TypeError, ValueError): + pass + else: + pass # Field is ignored. + + return None diff --git a/src/anduril/core/http_sse/_exceptions.py b/src/anduril/core/http_sse/_exceptions.py new file mode 100644 index 0000000..81605a8 --- /dev/null +++ b/src/anduril/core/http_sse/_exceptions.py @@ -0,0 +1,7 @@ +# This file was auto-generated by Fern from our API Definition. + +import httpx + + +class SSEError(httpx.TransportError): + pass diff --git a/src/anduril/core/http_sse/_models.py b/src/anduril/core/http_sse/_models.py new file mode 100644 index 0000000..1af57f8 --- /dev/null +++ b/src/anduril/core/http_sse/_models.py @@ -0,0 +1,17 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import json +from dataclasses import dataclass +from typing import Any, Optional + + +@dataclass(frozen=True) +class ServerSentEvent: + event: str = "message" + data: str = "" + id: str = "" + retry: Optional[int] = None + + def json(self) -> Any: + """Parse the data field as JSON.""" + return json.loads(self.data) diff --git a/src/anduril/core/pydantic_utilities.py b/src/anduril/core/pydantic_utilities.py index 8906cdf..185e5c4 100644 --- a/src/anduril/core/pydantic_utilities.py +++ b/src/anduril/core/pydantic_utilities.py @@ -220,7 +220,9 @@ def universal_root_validator( ) -> Callable[[AnyCallable], AnyCallable]: def decorator(func: AnyCallable) -> AnyCallable: if IS_PYDANTIC_V2: - return cast(AnyCallable, pydantic.model_validator(mode="before" if pre else "after")(func)) # type: ignore[attr-defined] + # In Pydantic v2, for RootModel we always use "before" mode + # The custom validators transform the input value before the model is created + return cast(AnyCallable, pydantic.model_validator(mode="before")(func)) # type: ignore[attr-defined] return cast(AnyCallable, pydantic.root_validator(pre=pre)(func)) # type: ignore[call-overload] return decorator diff --git a/src/anduril/entities/raw_client.py b/src/anduril/entities/raw_client.py index 3cce608..b32e724 100644 --- a/src/anduril/entities/raw_client.py +++ b/src/anduril/entities/raw_client.py @@ -2,14 +2,14 @@ import contextlib import datetime as dt -import json import typing from json.decoder import JSONDecodeError +from logging import error, warning -import httpx_sse from ..core.api_error import ApiError from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ..core.http_response import AsyncHttpResponse, HttpResponse +from ..core.http_sse._api import EventSource from ..core.jsonable_encoder import jsonable_encoder from ..core.pydantic_utilities import parse_obj_as from ..core.request_options import RequestOptions @@ -374,9 +374,9 @@ def publish_entity( raise BadRequestError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -385,9 +385,9 @@ def publish_entity( raise UnauthorizedError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -433,9 +433,9 @@ def get_entity( raise BadRequestError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -444,9 +444,9 @@ def get_entity( raise UnauthorizedError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -455,9 +455,9 @@ def get_entity( raise NotFoundError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -537,9 +537,9 @@ def override_entity( raise BadRequestError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - 
type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -548,9 +548,9 @@ def override_entity( raise UnauthorizedError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -559,9 +559,9 @@ def override_entity( raise NotFoundError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -612,9 +612,9 @@ def remove_entity_override( raise BadRequestError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -623,9 +623,9 @@ def remove_entity_override( raise UnauthorizedError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -634,9 +634,9 @@ def remove_entity_override( raise NotFoundError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -707,9 +707,9 @@ def long_poll_entity_events( raise BadRequestError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -718,9 +718,9 @@ def long_poll_entity_events( raise UnauthorizedError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -729,9 +729,9 @@ def long_poll_entity_events( raise NotFoundError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -740,9 +740,9 @@ def long_poll_entity_events( raise RequestTimeoutError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -751,9 +751,9 @@ def long_poll_entity_events( raise TooManyRequestsError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -814,7 +814,7 @@ def _stream() -> HttpResponse[typing.Iterator[StreamEntitiesResponse]]: if 200 <= _response.status_code < 300: def _iter(): - _event_source = httpx_sse.EventSource(_response) + _event_source = EventSource(_response) for _sse in _event_source.iter_sse(): if _sse.data == None: return @@ -823,11 +823,19 @@ def _iter(): StreamEntitiesResponse, parse_obj_as( type_=StreamEntitiesResponse, # type: ignore - 
object_=json.loads(_sse.data), + object_=_sse.json(), ), ) - except Exception: - pass + except JSONDecodeError as e: + warning(f"Skipping SSE event with invalid JSON: {e}, sse: {_sse!r}") + except (TypeError, ValueError, KeyError, AttributeError) as e: + warning( + f"Skipping SSE event due to model construction error: {type(e).__name__}: {e}, sse: {_sse!r}" + ) + except Exception as e: + error( + f"Unexpected error processing SSE event: {type(e).__name__}: {e}, sse: {_sse!r}" + ) return return HttpResponse(response=_response, data=_iter()) @@ -836,9 +844,9 @@ def _iter(): raise BadRequestError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -847,9 +855,9 @@ def _iter(): raise UnauthorizedError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -1179,9 +1187,9 @@ async def publish_entity( raise BadRequestError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -1190,9 +1198,9 @@ async def publish_entity( raise UnauthorizedError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -1238,9 +1246,9 @@ async def get_entity( raise BadRequestError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -1249,9 +1257,9 @@ async def get_entity( raise UnauthorizedError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -1260,9 +1268,9 @@ async def get_entity( raise NotFoundError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -1342,9 +1350,9 @@ async def override_entity( raise BadRequestError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -1353,9 +1361,9 @@ async def override_entity( raise UnauthorizedError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -1364,9 +1372,9 @@ async def override_entity( raise NotFoundError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -1417,9 +1425,9 @@ async def 
remove_entity_override( raise BadRequestError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -1428,9 +1436,9 @@ async def remove_entity_override( raise UnauthorizedError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -1439,9 +1447,9 @@ async def remove_entity_override( raise NotFoundError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -1512,9 +1520,9 @@ async def long_poll_entity_events( raise BadRequestError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -1523,9 +1531,9 @@ async def long_poll_entity_events( raise UnauthorizedError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -1534,9 +1542,9 @@ async def long_poll_entity_events( raise NotFoundError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -1545,9 +1553,9 @@ async def long_poll_entity_events( raise RequestTimeoutError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -1556,9 +1564,9 @@ async def long_poll_entity_events( raise TooManyRequestsError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -1619,7 +1627,7 @@ async def _stream() -> AsyncHttpResponse[typing.AsyncIterator[StreamEntitiesResp if 200 <= _response.status_code < 300: async def _iter(): - _event_source = httpx_sse.EventSource(_response) + _event_source = EventSource(_response) async for _sse in _event_source.aiter_sse(): if _sse.data == None: return @@ -1628,11 +1636,19 @@ async def _iter(): StreamEntitiesResponse, parse_obj_as( type_=StreamEntitiesResponse, # type: ignore - object_=json.loads(_sse.data), + object_=_sse.json(), ), ) - except Exception: - pass + except JSONDecodeError as e: + warning(f"Skipping SSE event with invalid JSON: {e}, sse: {_sse!r}") + except (TypeError, ValueError, KeyError, AttributeError) as e: + warning( + f"Skipping SSE event due to model construction error: {type(e).__name__}: {e}, sse: {_sse!r}" + ) + except Exception as e: + error( + f"Unexpected error processing SSE event: {type(e).__name__}: {e}, sse: {_sse!r}" + ) return return AsyncHttpResponse(response=_response, data=_iter()) @@ -1641,9 +1657,9 @@ async def _iter(): raise BadRequestError( headers=dict(_response.headers), 
body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -1652,9 +1668,9 @@ async def _iter(): raise UnauthorizedError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), diff --git a/src/anduril/entities/types/stream_entities_response.py b/src/anduril/entities/types/stream_entities_response.py index 068e8e0..abf5634 100644 --- a/src/anduril/entities/types/stream_entities_response.py +++ b/src/anduril/entities/types/stream_entities_response.py @@ -56,5 +56,7 @@ class Config: from ...types.override import Override # noqa: E402, F401, I001 from ...types.overrides import Overrides # noqa: E402, F401, I001 -StreamEntitiesResponse = typing.Union[StreamEntitiesResponse_Heartbeat, StreamEntitiesResponse_Entity] +StreamEntitiesResponse = typing_extensions.Annotated[ + typing.Union[StreamEntitiesResponse_Heartbeat, StreamEntitiesResponse_Entity], pydantic.Field(discriminator="event") +] update_forward_refs(StreamEntitiesResponse_Entity) diff --git a/src/anduril/errors/bad_request_error.py b/src/anduril/errors/bad_request_error.py index baf5be4..ec78e26 100644 --- a/src/anduril/errors/bad_request_error.py +++ b/src/anduril/errors/bad_request_error.py @@ -6,5 +6,5 @@ class BadRequestError(ApiError): - def __init__(self, body: typing.Optional[typing.Any], headers: typing.Optional[typing.Dict[str, str]] = None): + def __init__(self, body: typing.Any, headers: typing.Optional[typing.Dict[str, str]] = None): super().__init__(status_code=400, headers=headers, body=body) diff --git a/src/anduril/errors/content_too_large_error.py b/src/anduril/errors/content_too_large_error.py index 2334086..28d4f48 100644 --- a/src/anduril/errors/content_too_large_error.py +++ b/src/anduril/errors/content_too_large_error.py @@ -6,5 +6,5 @@ class ContentTooLargeError(ApiError): - def __init__(self, body: typing.Optional[typing.Any], headers: typing.Optional[typing.Dict[str, str]] = None): + def __init__(self, body: typing.Any, headers: typing.Optional[typing.Dict[str, str]] = None): super().__init__(status_code=413, headers=headers, body=body) diff --git a/src/anduril/errors/insufficient_storage_error.py b/src/anduril/errors/insufficient_storage_error.py index fd084e4..f0dc628 100644 --- a/src/anduril/errors/insufficient_storage_error.py +++ b/src/anduril/errors/insufficient_storage_error.py @@ -6,5 +6,5 @@ class InsufficientStorageError(ApiError): - def __init__(self, body: typing.Optional[typing.Any], headers: typing.Optional[typing.Dict[str, str]] = None): + def __init__(self, body: typing.Any, headers: typing.Optional[typing.Dict[str, str]] = None): super().__init__(status_code=507, headers=headers, body=body) diff --git a/src/anduril/errors/internal_server_error.py b/src/anduril/errors/internal_server_error.py index 14313ab..fabcc45 100644 --- a/src/anduril/errors/internal_server_error.py +++ b/src/anduril/errors/internal_server_error.py @@ -6,5 +6,5 @@ class InternalServerError(ApiError): - def __init__(self, body: typing.Optional[typing.Any], headers: typing.Optional[typing.Dict[str, str]] = None): + def __init__(self, body: typing.Any, headers: typing.Optional[typing.Dict[str, str]] = None): super().__init__(status_code=500, headers=headers, body=body) diff --git a/src/anduril/errors/not_found_error.py 
b/src/anduril/errors/not_found_error.py index dcd60e3..75f557d 100644 --- a/src/anduril/errors/not_found_error.py +++ b/src/anduril/errors/not_found_error.py @@ -6,5 +6,5 @@ class NotFoundError(ApiError): - def __init__(self, body: typing.Optional[typing.Any], headers: typing.Optional[typing.Dict[str, str]] = None): + def __init__(self, body: typing.Any, headers: typing.Optional[typing.Dict[str, str]] = None): super().__init__(status_code=404, headers=headers, body=body) diff --git a/src/anduril/errors/request_timeout_error.py b/src/anduril/errors/request_timeout_error.py index df4d2d4..8406576 100644 --- a/src/anduril/errors/request_timeout_error.py +++ b/src/anduril/errors/request_timeout_error.py @@ -6,5 +6,5 @@ class RequestTimeoutError(ApiError): - def __init__(self, body: typing.Optional[typing.Any], headers: typing.Optional[typing.Dict[str, str]] = None): + def __init__(self, body: typing.Any, headers: typing.Optional[typing.Dict[str, str]] = None): super().__init__(status_code=408, headers=headers, body=body) diff --git a/src/anduril/errors/too_many_requests_error.py b/src/anduril/errors/too_many_requests_error.py index 2705399..705d6f1 100644 --- a/src/anduril/errors/too_many_requests_error.py +++ b/src/anduril/errors/too_many_requests_error.py @@ -6,5 +6,5 @@ class TooManyRequestsError(ApiError): - def __init__(self, body: typing.Optional[typing.Any], headers: typing.Optional[typing.Dict[str, str]] = None): + def __init__(self, body: typing.Any, headers: typing.Optional[typing.Dict[str, str]] = None): super().__init__(status_code=429, headers=headers, body=body) diff --git a/src/anduril/errors/unauthorized_error.py b/src/anduril/errors/unauthorized_error.py index c83b25c..7e48bb6 100644 --- a/src/anduril/errors/unauthorized_error.py +++ b/src/anduril/errors/unauthorized_error.py @@ -6,5 +6,5 @@ class UnauthorizedError(ApiError): - def __init__(self, body: typing.Optional[typing.Any], headers: typing.Optional[typing.Dict[str, str]] = None): + def __init__(self, body: typing.Any, headers: typing.Optional[typing.Dict[str, str]] = None): super().__init__(status_code=401, headers=headers, body=body) diff --git a/src/anduril/objects/raw_client.py b/src/anduril/objects/raw_client.py index a93531b..c21b31b 100644 --- a/src/anduril/objects/raw_client.py +++ b/src/anduril/objects/raw_client.py @@ -102,9 +102,9 @@ def list_objects( raise BadRequestError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -113,9 +113,9 @@ def list_objects( raise UnauthorizedError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -124,9 +124,9 @@ def list_objects( raise InternalServerError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -189,9 +189,9 @@ def _stream() -> HttpResponse[typing.Iterator[bytes]]: raise BadRequestError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -200,9 +200,9 
@@ def _stream() -> HttpResponse[typing.Iterator[bytes]]: raise UnauthorizedError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -211,9 +211,9 @@ def _stream() -> HttpResponse[typing.Iterator[bytes]]: raise NotFoundError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -222,9 +222,9 @@ def _stream() -> HttpResponse[typing.Iterator[bytes]]: raise InternalServerError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -287,9 +287,9 @@ def upload_object( raise BadRequestError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -298,9 +298,9 @@ def upload_object( raise UnauthorizedError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -309,9 +309,9 @@ def upload_object( raise ContentTooLargeError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -320,9 +320,9 @@ def upload_object( raise InternalServerError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -331,9 +331,9 @@ def upload_object( raise InsufficientStorageError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -373,9 +373,9 @@ def delete_object( raise BadRequestError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -384,9 +384,9 @@ def delete_object( raise UnauthorizedError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -395,9 +395,9 @@ def delete_object( raise NotFoundError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -406,9 +406,9 @@ def delete_object( raise InternalServerError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + 
type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -448,9 +448,9 @@ def get_object_metadata( raise BadRequestError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -459,9 +459,9 @@ def get_object_metadata( raise UnauthorizedError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -470,9 +470,9 @@ def get_object_metadata( raise InternalServerError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -561,9 +561,9 @@ async def _get_next(): raise BadRequestError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -572,9 +572,9 @@ async def _get_next(): raise UnauthorizedError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -583,9 +583,9 @@ async def _get_next(): raise InternalServerError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -649,9 +649,9 @@ async def _stream() -> AsyncHttpResponse[typing.AsyncIterator[bytes]]: raise BadRequestError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -660,9 +660,9 @@ async def _stream() -> AsyncHttpResponse[typing.AsyncIterator[bytes]]: raise UnauthorizedError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -671,9 +671,9 @@ async def _stream() -> AsyncHttpResponse[typing.AsyncIterator[bytes]]: raise NotFoundError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -682,9 +682,9 @@ async def _stream() -> AsyncHttpResponse[typing.AsyncIterator[bytes]]: raise InternalServerError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -747,9 +747,9 @@ async def upload_object( raise BadRequestError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -758,9 +758,9 @@ async 
def upload_object( raise UnauthorizedError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -769,9 +769,9 @@ async def upload_object( raise ContentTooLargeError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -780,9 +780,9 @@ async def upload_object( raise InternalServerError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -791,9 +791,9 @@ async def upload_object( raise InsufficientStorageError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -833,9 +833,9 @@ async def delete_object( raise BadRequestError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -844,9 +844,9 @@ async def delete_object( raise UnauthorizedError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -855,9 +855,9 @@ async def delete_object( raise NotFoundError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -866,9 +866,9 @@ async def delete_object( raise InternalServerError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -908,9 +908,9 @@ async def get_object_metadata( raise BadRequestError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -919,9 +919,9 @@ async def get_object_metadata( raise UnauthorizedError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), @@ -930,9 +930,9 @@ async def get_object_metadata( raise InternalServerError( headers=dict(_response.headers), body=typing.cast( - typing.Optional[typing.Any], + typing.Any, parse_obj_as( - type_=typing.Optional[typing.Any], # type: ignore + type_=typing.Any, # type: ignore object_=_response.json(), ), ), diff --git a/src/anduril/tasks/raw_client.py b/src/anduril/tasks/raw_client.py index 754d890..a326377 100644 --- a/src/anduril/tasks/raw_client.py +++ b/src/anduril/tasks/raw_client.py @@ -129,9 +129,9 @@ def create_task( raise BadRequestError( 
diff --git a/src/anduril/tasks/raw_client.py b/src/anduril/tasks/raw_client.py
index 754d890..a326377 100644
--- a/src/anduril/tasks/raw_client.py
+++ b/src/anduril/tasks/raw_client.py
@@ -129,9 +129,9 @@ def create_task(
                 raise BadRequestError(
                     headers=dict(_response.headers),
                     body=typing.cast(
-                        typing.Optional[typing.Any],
+                        typing.Any,
                         parse_obj_as(
-                            type_=typing.Optional[typing.Any],  # type: ignore
+                            type_=typing.Any,  # type: ignore
                             object_=_response.json(),
                         ),
                     ),
@@ -140,9 +140,9 @@ def create_task(
                 raise UnauthorizedError(
                     headers=dict(_response.headers),
                     body=typing.cast(
-                        typing.Optional[typing.Any],
+                        typing.Any,
                         parse_obj_as(
-                            type_=typing.Optional[typing.Any],  # type: ignore
+                            type_=typing.Any,  # type: ignore
                             object_=_response.json(),
                         ),
                     ),
@@ -186,9 +186,9 @@ def get_task(self, task_id: str, *, request_options: typing.Optional[RequestOpti
                 raise BadRequestError(
                     headers=dict(_response.headers),
                     body=typing.cast(
-                        typing.Optional[typing.Any],
+                        typing.Any,
                         parse_obj_as(
-                            type_=typing.Optional[typing.Any],  # type: ignore
+                            type_=typing.Any,  # type: ignore
                             object_=_response.json(),
                         ),
                     ),
@@ -197,9 +197,9 @@ def get_task(self, task_id: str, *, request_options: typing.Optional[RequestOpti
                 raise UnauthorizedError(
                     headers=dict(_response.headers),
                     body=typing.cast(
-                        typing.Optional[typing.Any],
+                        typing.Any,
                         parse_obj_as(
-                            type_=typing.Optional[typing.Any],  # type: ignore
+                            type_=typing.Any,  # type: ignore
                             object_=_response.json(),
                         ),
                     ),
@@ -208,9 +208,9 @@ def get_task(self, task_id: str, *, request_options: typing.Optional[RequestOpti
                 raise NotFoundError(
                     headers=dict(_response.headers),
                     body=typing.cast(
-                        typing.Optional[typing.Any],
+                        typing.Any,
                         parse_obj_as(
-                            type_=typing.Optional[typing.Any],  # type: ignore
+                            type_=typing.Any,  # type: ignore
                             object_=_response.json(),
                         ),
                     ),
@@ -288,9 +288,9 @@ def update_task_status(
                 raise BadRequestError(
                     headers=dict(_response.headers),
                     body=typing.cast(
-                        typing.Optional[typing.Any],
+                        typing.Any,
                         parse_obj_as(
-                            type_=typing.Optional[typing.Any],  # type: ignore
+                            type_=typing.Any,  # type: ignore
                             object_=_response.json(),
                         ),
                     ),
@@ -299,9 +299,9 @@ def update_task_status(
                 raise UnauthorizedError(
                     headers=dict(_response.headers),
                     body=typing.cast(
-                        typing.Optional[typing.Any],
+                        typing.Any,
                         parse_obj_as(
-                            type_=typing.Optional[typing.Any],  # type: ignore
+                            type_=typing.Any,  # type: ignore
                             object_=_response.json(),
                         ),
                     ),
@@ -310,9 +310,9 @@ def update_task_status(
                 raise NotFoundError(
                     headers=dict(_response.headers),
                     body=typing.cast(
-                        typing.Optional[typing.Any],
+                        typing.Any,
                         parse_obj_as(
-                            type_=typing.Optional[typing.Any],  # type: ignore
+                            type_=typing.Any,  # type: ignore
                             object_=_response.json(),
                         ),
                     ),
@@ -390,9 +390,9 @@ def query_tasks(
                 raise BadRequestError(
                     headers=dict(_response.headers),
                     body=typing.cast(
-                        typing.Optional[typing.Any],
+                        typing.Any,
                         parse_obj_as(
-                            type_=typing.Optional[typing.Any],  # type: ignore
+                            type_=typing.Any,  # type: ignore
                             object_=_response.json(),
                         ),
                     ),
@@ -401,9 +401,9 @@ def query_tasks(
                 raise UnauthorizedError(
                     headers=dict(_response.headers),
                     body=typing.cast(
-                        typing.Optional[typing.Any],
+                        typing.Any,
                         parse_obj_as(
-                            type_=typing.Optional[typing.Any],  # type: ignore
+                            type_=typing.Any,  # type: ignore
                             object_=_response.json(),
                         ),
                     ),
@@ -412,9 +412,9 @@ def query_tasks(
                 raise NotFoundError(
                     headers=dict(_response.headers),
                     body=typing.cast(
-                        typing.Optional[typing.Any],
+                        typing.Any,
                         parse_obj_as(
-                            type_=typing.Optional[typing.Any],  # type: ignore
+                            type_=typing.Any,  # type: ignore
                             object_=_response.json(),
                         ),
                     ),
@@ -476,9 +476,9 @@ def listen_as_agent(
                 raise BadRequestError(
                     headers=dict(_response.headers),
                     body=typing.cast(
-                        typing.Optional[typing.Any],
+                        typing.Any,
                         parse_obj_as(
-                            type_=typing.Optional[typing.Any],  # type: ignore
+                            type_=typing.Any,  # type: ignore
                             object_=_response.json(),
                         ),
                     ),
@@ -487,9 +487,9 @@ def listen_as_agent(
                 raise UnauthorizedError(
                     headers=dict(_response.headers),
                     body=typing.cast(
-                        typing.Optional[typing.Any],
+                        typing.Any,
                         parse_obj_as(
-                            type_=typing.Optional[typing.Any],  # type: ignore
+                            type_=typing.Any,  # type: ignore
                             object_=_response.json(),
                         ),
                     ),
@@ -600,9 +600,9 @@ async def create_task(
                 raise BadRequestError(
                     headers=dict(_response.headers),
                     body=typing.cast(
-                        typing.Optional[typing.Any],
+                        typing.Any,
                         parse_obj_as(
-                            type_=typing.Optional[typing.Any],  # type: ignore
+                            type_=typing.Any,  # type: ignore
                             object_=_response.json(),
                         ),
                     ),
@@ -611,9 +611,9 @@ async def create_task(
                 raise UnauthorizedError(
                     headers=dict(_response.headers),
                     body=typing.cast(
-                        typing.Optional[typing.Any],
+                        typing.Any,
                         parse_obj_as(
-                            typing.Optional[typing.Any],  # type: ignore
+                            type_=typing.Any,  # type: ignore
                             object_=_response.json(),
                         ),
                     ),
@@ -659,9 +659,9 @@ async def get_task(
                 raise BadRequestError(
                     headers=dict(_response.headers),
                     body=typing.cast(
-                        typing.Optional[typing.Any],
+                        typing.Any,
                         parse_obj_as(
-                            type_=typing.Optional[typing.Any],  # type: ignore
+                            type_=typing.Any,  # type: ignore
                             object_=_response.json(),
                         ),
                     ),
@@ -670,9 +670,9 @@ async def get_task(
                 raise UnauthorizedError(
                     headers=dict(_response.headers),
                     body=typing.cast(
-                        typing.Optional[typing.Any],
+                        typing.Any,
                         parse_obj_as(
-                            type_=typing.Optional[typing.Any],  # type: ignore
+                            type_=typing.Any,  # type: ignore
                             object_=_response.json(),
                         ),
                     ),
@@ -681,9 +681,9 @@ async def get_task(
                 raise NotFoundError(
                     headers=dict(_response.headers),
                     body=typing.cast(
-                        typing.Optional[typing.Any],
+                        typing.Any,
                         parse_obj_as(
-                            type_=typing.Optional[typing.Any],  # type: ignore
+                            type_=typing.Any,  # type: ignore
                             object_=_response.json(),
                         ),
                     ),
@@ -761,9 +761,9 @@ async def update_task_status(
                 raise BadRequestError(
                     headers=dict(_response.headers),
                     body=typing.cast(
-                        typing.Optional[typing.Any],
+                        typing.Any,
                         parse_obj_as(
-                            type_=typing.Optional[typing.Any],  # type: ignore
+                            type_=typing.Any,  # type: ignore
                             object_=_response.json(),
                         ),
                     ),
@@ -772,9 +772,9 @@ async def update_task_status(
                 raise UnauthorizedError(
                     headers=dict(_response.headers),
                     body=typing.cast(
-                        typing.Optional[typing.Any],
+                        typing.Any,
                         parse_obj_as(
-                            type_=typing.Optional[typing.Any],  # type: ignore
+                            type_=typing.Any,  # type: ignore
                             object_=_response.json(),
                         ),
                     ),
@@ -783,9 +783,9 @@ async def update_task_status(
                 raise NotFoundError(
                     headers=dict(_response.headers),
                     body=typing.cast(
-                        typing.Optional[typing.Any],
+                        typing.Any,
                         parse_obj_as(
-                            type_=typing.Optional[typing.Any],  # type: ignore
+                            type_=typing.Any,  # type: ignore
                             object_=_response.json(),
                         ),
                     ),
@@ -863,9 +863,9 @@ async def query_tasks(
                 raise BadRequestError(
                     headers=dict(_response.headers),
                     body=typing.cast(
-                        typing.Optional[typing.Any],
+                        typing.Any,
                         parse_obj_as(
-                            type_=typing.Optional[typing.Any],  # type: ignore
+                            type_=typing.Any,  # type: ignore
                             object_=_response.json(),
                         ),
                     ),
@@ -874,9 +874,9 @@ async def query_tasks(
                 raise UnauthorizedError(
                     headers=dict(_response.headers),
                     body=typing.cast(
-                        typing.Optional[typing.Any],
+                        typing.Any,
                         parse_obj_as(
-                            type_=typing.Optional[typing.Any],  # type: ignore
+                            type_=typing.Any,  # type: ignore
                             object_=_response.json(),
                         ),
                     ),
@@ -885,9 +885,9 @@ async def query_tasks(
                 raise NotFoundError(
                     headers=dict(_response.headers),
                     body=typing.cast(
-                        typing.Optional[typing.Any],
+                        typing.Any,
                         parse_obj_as(
-                            type_=typing.Optional[typing.Any],  # type: ignore
+                            type_=typing.Any,  # type: ignore
                             object_=_response.json(),
                         ),
                     ),
@@ -949,9 +949,9 @@ async def listen_as_agent(
                 raise BadRequestError(
                     headers=dict(_response.headers),
                     body=typing.cast(
-                        typing.Optional[typing.Any],
+                        typing.Any,
                         parse_obj_as(
-                            type_=typing.Optional[typing.Any],  # type: ignore
+                            type_=typing.Any,  # type: ignore
                             object_=_response.json(),
                         ),
                     ),
@@ -960,9 +960,9 @@ async def listen_as_agent(
                 raise UnauthorizedError(
                     headers=dict(_response.headers),
                     body=typing.cast(
-                        typing.Optional[typing.Any],
+                        typing.Any,
                         parse_obj_as(
-                            type_=typing.Optional[typing.Any],  # type: ignore
+                            type_=typing.Any,  # type: ignore
                             object_=_response.json(),
                         ),
                     ),
diff --git a/src/anduril/types/pose.py b/src/anduril/types/pose.py
index 54e2d7d..a99d3b7 100644
--- a/src/anduril/types/pose.py
+++ b/src/anduril/types/pose.py
@@ -30,9 +30,9 @@ class Pose(UniversalBaseModel):
     Implementations of this quaternion should left multiply this quaternion to transform a point from the Pose frame
     to the enu frame.
 
-    Point posePt{1,0,0};
-    Rotation attPoseToEnu{};
-    Point = attPoseToEnu*posePt;
+        Point posePt{1,0,0};
+        Rotation attPoseToEnu{};
+        Point = attPoseToEnu*posePt;
 
     This transformed point represents some vector in ENU space that is aligned with the x axis of the attPoseToEnu
     matrix.
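The re-indented docstring in the pose.py hunk above expresses the quaternion convention as C++-style pseudocode. Purely as a self-contained illustration of that left-multiplication convention (the helper names below are not taken from the SDK), rotating the pose-frame x-axis posePt{1,0,0} into ENU looks like:

from typing import Tuple

Quaternion = Tuple[float, float, float, float]  # (w, x, y, z), assumed unit-norm


def quat_multiply(a: Quaternion, b: Quaternion) -> Quaternion:
    # Hamilton product of two quaternions.
    aw, ax, ay, az = a
    bw, bx, by, bz = b
    return (
        aw * bw - ax * bx - ay * by - az * bz,
        aw * bx + ax * bw + ay * bz - az * by,
        aw * by - ax * bz + ay * bw + az * bx,
        aw * bz + ax * by - ay * bx + az * bw,
    )


def rotate_pose_point_to_enu(att_pose_to_enu: Quaternion, pose_pt: Tuple[float, float, float]) -> Tuple[float, float, float]:
    # Left-multiply by the attitude quaternion: p_enu = q * p * q^-1.
    w, x, y, z = att_pose_to_enu
    q_inv = (w, -x, -y, -z)  # conjugate equals inverse for a unit quaternion
    p = (0.0, *pose_pt)  # embed the point as a pure quaternion
    rotated = quat_multiply(quat_multiply(att_pose_to_enu, p), q_inv)
    return rotated[1], rotated[2], rotated[3]


# Rotating the pose-frame x-axis, as in the docstring's posePt{1,0,0} example:
enu_vector = rotate_pose_point_to_enu((1.0, 0.0, 0.0, 0.0), (1.0, 0.0, 0.0))
print(enu_vector)  # identity attitude leaves the x-axis unchanged: (1.0, 0.0, 0.0)
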
diff --git a/src/anduril/types/status.py b/src/anduril/types/status.py
index 0d92329..671df1e 100644
--- a/src/anduril/types/status.py
+++ b/src/anduril/types/status.py
@@ -3,28 +3,27 @@
 import typing
 
 import pydantic
+import typing_extensions
 from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
-from .google_protobuf_any import GoogleProtobufAny
+from ..core.serialization import FieldMetadata
 
 
 class Status(UniversalBaseModel):
     """
-    The `Status` type defines a logical error model that is suitable for different programming environments, including REST APIs and RPC APIs. It is used by [gRPC](https://github.com/grpc). Each `Status` message contains three pieces of data: error code, error message, and error details. You can find out more about this error model and how to work with it in the [API Design Guide](https://cloud.google.com/apis/design/errors).
+    Contains status of entities.
     """
 
-    code: typing.Optional[int] = pydantic.Field(default=None)
+    platform_activity: typing_extensions.Annotated[typing.Optional[str], FieldMetadata(alias="platformActivity")] = (
+        pydantic.Field(default=None)
+    )
     """
-    The status code, which should be an enum value of [google.rpc.Code][google.rpc.Code].
+    A string that describes the activity that the entity is performing.
+    Examples include "RECONNAISSANCE", "INTERDICTION", "RETURN TO BASE (RTB)", "PREPARING FOR LAUNCH".
     """
 
-    message: typing.Optional[str] = pydantic.Field(default=None)
+    role: typing.Optional[str] = pydantic.Field(default=None)
     """
-    A developer-facing error message, which should be in English. Any user-facing error message should be localized and sent in the [google.rpc.Status.details][google.rpc.Status.details] field, or localized by the client.
-    """
-
-    details: typing.Optional[typing.List[GoogleProtobufAny]] = pydantic.Field(default=None)
-    """
-    A list of messages that carry the error details. There is a common set of message types for APIs to use.
+    A human-readable string that describes the role the entity is currently performing. E.g. "Team Member", "Commander".
     """
 
     if IS_PYDANTIC_V2: