diff --git a/.github/workflows/pr.yml b/.github/workflows/pr.yml index 7a4a7750450..33c0eb56237 100644 --- a/.github/workflows/pr.yml +++ b/.github/workflows/pr.yml @@ -3,11 +3,6 @@ name: Pull Request on: pull_request: types: [opened, synchronize] - branches: - - develop - - feat/* - - main - - release/v[0-9]+.[0-9]+.[0-9]+ concurrency: group: ${{ github.workflow }}-${{ github.ref }} diff --git a/app/package.json b/app/package.json index ae7ef97f042..6518070803b 100644 --- a/app/package.json +++ b/app/package.json @@ -56,5 +56,8 @@ "colormap": "^2.3.2", "react-player": "^2.16.0", "react-plotly.js": "^2.6.0" + }, + "resolutions": { + "brace-expansion": "^2.0.2" } } diff --git a/app/packages/embeddings/package.json b/app/packages/embeddings/package.json index 2eae2e6065d..4919cb4b3ac 100644 --- a/app/packages/embeddings/package.json +++ b/app/packages/embeddings/package.json @@ -16,7 +16,7 @@ "@fiftyone/components": "*", "@fiftyone/plugins": "*", "@fiftyone/state": "*", - "plotly.js": "^3.0.1", + "plotly.js": "^3.1.1", "use-resize-observer": "^9.0.2" }, "devDependencies": { diff --git a/app/packages/looker-3d/src/hooks/use-fo-loaders.ts b/app/packages/looker-3d/src/hooks/use-fo-loaders.ts index b1082cdb628..47f29d7d2b8 100644 --- a/app/packages/looker-3d/src/hooks/use-fo-loaders.ts +++ b/app/packages/looker-3d/src/hooks/use-fo-loaders.ts @@ -1,5 +1,13 @@ import { useLoader } from "@react-three/fiber"; +function ensureArray<T>(value: T | T[]): T[] { + if (Array.isArray(value)) { + return value; + } else { + return [value]; + } +} + /** * Decorates useLoader() to support credentials forwarding */ @@ -12,8 +20,17 @@ export function useFoLoader< loaderFunction?: Parameters[2] ) { return useLoader(loader, urls, (loaderInstance) => { - if (sessionStorage.getItem("customCredentialsAudience")?.length) { - loaderInstance.setWithCredentials(true); + const customCredentialsAudience = sessionStorage.getItem( + "customCredentialsAudience" + ); + if (customCredentialsAudience) { + // The types say that `urls` is string | string[] + // But!
Our code also sometimes passes in string[][] + // So, we're both calling ensureArray() and flat() + const urlArray = ensureArray(urls).flat(); + if (urlArray.some((url) => url.includes(customCredentialsAudience))) { + loaderInstance.setWithCredentials(true); + } } if (loaderFunction) { loaderFunction(loaderInstance); diff --git a/app/yarn.lock b/app/yarn.lock index b530c53a3fb..2026afe2330 100644 --- a/app/yarn.lock +++ b/app/yarn.lock @@ -2389,7 +2389,7 @@ __metadata: "@fiftyone/components": "npm:*" "@fiftyone/plugins": "npm:*" "@fiftyone/state": "npm:*" - plotly.js: "npm:^3.0.1" + plotly.js: "npm:^3.1.1" typescript: "npm:^4.7.4" use-resize-observer: "npm:^9.0.2" vite: "npm:^5.4.20" @@ -3869,6 +3869,13 @@ __metadata: languageName: node linkType: hard +"@plotly/regl@npm:^2.1.2": + version: 2.1.2 + resolution: "@plotly/regl@npm:2.1.2" + checksum: 10/1e951d5161c3991efb98e3d33a38089306b4c2119d337d7f128d9474f2ff5eb91dd2fce7a0ca290e459432f10568f65ff3db37122c558fe55a270890093ca661 + languageName: node + linkType: hard + "@polka/url@npm:^1.0.0-next.24": version: 1.0.0-next.25 resolution: "@polka/url@npm:1.0.0-next.25" @@ -5451,13 +5458,6 @@ __metadata: languageName: node linkType: hard -"@types/less@npm:^3.0.3": - version: 3.0.8 - resolution: "@types/less@npm:3.0.8" - checksum: 10/7b4dd5d1fec813883c0f5e659a06cc0634da163925f3c9246cefcfb98f99682fd7135e2eb8dbba66ddfd582a8776a44ed78e06ebbfd89682771c8f12ebfe1146 - languageName: node - linkType: hard - "@types/lodash@npm:^4.14.182": version: 4.17.0 resolution: "@types/lodash@npm:4.17.0" @@ -5778,15 +5778,6 @@ __metadata: languageName: node linkType: hard -"@types/sass@npm:^1.43.1": - version: 1.43.1 - resolution: "@types/sass@npm:1.43.1" - dependencies: - "@types/node": "npm:*" - checksum: 10/8d0e839e29e7127013d14a24e1f97c7535cc8ac193dd00e14b035ccdda13c619f15bf3872aa8fc84b267eb65a984f0a45911b29ccd8c46a0e47834640b958212 - languageName: node - linkType: hard - "@types/scheduler@npm:*": version: 0.16.8 resolution: "@types/scheduler@npm:0.16.8" @@ -5847,15 +5838,6 @@ __metadata: languageName: node linkType: hard -"@types/stylus@npm:^0.48.38": - version: 0.48.43 - resolution: "@types/stylus@npm:0.48.43" - dependencies: - "@types/node": "npm:*" - checksum: 10/687c93767d105847947c1acc00ce7261a7bab17b7820bc9ae1920fd702a4b82a751df3adc30f7956db310f9801f8913642748e58c21983a2bd403128c5193a1d - languageName: node - linkType: hard - "@types/supercluster@npm:^7.1.3": version: 7.1.3 resolution: "@types/supercluster@npm:7.1.3" @@ -7136,22 +7118,12 @@ __metadata: languageName: node linkType: hard -"brace-expansion@npm:^1.1.7": - version: 1.1.11 - resolution: "brace-expansion@npm:1.1.11" - dependencies: - balanced-match: "npm:^1.0.0" - concat-map: "npm:0.0.1" - checksum: 10/faf34a7bb0c3fcf4b59c7808bc5d2a96a40988addf2e7e09dfbb67a2251800e0d14cd2bfc1aa79174f2f5095c54ff27f46fb1289fe2d77dac755b5eb3434cc07 - languageName: node - linkType: hard - -"brace-expansion@npm:^2.0.1": - version: 2.0.1 - resolution: "brace-expansion@npm:2.0.1" +"brace-expansion@npm:^2.0.2": + version: 2.0.2 + resolution: "brace-expansion@npm:2.0.2" dependencies: balanced-match: "npm:^1.0.0" - checksum: 10/a61e7cd2e8a8505e9f0036b3b6108ba5e926b4b55089eeb5550cd04a471fe216c96d4fe7e4c7f995c728c554ae20ddfc4244cad10aef255e72b62930afd233d1 + checksum: 10/01dff195e3646bc4b0d27b63d9bab84d2ebc06121ff5013ad6e5356daa5a9d6b60fa26cf73c74797f2dc3fbec112af13578d51f75228c1112b26c790a87b0488 languageName: node linkType: hard @@ -7799,13 +7771,6 @@ __metadata: languageName: node linkType: hard -"concat-map@npm:0.0.1": - 
version: 0.0.1 - resolution: "concat-map@npm:0.0.1" - checksum: 10/9680699c8e2b3af0ae22592cb764acaf973f292a7b71b8a06720233011853a58e256c89216a10cbe889727532fd77f8bcd49a760cedfde271b8e006c20e079f2 - languageName: node - linkType: hard - "concat-stream@npm:^1.5.2": version: 1.6.2 resolution: "concat-stream@npm:1.6.2" @@ -8072,30 +8037,6 @@ __metadata: languageName: node linkType: hard -"css-loader@npm:^7.1.2": - version: 7.1.2 - resolution: "css-loader@npm:7.1.2" - dependencies: - icss-utils: "npm:^5.1.0" - postcss: "npm:^8.4.33" - postcss-modules-extract-imports: "npm:^3.1.0" - postcss-modules-local-by-default: "npm:^4.0.5" - postcss-modules-scope: "npm:^3.2.0" - postcss-modules-values: "npm:^4.0.0" - postcss-value-parser: "npm:^4.2.0" - semver: "npm:^7.5.4" - peerDependencies: - "@rspack/core": 0.x || 1.x - webpack: ^5.27.0 - peerDependenciesMeta: - "@rspack/core": - optional: true - webpack: - optional: true - checksum: 10/ddde22fb103888320f60a1414a6a04638d2e9760a532a52d03c45e6e2830b32dd76c734aeef426f78dd95b2d15f77eeec3854ac53061aff02569732dc6e6801c - languageName: node - linkType: hard - "css-system-font-keywords@npm:^1.0.0": version: 1.0.0 resolution: "css-system-font-keywords@npm:1.0.0" @@ -9280,20 +9221,6 @@ __metadata: languageName: node linkType: hard -"esbuild-style-plugin@npm:^1.6.3": - version: 1.6.3 - resolution: "esbuild-style-plugin@npm:1.6.3" - dependencies: - "@types/less": "npm:^3.0.3" - "@types/sass": "npm:^1.43.1" - "@types/stylus": "npm:^0.48.38" - glob: "npm:^10.2.2" - postcss: "npm:^8.4.31" - postcss-modules: "npm:^6.0.0" - checksum: 10/764f946ff106afc2143fc08a76c2da58fa6dbabbbe2a76e71467d1d626d23e5961417d32cf5a74764584edadeb04d03a01ad682b2e814d03313be86b42f54a38 - languageName: node - linkType: hard - "esbuild@npm:^0.21.3": version: 0.21.5 resolution: "esbuild@npm:0.21.5" @@ -10449,15 +10376,6 @@ __metadata: languageName: node linkType: hard -"generic-names@npm:^4.0.0": - version: 4.0.0 - resolution: "generic-names@npm:4.0.0" - dependencies: - loader-utils: "npm:^3.2.0" - checksum: 10/ef05166395a17fbdcc7ceaa59635318b6ae89125391780c4d4abbc1e7ae7a6e07a31602fbc785860cf701cee08f790f71e286676c80db634f56d3d1af2703319 - languageName: node - linkType: hard - "gensync@npm:^1.0.0-beta.2": version: 1.0.0-beta.2 resolution: "gensync@npm:1.0.0-beta.2" @@ -12996,13 +12914,6 @@ __metadata: languageName: node linkType: hard -"loader-utils@npm:^3.2.0": - version: 3.3.1 - resolution: "loader-utils@npm:3.3.1" - checksum: 10/3f994a948ded4248569773f065b1f6d7c95da059888c8429153e203f9bdadfb1691ca517f9eac6548a8af2fe5c724a8e09cbb79f665db4209426606a57ec7650 - languageName: node - linkType: hard - "locate-path@npm:^5.0.0": version: 5.0.0 resolution: "locate-path@npm:5.0.0" @@ -15018,14 +14929,15 @@ __metadata: languageName: node linkType: hard -"plotly.js@npm:^3.0.1": - version: 3.0.1 - resolution: "plotly.js@npm:3.0.1" +"plotly.js@npm:^3.1.1": + version: 3.1.1 + resolution: "plotly.js@npm:3.1.1" dependencies: "@plotly/d3": "npm:3.8.2" "@plotly/d3-sankey": "npm:0.7.2" "@plotly/d3-sankey-circular": "npm:0.33.1" "@plotly/mapbox-gl": "npm:1.13.4" + "@plotly/regl": "npm:^2.1.2" "@turf/area": "npm:^7.1.0" "@turf/bbox": "npm:^7.1.0" "@turf/centroid": "npm:^7.1.0" @@ -15036,7 +14948,6 @@ __metadata: color-parse: "npm:2.0.0" color-rgba: "npm:3.0.0" country-regex: "npm:^1.1.0" - css-loader: "npm:^7.1.2" d3-force: "npm:^1.2.1" d3-format: "npm:^1.4.5" d3-geo: "npm:^1.12.1" @@ -15045,7 +14956,6 @@ __metadata: d3-interpolate: "npm:^3.0.1" d3-time: "npm:^1.1.0" d3-time-format: "npm:^2.2.3" - 
esbuild-style-plugin: "npm:^1.6.3" fast-isnumeric: "npm:^1.1.4" gl-mat4: "npm:^1.2.0" gl-text: "npm:^1.4.0" @@ -15061,21 +14971,19 @@ __metadata: point-in-polygon: "npm:^1.1.0" polybooljs: "npm:^1.2.2" probe-image-size: "npm:^7.2.3" - regl: "npm:@plotly/regl@^2.1.2" regl-error2d: "npm:^2.0.12" regl-line2d: "npm:^3.1.3" regl-scatter2d: "npm:^3.3.1" regl-splom: "npm:^1.0.14" strongly-connected-components: "npm:^1.0.1" - style-loader: "npm:^4.0.0" superscript-text: "npm:^1.0.0" svg-path-sdf: "npm:^1.1.3" tinycolor2: "npm:^1.4.2" to-px: "npm:1.0.1" topojson-client: "npm:^3.1.0" webgl-context: "npm:^2.2.0" - world-calendars: "npm:^1.0.3" - checksum: 10/f1b449f250a96b804f11170b3dcb01fb4fa7413f4078ac5cd499b2f9efed663fd85171f426dda57a73fba9999ec277439ab93e4f9aac1b971ab20a62ea483568 + world-calendars: "npm:^1.0.4" + checksum: 10/8df6d1dfd1b10088ac545181e97248e918b2ead4458991cb7ea3ec32be766466017b2ac417fc274e0261e672c5304ffe78819d82c7a693355008b9ac47465563 languageName: node linkType: hard @@ -15146,15 +15054,6 @@ __metadata: languageName: node linkType: hard -"postcss-modules-extract-imports@npm:^3.1.0": - version: 3.1.0 - resolution: "postcss-modules-extract-imports@npm:3.1.0" - peerDependencies: - postcss: ^8.1.0 - checksum: 10/00bfd3aff045fc13ded8e3bbfd8dfc73eff9a9708db1b2a132266aef6544c8d2aee7a5d7e021885f6f9bbd5565a9a9ab52990316e21ad9468a2534f87df8e849 - languageName: node - linkType: hard - "postcss-modules-local-by-default@npm:^4.0.4": version: 4.0.4 resolution: "postcss-modules-local-by-default@npm:4.0.4" @@ -15168,19 +15067,6 @@ __metadata: languageName: node linkType: hard -"postcss-modules-local-by-default@npm:^4.0.5": - version: 4.2.0 - resolution: "postcss-modules-local-by-default@npm:4.2.0" - dependencies: - icss-utils: "npm:^5.0.0" - postcss-selector-parser: "npm:^7.0.0" - postcss-value-parser: "npm:^4.1.0" - peerDependencies: - postcss: ^8.1.0 - checksum: 10/552329aa39fbf229b8ac5a04f8aed0b1553e7a3c10b165ee700d1deb020c071875b3df7ab5e3591f6af33d461df66d330ec9c1256229e45fc618a47c60f41536 - languageName: node - linkType: hard - "postcss-modules-scope@npm:^3.1.1": version: 3.1.1 resolution: "postcss-modules-scope@npm:3.1.1" @@ -15192,46 +15078,6 @@ __metadata: languageName: node linkType: hard -"postcss-modules-scope@npm:^3.2.0": - version: 3.2.1 - resolution: "postcss-modules-scope@npm:3.2.1" - dependencies: - postcss-selector-parser: "npm:^7.0.0" - peerDependencies: - postcss: ^8.1.0 - checksum: 10/51c747fa15cedf1b2856da472985ea7a7bb510a63daf30f95f250f34fce9e28ef69b802e6cc03f9c01f69043d171bc33279109a9235847c2d3a75c44eac67334 - languageName: node - linkType: hard - -"postcss-modules-values@npm:^4.0.0": - version: 4.0.0 - resolution: "postcss-modules-values@npm:4.0.0" - dependencies: - icss-utils: "npm:^5.0.0" - peerDependencies: - postcss: ^8.1.0 - checksum: 10/18021961a494e69e65da9e42b4436144c9ecee65845c9bfeff2b7a26ea73d60762f69e288be8bb645447965b8fd6b26a264771136810dc0172bd31b940aee4f2 - languageName: node - linkType: hard - -"postcss-modules@npm:^6.0.0": - version: 6.0.1 - resolution: "postcss-modules@npm:6.0.1" - dependencies: - generic-names: "npm:^4.0.0" - icss-utils: "npm:^5.1.0" - lodash.camelcase: "npm:^4.3.0" - postcss-modules-extract-imports: "npm:^3.1.0" - postcss-modules-local-by-default: "npm:^4.0.5" - postcss-modules-scope: "npm:^3.2.0" - postcss-modules-values: "npm:^4.0.0" - string-hash: "npm:^1.1.3" - peerDependencies: - postcss: ^8.0.0 - checksum: 
10/d53bfc67bd8351ebca81505f53ea917bdb46bbb962b9a7c10ba0dfb123a61af88b9bee16ed7085a660de9557a379a3d9a0e72184148d14523c8602d23618e796 - languageName: node - linkType: hard - "postcss-selector-parser@npm:^6.0.2, postcss-selector-parser@npm:^6.0.4": version: 6.0.16 resolution: "postcss-selector-parser@npm:6.0.16" @@ -15242,17 +15088,7 @@ __metadata: languageName: node linkType: hard -"postcss-selector-parser@npm:^7.0.0": - version: 7.1.0 - resolution: "postcss-selector-parser@npm:7.1.0" - dependencies: - cssesc: "npm:^3.0.0" - util-deprecate: "npm:^1.0.2" - checksum: 10/2caf09e66e2be81d45538f8afdc5439298c89bea71e9943b364e69dce9443d9c5ab33f4dd8b237f1ed7d2f38530338dcc189c1219d888159e6afb5b0afe58b19 - languageName: node - linkType: hard - -"postcss-value-parser@npm:^4.0.2, postcss-value-parser@npm:^4.1.0, postcss-value-parser@npm:^4.2.0": +"postcss-value-parser@npm:^4.0.2, postcss-value-parser@npm:^4.1.0": version: 4.2.0 resolution: "postcss-value-parser@npm:4.2.0" checksum: 10/e4e4486f33b3163a606a6ed94f9c196ab49a37a7a7163abfcd469e5f113210120d70b8dd5e33d64636f41ad52316a3725655421eb9a1094f1bcab1db2f555c62 @@ -15292,17 +15128,6 @@ __metadata: languageName: node linkType: hard -"postcss@npm:^8.4.31, postcss@npm:^8.4.33, postcss@npm:^8.5.3": - version: 8.5.3 - resolution: "postcss@npm:8.5.3" - dependencies: - nanoid: "npm:^3.3.8" - picocolors: "npm:^1.1.1" - source-map-js: "npm:^1.2.1" - checksum: 10/6d7e21a772e8b05bf102636918654dac097bac013f0dc8346b72ac3604fc16829646f94ea862acccd8f82e910b00e2c11c1f0ea276543565d278c7ca35516a7c - languageName: node - linkType: hard - "postcss@npm:^8.4.43": version: 8.4.45 resolution: "postcss@npm:8.4.45" @@ -15314,6 +15139,17 @@ __metadata: languageName: node linkType: hard +"postcss@npm:^8.5.3": + version: 8.5.3 + resolution: "postcss@npm:8.5.3" + dependencies: + nanoid: "npm:^3.3.8" + picocolors: "npm:^1.1.1" + source-map-js: "npm:^1.2.1" + checksum: 10/6d7e21a772e8b05bf102636918654dac097bac013f0dc8346b72ac3604fc16829646f94ea862acccd8f82e910b00e2c11c1f0ea276543565d278c7ca35516a7c + languageName: node + linkType: hard + "potpack@npm:^1.0.1": version: 1.0.2 resolution: "potpack@npm:1.0.2" @@ -16285,13 +16121,6 @@ __metadata: languageName: node linkType: hard -"regl@npm:@plotly/regl@^2.1.2": - version: 2.1.2 - resolution: "@plotly/regl@npm:2.1.2" - checksum: 10/1e951d5161c3991efb98e3d33a38089306b4c2119d337d7f128d9474f2ff5eb91dd2fce7a0ca290e459432f10568f65ff3db37122c558fe55a270890093ca661 - languageName: node - linkType: hard - "regl@npm:^2.0.0": version: 2.1.0 resolution: "regl@npm:2.1.0" @@ -17464,13 +17293,6 @@ __metadata: languageName: node linkType: hard -"string-hash@npm:^1.1.3": - version: 1.1.3 - resolution: "string-hash@npm:1.1.3" - checksum: 10/104b8667a5e0dc71bfcd29fee09cb88c6102e27bfb07c55f95535d90587d016731d52299380052e514266f4028a7a5172e0d9ac58e2f8f5001be61dc77c0754d - languageName: node - linkType: hard - "string-length@npm:^4.0.1": version: 4.0.2 resolution: "string-length@npm:4.0.2" @@ -17673,15 +17495,6 @@ __metadata: languageName: node linkType: hard -"style-loader@npm:^4.0.0": - version: 4.0.0 - resolution: "style-loader@npm:4.0.0" - peerDependencies: - webpack: ^5.27.0 - checksum: 10/93f25b7e70cfca9d1d8427170384262b59a5b0e84e7191a5a26636a77799caeed46d9a3e45ee7b9afa0f69176e3b98d5a6c5e81593ff1fd0946f1c5682fd2a68 - languageName: node - linkType: hard - "style-to-object@npm:^0.4.0": version: 0.4.4 resolution: "style-to-object@npm:0.4.4" @@ -19569,12 +19382,12 @@ __metadata: languageName: node linkType: hard -"world-calendars@npm:^1.0.3": - version: 
1.0.3 - resolution: "world-calendars@npm:1.0.3" +"world-calendars@npm:^1.0.4": + version: 1.0.4 + resolution: "world-calendars@npm:1.0.4" dependencies: object-assign: "npm:^4.1.0" - checksum: 10/f5405fed2d305998cb8c5a18c66fdc5a9885f4c44b3e5a59028c825256c75e57756f4db652af6f0eabcc63b29c361bd1a9cee15f743db42b8bf1032dff7c1277 + checksum: 10/c7dfbea3ba95f25bc5a675c5041b112bf7a5bbccf67cfddbd4cbda0cadc7078aaebd5341e4351ccc6a14a3d8187919767f8848abc4b74c5be8864cf04645a03f languageName: node linkType: hard diff --git a/docs/source/enterprise/api_connection.rst b/docs/source/enterprise/api_connection.rst index 4b5a5d2cf62..abaa725b68e 100644 --- a/docs/source/enterprise/api_connection.rst +++ b/docs/source/enterprise/api_connection.rst @@ -6,8 +6,8 @@ API Connection .. default-role:: code This page describes how to create API keys and configure your -:ref:`SDK installation ` to connect to your Enterprise -deployment's API. +:ref:`SDK installation ` to connect to your FiftyOne +Enterprise deployment. All actions taken via API connections are authenticated based on the user associated with the API key, which means that concepts like user roles and diff --git a/docs/source/enterprise/app.rst b/docs/source/enterprise/app.rst index bb257e234dc..b097d2a422d 100644 --- a/docs/source/enterprise/app.rst +++ b/docs/source/enterprise/app.rst @@ -126,9 +126,11 @@ description, and tags for the dataset: .. note:: - What next? Use the :ref:`Enterprise Python SDK ` to upload new + What next? Use the + :ref:`FiftyOne Enterprise Python SDK ` to upload new samples, labels, and metadata to your dataset. A common approach is to - automate this process via :ref:`cloud functions `. + automate this process via + :ref:`cloud functions `. .. _enterprise-using-datasets: diff --git a/docs/source/enterprise/getting_started.rst b/docs/source/enterprise/getting_started.rst new file mode 100644 index 00000000000..6f838129c39 --- /dev/null +++ b/docs/source/enterprise/getting_started.rst @@ -0,0 +1,329 @@ +.. _enterprise-getting-started: + +Getting Started with FiftyOne Enterprise +======================================== + +.. default-role:: code + +Follow this guide to create your first dataset in FiftyOne Enterprise 🚀 + +Configure cloud credentials +--------------------------- + +An :ref:`admin user ` must configure cloud credentials +**once** for a deployment in order for users to view datasets: + +.. image:: /images/enterprise/getting_started_cloud_creds.gif + :alt: getting-started-cloud-creds + :align: center + +.. _enterprise-getting-started-sdk: + +Create a dataset via the SDK +---------------------------- + +Install the FiftyOne Enterprise Python SDK +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +1. Navigate to the **Settings > API keys** page +2. Copy and execute the provided bash command to install the SDK in your + virtual environment + +.. image:: /images/enterprise/getting_started_install_sdk.gif + :alt: getting-started-install-sdk + :align: center + +If you plan to work with video datasets, you'll also need to install +`FFmpeg `_: + +.. tabs:: + + .. group-tab:: Linux + + .. code-block:: shell + + sudo apt install -y ffmpeg + + .. group-tab:: macOS + + .. code-block:: shell + + brew install ffmpeg + + .. group-tab:: Windows + + You can download a Windows build from + `here `_. Unzip it and be + sure to add it to your path. + +Connect to your deployment +~~~~~~~~~~~~~~~~~~~~~~~~~~ + +To connect to your FiftyOne Enterprise deployment, you must provide your +:ref:`API URI and API key `: + +..
code-block:: shell + + export FIFTYONE_API_URI=XXXXXXXX + export FIFTYONE_API_KEY=YYYYYYYY + +You can create an API key and locate your deployment's URI on the +**Settings > API keys** page of the FiftyOne Enterprise App: + +.. image:: /images/enterprise/api_key_generate.png + :alt: api-key-generate + :align: center + +You can use the :ref:`fiftyone config ` CLI command to +verify that you have correctly configured your API URI and API key: + +.. code-block:: shell + + $ fiftyone config + { + ... + "api_uri": "XXXXXXXX", + "api_key": "YYYYYYYY", + ... + } + +You can also verify that your API connection is working correctly by executing +the following method: + +.. code-block:: python + + # if this fails, you may have the open source SDK installed + import fiftyone.management as fom + + # if this succeeds, your API connection is working + fom.test_api_connection() + +Set cloud credentials locally +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Next, configure the appropriate environment variables to register your +:ref:`cloud credentials ` in your local +environment: + +.. tabs:: + + .. group-tab:: AWS + + Set the following environment variables: + + .. code-block:: bash + + export AWS_ACCESS_KEY_ID=... + export AWS_SECRET_ACCESS_KEY=... + export AWS_DEFAULT_REGION=... + + .. group-tab:: GCP + + Set the following environment variable: + + .. code-block:: bash + + export GOOGLE_APPLICATION_CREDENTIALS="/path/to/your/service-account-key.json" + + .. group-tab:: Azure + + Set the following environment variables: + + .. code-block:: bash + + export AZURE_STORAGE_ACCOUNT=... + export AZURE_STORAGE_KEY=... + + .. group-tab:: MinIO + + Set the following environment variables: + + .. code-block:: bash + + export MINIO_ACCESS_KEY_ID=... + export MINIO_SECRET_ACCESS_KEY=... + export MINIO_DEFAULT_REGION=... + +Refer to :ref:`this page ` for more information about +interacting with cloud-backed media in FiftyOne Enterprise. + +Import your data +~~~~~~~~~~~~~~~~ + +The example code below shows the basic pattern for creating new datasets and +populating them via the FiftyOne Enterprise Python SDK: + +.. tabs:: + + .. group-tab:: AWS + + .. code-block:: python + + import fiftyone as fo + import fiftyone.core.storage as fos + + dataset = fo.Dataset("<dataset-name>") + + s3_files = fos.list_files("s3://<bucket>/<prefix>/", abs_paths=True) + + samples = [] + for s3_uri in s3_files: + if s3_uri.lower().endswith(".jpeg"): + sample = fo.Sample(filepath=s3_uri) + samples.append(sample) + + dataset.add_samples(samples) + + # You must mark the dataset as persistent to access it in the UI + dataset.persistent = True + + .. group-tab:: GCP + + .. code-block:: python + + import fiftyone as fo + import fiftyone.core.storage as fos + + dataset = fo.Dataset("<dataset-name>") + + gcs_files = fos.list_files("gs://<bucket>/<prefix>/", abs_paths=True) + + samples = [] + for gcs_uri in gcs_files: + if gcs_uri.lower().endswith(".jpeg"): + sample = fo.Sample(filepath=gcs_uri) + samples.append(sample) + + dataset.add_samples(samples) + + # You must mark the dataset as persistent to access it in the UI + dataset.persistent = True + + .. group-tab:: Azure + + ..
code-block:: python + + import fiftyone as fo + import fiftyone.core.storage as fos + + dataset = fo.Dataset("<dataset-name>") + + azure_files = fos.list_files( + "https://<account>.blob.core.windows.net/<container>/", + abs_paths=True, + ) + + samples = [] + for azure_uri in azure_files: + if azure_uri.lower().endswith(".jpeg"): + sample = fo.Sample(filepath=azure_uri) + samples.append(sample) + + dataset.add_samples(samples) + + # You must mark the dataset as persistent to access it in the UI + dataset.persistent = True + + .. group-tab:: MinIO + + .. code-block:: python + + import fiftyone as fo + import fiftyone.core.storage as fos + + dataset = fo.Dataset("<dataset-name>") + + minio_files = fos.list_files( + "https://minio.example.com/<bucket>/", + abs_paths=True, + ) + + samples = [] + for minio_uri in minio_files: + if minio_uri.lower().endswith(".jpeg"): + sample = fo.Sample(filepath=minio_uri) + samples.append(sample) + + dataset.add_samples(samples) + + # You must mark the dataset as persistent to access it in the UI + dataset.persistent = True + +Refer to :ref:`this page ` for more information about +importing your media and labels into FiftyOne via Python. + +Compute metadata +~~~~~~~~~~~~~~~~ + +All datasets/views provide a builtin +:meth:`compute_metadata() ` +method that you can invoke to efficiently populate the `metadata` field of your +samples with basic media type-specific metadata such as file size and +image/video dimensions for all samples in a collection: + +.. code-block:: python + + dataset.compute_metadata() + + sample = dataset.first() + print(sample.metadata) + +It is highly recommended to keep the `metadata` field populated for all samples +of your datasets because it provides useful information upon which to +search/filter and it enables the sample grid's tiling algorithm to run more +efficiently. + +You can verify that all samples in a dataset/view have metadata as follows: + +.. code-block:: python + + assert len(dataset.exists("metadata", False)) == 0 + +.. _enterprise-getting-started-ui: + +Create a dataset via the UI +--------------------------- + +.. note:: + + An admin must follow :ref:`these instructions ` + to install the + `@voxel51/io `_ + and + `@voxel51/utils `_ + plugins in order for users to perform imports and compute metadata via the + FiftyOne Enterprise UI. + +Import your data +~~~~~~~~~~~~~~~~ + +To create a new dataset, click on the "New dataset" button in the upper right +corner of the FiftyOne Enterprise homepage. A pop-up will appear allowing you to +choose a name and optional description/tags for the dataset: + +.. image:: /images/enterprise/create_dataset.png + :alt: create-dataset + :align: center + +You can then use the **import_samples** operator to import media and labels +stored in a cloud storage bucket: + +.. image:: /images/enterprise/getting_started_import_samples.gif + :alt: getting-started-import-samples + :align: center + +Compute metadata +~~~~~~~~~~~~~~~~ + +You can use the **compute_metadata** operator to efficiently populate the +`metadata` field of your samples with basic media type-specific metadata such +as file size and image/video dimensions for all samples in a collection: + +.. image:: /images/enterprise/getting_started_schedule_compute_metadata.gif + :alt: getting-started-compute-metadata + :align: center + +It is highly recommended to keep the `metadata` field populated for all samples +of your datasets because it provides useful information upon which to +search/filter and it enables the sample grid's tiling algorithm to run more
efficiently.
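As a companion to the UI workflow above, here is a minimal SDK sketch (illustrative only; it assumes a persistent dataset named `my-dataset` already exists in your deployment) that backfills any missing metadata and then verifies completeness using the same `exists()` check shown earlier:

```python
import fiftyone as fo

# Load an existing dataset by name ("my-dataset" is a hypothetical name)
dataset = fo.load_dataset("my-dataset")

# Populate `metadata` only for samples that are missing it
dataset.compute_metadata(overwrite=False)

# Verify that no samples are missing metadata
missing = dataset.exists("metadata", False)
print(f"{len(missing)} samples are missing metadata")
```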
diff --git a/docs/source/enterprise/index.rst b/docs/source/enterprise/index.rst index 84fd2ed2193..2492048c41d 100644 --- a/docs/source/enterprise/index.rst +++ b/docs/source/enterprise/index.rst @@ -68,6 +68,12 @@ pages on this site apply to Enterprise deployments as well. :button_text: Install the SDK :button_link: installation.html +.. customcalloutitem:: + :header: Getting Started + :description: Learn how to upload your first dataset to FiftyOne Enterprise. + :button_text: Upload your first dataset + :button_link: getting_started.html + .. customcalloutitem:: :header: Cloud-backed media :description: Integrate FiftyOne Enterprise with your media stored in the cloud. @@ -161,6 +167,7 @@ pages on this site apply to Enterprise deployments as well. Overview Installation + Getting Started API connection Cloud-backed media Roles and permissions diff --git a/docs/source/enterprise/installation.rst b/docs/source/enterprise/installation.rst index 847fe8bf724..91d628250f3 100644 --- a/docs/source/enterprise/installation.rst +++ b/docs/source/enterprise/installation.rst @@ -5,19 +5,19 @@ FiftyOne Enterprise Installation .. default-role:: code -FiftyOne Enterprise deployments come with a centralized FiftyOne Enterprise App and -database that allows your entire team to collaborate securely on the same +FiftyOne Enterprise deployments come with a centralized FiftyOne Enterprise App +and database that allows your entire team to collaborate securely on the same datasets. FiftyOne Enterprise is deployed entirely into your environment, either on-premises or in a private cloud. Your data never leaves your environment. -FiftyOne Enterprise can be deployed on a wide variety of infrastructure solutions, -including Kubernetes and Docker. +FiftyOne Enterprise can be deployed on a wide variety of infrastructure +solutions, including Kubernetes and Docker. .. note:: - Detailed instructions for the initial FiftyOne Enterprise deployment, along with - all necessary components, are made available by your Voxel51 CS engineer - during the onboarding process. + Detailed instructions for the initial FiftyOne Enterprise deployment, along + with all necessary components, are made available by your Voxel51 support + team during the onboarding process. .. _enterprise-python-sdk: diff --git a/docs/source/enterprise/migrations.rst b/docs/source/enterprise/migrations.rst index 689ef590f4a..fe4e9c26956 100644 --- a/docs/source/enterprise/migrations.rst +++ b/docs/source/enterprise/migrations.rst @@ -5,21 +5,21 @@ Migrations .. default-role:: code -This page describes how to migrate between FiftyOne Enterprise versions, both for -:ref:`admins ` migrating the core Enterprise App infrastructure and -:ref:`individual users ` who need to install a new -version of the Enterprise Python SDK. +This page describes how to migrate between FiftyOne Enterprise versions, both +for :ref:`admins ` migrating the core FiftyOne Enterprise +infrastructure and :ref:`individual users ` who +need to install a new version of the FiftyOne Enterprise Python SDK. -Refer to :ref:`this section ` to see how to migrate -existing datasets from open source to Enterprise. +Refer to :ref:`this section ` to see how to +migrate existing datasets from FiftyOne Open Source to FiftyOne Enterprise. .. 
_enterprise-upgrade-python-sdk: Upgrading your Python SDK _________________________ -Users can upgrade their FiftyOne Enterprise Python client to the latest version as -follows: +Users can upgrade their FiftyOne Enterprise Python client to the latest version +as follows: .. code-block:: shell @@ -34,7 +34,8 @@ A specific FiftyOne Enterprise client version can be installed like so: .. note:: You can find your `TOKEN` by logging into the FiftyOne Enterprise App and - clicking on the :ref:`account icon ` in the upper right. + clicking on the :ref:`account icon ` in the upper + right. .. _enterprise-upgrading: @@ -44,9 +45,10 @@ _________________________ The basic **admin workflow** for upgrading a FiftyOne Enterprise deployment is: - :ref:`Upgrade ` all automated services and - individual user workflows that use the Enterprise Python SDK to an appropriate - SDK version -- Upgrade your core Enterprise App infrastructure (via Kubernetes, Docker, etc) + individual user workflows that use the FiftyOne Enterprise Python SDK to an + appropriate SDK version +- Upgrade your core FiftyOne Enterprise infrastructure (via Kubernetes, + Docker, etc) - Upgrade your database's version, as described below .. note:: @@ -76,11 +78,12 @@ below: first time they are loaded under a new database version. Often there is no migration required, but there could be. -`Unlike open source FiftyOne `_, -a Enterprise database is not automatically upgraded when a user connects to the -database with a newer Python client version. Instead, an admin must manually -upgrade your Enterprise database by installing the newest version of the Enterprise SDK -locally, assuming admin privileges, and running the command shown below: +`Unlike FiftyOne Open Source `_, +a FiftyOne Enterprise database is not automatically upgraded when a user +connects to the database with a newer Python client version. Instead, an admin +must manually upgrade your database by installing the newest version of the +FiftyOne Enterprise SDK locally, assuming admin privileges, and running the +following command: .. code-block:: shell @@ -103,16 +106,16 @@ locally, assuming admin privileges, and running the command shown below: Downgrading your deployment ___________________________ -Admins can also downgrade their FiftyOne Enterprise deployment to an older version -if necessary. +Admins can also downgrade their FiftyOne Enterprise deployment to an older +version if necessary. -The steps are the same as :ref:`when upgrading `, except that -you’ll need to know the appropriate database version to migrate down to. Each -version of Enterprise corresponds to a version of open source FiftyOne called its -"open source compatibility version", and this versioning system is used to set -the database version. +The steps are the same as :ref:`when upgrading `, except +that you’ll need to know the appropriate database version to migrate down to. +Each version of FiftyOne Enterprise corresponds to a version of FiftyOne Open +Source called its "open source compatibility version", and this versioning +system is used to set the database version. -For example, you can downgrade to Enterprise v0.10 like so: +For example, you can downgrade to FiftyOne Enterprise v0.10 like so: .. code-block:: shell @@ -127,17 +130,17 @@ For example, you can downgrade to Enterprise v0.10 like so: .. note:: - Contact your Voxel51 CS engineer if you need to know the open source - compatibility version for a particular Enterprise version that you wish to - downgrade to. 
+ Contact your Voxel51 support team if you need to know the open source + compatibility version for a particular FiftyOne Enterprise version that + you wish to downgrade to. .. _enterprise-migrating-datasets: Migrating datasets to Enterprise ________________________________ -Any datasets that you have created via open source FiftyOne can be migrated to -your Enterprise deployment by exporting them in +Any datasets that you have created via FiftyOne Open Source can be migrated to +your FiftyOne Enterprise deployment by exporting them in :ref:`FiftyOneDataset ` format: .. code-block:: python @@ -154,8 +157,8 @@ your Enterprise deployment by exporting them in export_media=False, ) -and then re-importing them with the Enterprise SDK connected to your Enterprise -deployment: +and then re-importing them with the FiftyOne Enterprise SDK connected to your +Enterprise deployment: .. code-block:: python :linenos: @@ -170,10 +173,10 @@ deployment: ) Note that you'll need to update any local filepaths to cloud paths in order to -use the dataset in Enterprise. +use the dataset in FiftyOne Enterprise. -If you need to upload the local media to the cloud, the Enterprise SDK provides a -builtin utility for this: +If you need to upload the local media to the cloud, the FiftyOne Enterprise SDK +provides a builtin utility for this: .. code-block:: python :linenos: diff --git a/docs/source/enterprise/roles_and_permissions.rst b/docs/source/enterprise/roles_and_permissions.rst index 436732349eb..76426b5f0b7 100644 --- a/docs/source/enterprise/roles_and_permissions.rst +++ b/docs/source/enterprise/roles_and_permissions.rst @@ -5,14 +5,14 @@ Roles and permissions .. default-role:: code -FiftyOne Enterprise is built for collaboration, with the goal of making it as easy -as possible for engineers, data scientists, and stakeholders to work together -to build high quality datasets and computer vision models. +FiftyOne Enterprise is built for collaboration, with the goal of making it as +easy as possible for engineers, data scientists, and stakeholders to work +together to build high quality datasets and computer vision models. -Accordingly, FiftyOne Enterprise gives you the flexibility to configure user roles, -user groups and fine-grained permissions so that you can safely and securely -collaborate both inside and outside your organization at all stages of your -workflows. +Accordingly, FiftyOne Enterprise gives you the flexibility to configure user +roles, user groups and fine-grained permissions so that you can safely and +securely collaborate both inside and outside your organization at all stages of +your workflows. This page introduces the basic roles and permissions available in FiftyOne Enterprise. @@ -38,9 +38,9 @@ invitation. .. note:: - Invited users may login using any identity provider that has been enabled on your - deployment. If you need more information about configuring IdPs or increasing - your user quotas, contact your Voxel51 CS engineer. + Invited users may login using any identity provider that has been enabled on + your deployment. If you need more information about configuring IdPs or + increasing your user quotas, contact your Voxel51 support team. .. image:: /images/enterprise/user_invitation.png :alt: user-invitation @@ -102,10 +102,10 @@ view other users of the deployment. Groups ------ -User groups in FiftyOne Enterprise allow organization admins to manage a collection -of users as a single entity. 
This simplifies the process of assigning -permissions to multiple users, making it more efficient to control access to -datasets. +User groups in FiftyOne Enterprise allow organization admins to manage a +collection of users as a single entity. This simplifies the process of +assigning permissions to multiple users, making it more efficient to control +access to datasets. Admins can manage groups through the "Settings > Team > Groups" page. Each group can be given specific dataset access permissions, which apply to @@ -141,9 +141,9 @@ Admins and users with the **Can manage** permission on a dataset can configure a dataset's permissions under the dataset's :ref:`Manage tab ` in the FiftyOne Enterprise App. -In FiftyOne Enterprise, dataset permissions for a user are determined by both the -access they receive from their groups' permissions and individual permissions -assigned to them. +In FiftyOne Enterprise, dataset permissions for a user are determined by both +the access they receive from their groups' permissions and individual +permissions assigned to them. A user’s permissions on a dataset is the maximum of their permissions from the following sources: diff --git a/docs/source/images/enterprise/getting_started_cloud_creds.gif b/docs/source/images/enterprise/getting_started_cloud_creds.gif new file mode 100644 index 00000000000..61d606b7b4b Binary files /dev/null and b/docs/source/images/enterprise/getting_started_cloud_creds.gif differ diff --git a/docs/source/images/enterprise/getting_started_import_samples.gif b/docs/source/images/enterprise/getting_started_import_samples.gif new file mode 100644 index 00000000000..addd60c2a7d Binary files /dev/null and b/docs/source/images/enterprise/getting_started_import_samples.gif differ diff --git a/docs/source/images/enterprise/getting_started_install_io_plugin.gif b/docs/source/images/enterprise/getting_started_install_io_plugin.gif new file mode 100644 index 00000000000..af6d4c1c39c Binary files /dev/null and b/docs/source/images/enterprise/getting_started_install_io_plugin.gif differ diff --git a/docs/source/images/enterprise/getting_started_install_sdk.gif b/docs/source/images/enterprise/getting_started_install_sdk.gif new file mode 100644 index 00000000000..89550ef1d3f Binary files /dev/null and b/docs/source/images/enterprise/getting_started_install_sdk.gif differ diff --git a/docs/source/images/enterprise/getting_started_schedule_compute_metadata.gif b/docs/source/images/enterprise/getting_started_schedule_compute_metadata.gif new file mode 100644 index 00000000000..9ee360ee376 Binary files /dev/null and b/docs/source/images/enterprise/getting_started_schedule_compute_metadata.gif differ diff --git a/fiftyone/factory/repos/delegated_operation.py b/fiftyone/factory/repos/delegated_operation.py index 9a1b8565a01..0be2b7e37c4 100644 --- a/fiftyone/factory/repos/delegated_operation.py +++ b/fiftyone/factory/repos/delegated_operation.py @@ -186,18 +186,23 @@ def _create_indexes(self): ) ) - if "dataset_id_1" not in index_names: + if "parent_id_1" not in index_names: indices_to_create.append( IndexModel( - [("dataset_id", pymongo.ASCENDING)], name="dataset_id_1" + [("parent_id", pymongo.ASCENDING)], + name="parent_id_1", ) ) - if "parent_id_1" not in index_names: + if "dataset_id_1_parent_id_1_scheduled_at_1" not in index_names: indices_to_create.append( IndexModel( - [("parent_id", pymongo.ASCENDING)], - name="parent_id_1", + [ + ("dataset_id", pymongo.ASCENDING), + ("parent_id", pymongo.ASCENDING), + ("scheduled_at", pymongo.DESCENDING), + 
], + name="dataset_id_1_parent_id_1_scheduled_at_1", ) ) diff --git a/fiftyone/factory/repos/delegated_operation_doc.py b/fiftyone/factory/repos/delegated_operation_doc.py index 870007be675..00e8523ff79 100644 --- a/fiftyone/factory/repos/delegated_operation_doc.py +++ b/fiftyone/factory/repos/delegated_operation_doc.py @@ -6,7 +6,6 @@ | """ import copy -import dataclasses import logging from datetime import datetime @@ -137,7 +136,18 @@ def from_pymongo(self, doc: dict): return self def to_pymongo(self) -> dict: - d = copy.deepcopy(self.__dict__) + # We make a copy of self.__dict__ so that changes we make below do not + # affect the actual object. We exclude certain keys that we don't want + # to serialize directly. It is particularly important that we do not + # try to copy "context", because it may contain big, complicated, + # non-serializable objects that may cause issues with copying. + + ignore_keys = {"_doc", "id", "context", "pipeline"} + d = { + k: copy.deepcopy(v) + for k, v in self.__dict__.items() + if k not in ignore_keys + } if self.context: d["context"] = { "request_params": self.context._get_serialized_request_params() } if self.pipeline: d["pipeline"] = self.pipeline.to_json() - d.pop("_doc", None) - d.pop("id", None) return d diff --git a/fiftyone/server/routes/__init__.py b/fiftyone/server/routes/__init__.py index e72d2f1de47..b708b4dd5f6 100644 --- a/fiftyone/server/routes/__init__.py +++ b/fiftyone/server/routes/__init__.py @@ -17,7 +17,7 @@ from .geo import GeoPoints from .get_similar_labels_frames import GetSimilarLabelsFrameCollection from .media import Media -from .sample import Sample +from .sample import SampleRoutes from .plugins import Plugins from .screenshot import Screenshot from .sort import Sort @@ -29,6 +29,7 @@ routes = ( EmbeddingsRoutes + OperatorRoutes + + SampleRoutes + [ ("/aggregate", Aggregate), ("/event", Event), @@ -38,7 +39,6 @@ ("/geo", GeoPoints), ("/media", Media), ("/plugins", Plugins), - ("/dataset/{dataset_id}/sample/{sample_id}", Sample), ("/sort", Sort), ("/screenshot/{img:str}", Screenshot), ("/tag", Tag), diff --git a/fiftyone/server/routes/sample.py b/fiftyone/server/routes/sample.py index 71c31b92ef7..d0d6fde89da 100644 --- a/fiftyone/server/routes/sample.py +++ b/fiftyone/server/routes/sample.py @@ -5,26 +5,80 @@ | `voxel51.com `_ | """ + import logging from starlette.endpoints import HTTPEndpoint from starlette.exceptions import HTTPException from starlette.requests import Request -import fiftyone.core.labels as fol +import fiftyone as fo import fiftyone.core.odm.utils as fou +from typing import List +from fiftyone.server.utils.jsonpatch import parse +from fiftyone.server.utils import transform_json from fiftyone.server.decorators import route +from typing import Any logger = logging.getLogger(__name__) -LABEL_CLASS_MAP = { - "Classification": fol.Classification, - "Classifications": fol.Classifications, - "Detection": fol.Detection, - "Detections": fol.Detections, - "Polyline": fol.Polyline, - "Polylines": fol.Polylines, -} + +def get_sample(dataset_id: str, sample_id: str) -> fo.Sample: + """Retrieves a sample from a dataset.
+ + Args: + dataset_id: the dataset ID + sample_id: the sample ID + + Returns: + the sample + + Raises: + HTTPException: if the dataset or sample is not found + """ + try: + dataset = fou.load_dataset(id=dataset_id) + except ValueError: + raise HTTPException( + status_code=404, + detail=f"Dataset '{dataset_id}' not found", + ) + + try: + sample = dataset[sample_id] + except KeyError: + raise HTTPException( + status_code=404, + detail=f"Sample '{sample_id}' not found in dataset '{dataset_id}'", + ) + + return sample + + +def handle_json_patch(target: Any, patch_list: List[dict]) -> Any: + """Applies a list of JSON patch operations to a target object.""" + try: + patches = parse(patch_list, transform_fn=transform_json) + except Exception as e: + raise HTTPException( + status_code=400, + detail=f"Failed to parse patches due to: {e}", + ) + + errors = {} + for i, p in enumerate(patches): + try: + p.apply(target) + except Exception as e: + logger.error("Error applying patch %s: %s", p, e) + errors[str(patch_list[i])] = str(e) + + if errors: + raise HTTPException( + status_code=400, + detail=errors, + ) + return target class Sample(HTTPEndpoint): @@ -32,15 +86,12 @@ class Sample(HTTPEndpoint): async def patch(self, request: Request, data: dict) -> dict: """Applies a list of field updates to a sample. + See: https://datatracker.ietf.org/doc/html/rfc6902 + Args: request: Starlette request with dataset_id and sample_id in path params data: A dict mapping field names to values. - Field value handling: - - None: deletes the field - - dict with "_cls" key: deserializes as a FiftyOne label using from_dict - - other: assigns the value directly to the field - Returns: the final state of the sample as a dict """ @@ -53,28 +104,23 @@ async def patch(self, request: Request, data: dict) -> dict: dataset_id, ) - if not isinstance(data, dict): - raise HTTPException( - status_code=400, - detail="Request body must be a JSON object mapping field names to values", - ) - - try: - dataset = fou.load_dataset(id=dataset_id) - except ValueError: - raise HTTPException( - status_code=404, - detail=f"Dataset '{dataset_id}' not found", - ) + sample = get_sample(dataset_id, sample_id) - try: - sample = dataset[sample_id] - except KeyError: + content_type = request.headers.get("Content-Type", "") + ctype = content_type.split(";", 1)[0].strip().lower() + if ctype == "application/json": + result = self._handle_patch(sample, data) + elif ctype == "application/json-patch+json": + result = handle_json_patch(sample, data) + else: raise HTTPException( - status_code=404, - detail=f"Sample '{sample_id}' not found in dataset '{dataset_id}'", + status_code=415, + detail=f"Unsupported Content-Type '{ctype}'", ) + sample.save() + return result.to_dict(include_private=True) + def _handle_patch(self, sample: fo.Sample, data: dict) -> fo.Sample: errors = {} for field_name, value in data.items(): try: @@ -82,20 +128,7 @@ async def patch(self, request: Request, data: dict) -> dict: sample.clear_field(field_name) continue - if isinstance(value, dict) and "_cls" in value: - cls_name = value.get("_cls") - if cls_name in LABEL_CLASS_MAP: - label_cls = LABEL_CLASS_MAP[cls_name] - try: - sample[field_name] = label_cls.from_dict(value) - except Exception as e: - errors[field_name] = str(e) - else: - errors[ - field_name - ] = f"Unsupported label class '{cls_name}'" - else: - sample[field_name] = value + sample[field_name] = transform_json(value) except Exception as e: errors[field_name] = str(e) @@ -104,6 +137,68 @@ async def patch(self, request:
Request, data: dict) -> dict: status_code=400, detail=errors, ) + return sample + + +class SampleField(HTTPEndpoint): + @route + async def patch(self, request: Request, data: dict) -> dict: + """Applies JSON patch operations to an element of a list field, selected by its id. + + See: https://datatracker.ietf.org/doc/html/rfc6902 + + Args: + request: Starlette request with dataset_id, sample_id, field_path, and field_id in path params + data: a JSON patch dict (or list of dicts) with "op", "path", and any operation-specific fields + + Returns: + the final state of the patched field as a dict + """ + dataset_id = request.path_params["dataset_id"] + sample_id = request.path_params["sample_id"] + path = request.path_params["field_path"] + field_id = request.path_params["field_id"] + + logger.info( + "Received patch request for field %s with ID %s on sample %s in dataset %s", + path, + field_id, + sample_id, + dataset_id, + ) + + sample = get_sample(dataset_id, sample_id) + + try: + field_list = sample.get_field(path) + except Exception as e: + raise HTTPException( + status_code=404, + detail=f"Field '{path}' not found in sample '{sample_id}'", + ) from e + + if not isinstance(field_list, list): + raise HTTPException( + status_code=400, + detail=f"Field '{path}' is not a list", + ) + + field = next((f for f in field_list if f.id == field_id), None) + if field is None: + raise HTTPException( + status_code=404, + detail=f"Field with id '{field_id}' not found in field '{path}'", + ) + + result = handle_json_patch(field, data) sample.save() + return result.to_dict() + - return sample.to_dict(include_private=True) +SampleRoutes = [ + ("/dataset/{dataset_id}/sample/{sample_id}", Sample), + ( + "/dataset/{dataset_id}/sample/{sample_id}/{field_path}/{field_id}", + SampleField, + ), +] diff --git a/fiftyone/server/utils.py b/fiftyone/server/utils/__init__.py similarity index 97% rename from fiftyone/server/utils.py rename to fiftyone/server/utils/__init__.py index bfa31fca713..d27c4e45ad7 100644 --- a/fiftyone/server/utils.py +++ b/fiftyone/server/utils/__init__.py @@ -15,6 +15,9 @@ import fiftyone.core.dataset as fod import fiftyone.core.fields as fof +from fiftyone.server.utils.json_transform import ( + transform as transform_json, +) # auto-register resource types _cache = cachetools.TTLCache(maxsize=10, ttl=900) # ttl in seconds diff --git a/fiftyone/server/utils/json_transform/__init__.py b/fiftyone/server/utils/json_transform/__init__.py new file mode 100644 index 00000000000..aff7a01df37 --- /dev/null +++ b/fiftyone/server/utils/json_transform/__init__.py @@ -0,0 +1,10 @@ +""" +FiftyOne Server utils json transform. + +| Copyright 2017-2025, Voxel51, Inc. +| `voxel51.com `_ +| +""" + +import fiftyone.server.utils.json_transform.types # auto-register resource types +from fiftyone.server.utils.json_transform.transform import transform diff --git a/fiftyone/server/utils/json_transform/transform.py b/fiftyone/server/utils/json_transform/transform.py new file mode 100644 index 00000000000..a258effc53f --- /dev/null +++ b/fiftyone/server/utils/json_transform/transform.py @@ -0,0 +1,66 @@ +"""Transform a JSON value. + +| Copyright 2017-2025, Voxel51, Inc. +| `voxel51.com `_ +| +""" +from typing import Any, Callable, Type, TypeVar + +T = TypeVar("T") + +REGISTRY: dict[Type[T], Callable[[dict], T]] = {} + +def register( + cls: Type[T], +) -> Callable[[Callable[[dict], T]], Callable[[dict], T]]: + """Register a transform function for a resource type.
+ + Args: + cls (Type[T]): The resource type + + Returns: + Callable[[Callable[[dict], T]], Callable[[dict], T]]: A decorator + that registers the decorated function as a transform for the given + resource type. + """ + + def inner(fn: Callable[[dict], T]) -> Callable[[dict], T]: + if not callable(fn): + raise TypeError("fn must be callable") + + if cls in REGISTRY: + raise ValueError( + f"Resource type '{cls.__name__}' transform already registered" + ) + + REGISTRY[cls] = fn + + return fn + + return inner + + +def transform( + value: Any, +) -> Any: + """Transforms a patch value if there is a registered transform function. + + Args: + value (Any): The patch value optionally containing "_cls" key. + + Returns: + Any: The transformed value or the original value if no transform is found. + """ + if not isinstance(value, dict): + return value + + func = None + cls_name = value.get("_cls") + if cls_name: + func = next( + (fn for cls, fn in REGISTRY.items() if cls.__name__ == cls_name), + None, + ) + if not func: + raise ValueError(f"No transform registered for class '{cls_name}'") + return func(value) if func else value diff --git a/fiftyone/server/utils/json_transform/types.py b/fiftyone/server/utils/json_transform/types.py new file mode 100644 index 00000000000..d1dc08ada3d --- /dev/null +++ b/fiftyone/server/utils/json_transform/types.py @@ -0,0 +1,38 @@ +"""JSON types registry. + +| Copyright 2017-2025, Voxel51, Inc. +| `voxel51.com `_ +| +""" +from fiftyone.server.utils.json_transform.transform import register +import fiftyone.core.labels as fol + + +@register(fol.Classification) +def transform_classification(value: dict) -> fol.Classification: + return fol.Classification.from_dict(value) + + +@register(fol.Classifications) +def transform_classifications(value: dict) -> fol.Classifications: + return fol.Classifications.from_dict(value) + + +@register(fol.Detection) +def transform_detection(value: dict) -> fol.Detection: + return fol.Detection.from_dict(value) + + +@register(fol.Detections) +def transform_detections(value: dict) -> fol.Detections: + return fol.Detections.from_dict(value) + + +@register(fol.Polyline) +def transform_polyline(value: dict) -> fol.Polyline: + return fol.Polyline.from_dict(value) + + +@register(fol.Polylines) +def transform_polylines(value: dict) -> fol.Polylines: + return fol.Polylines.from_dict(value) diff --git a/fiftyone/server/utils/jsonpatch/__init__.py b/fiftyone/server/utils/jsonpatch/__init__.py new file mode 100644 index 00000000000..4348933e199 --- /dev/null +++ b/fiftyone/server/utils/jsonpatch/__init__.py @@ -0,0 +1,86 @@ +""" +Apply JSON patch to python objects. + +| Copyright 2017-2025, Voxel51, Inc.
+| `voxel51.com `_ +| +""" + +from typing import Any, Callable, Iterable, Optional, Union + +from fiftyone.server.utils.jsonpatch.methods import ( + add, + copy, + move, + remove, + replace, + test, +) +from fiftyone.server.utils.jsonpatch.patch import ( + Patch, + Operation, + Add, + Copy, + Move, + Remove, + Replace, + Test, +) + +__PATCH_MAP = { + Operation.ADD: Add, + Operation.COPY: Copy, + Operation.MOVE: Move, + Operation.REMOVE: Remove, + Operation.REPLACE: Replace, + Operation.TEST: Test, +} + + +def parse( + patches: Union[dict[str, Any], Iterable[dict[str, Any]]], + *, + transform_fn: Optional[Callable[[Any], Any]] = None, +) -> Union[Patch, list[Patch]]: + """Parses the provided JSON patch dicts into Patch objects.""" + + return_one = False + if isinstance(patches, dict): + patches = [patches] + return_one = True + elif not isinstance(patches, Iterable): + raise TypeError("Patches must be a dict or an iterable of dicts") + + parsed = [] + for patch in patches: + try: + op_str = patch["op"] + path = patch["path"] + except KeyError as err: + raise ValueError(f"Missing {err} field") from err + + try: + op = Operation(op_str) + patch_cls = __PATCH_MAP[op] + except (ValueError, KeyError) as err: + raise TypeError(f"Unsupported operation '{op_str}'") from err + + kwargs = {"path": path} + try: + if op in (Operation.ADD, Operation.REPLACE, Operation.TEST): + kwargs.update( + value=( + transform_fn(patch["value"]) + if transform_fn + else patch["value"] + ) + ) + + if op in (Operation.COPY, Operation.MOVE): + kwargs.update(from_=patch["from"]) + + parsed.append(patch_cls(**kwargs)) + except Exception as err: + raise ValueError(f"Invalid operation '{op_str}'") from err + + return parsed if not return_one else parsed[0] diff --git a/fiftyone/server/utils/jsonpatch/methods.py b/fiftyone/server/utils/jsonpatch/methods.py new file mode 100644 index 00000000000..b167531628a --- /dev/null +++ b/fiftyone/server/utils/jsonpatch/methods.py @@ -0,0 +1,305 @@ +""" +Apply JSON patch to python objects. + +| Copyright 2017-2025, Voxel51, Inc. +| `voxel51.com `_ +| +""" + +from typing import TypeVar, Union + +import jsonpointer + + +T = TypeVar("T") +V = TypeVar("V") + + +def to_json_pointer( + path: Union[str, jsonpointer.JsonPointer], +) -> jsonpointer.JsonPointer: + """Converts a string path to a `jsonpointer.JsonPointer`.""" + + if isinstance(path, jsonpointer.JsonPointer): + return path + + try: + return jsonpointer.JsonPointer(path) + except jsonpointer.JsonPointerException as err: + raise ValueError(f"Invalid JSON pointer path: {path}") from err + + +def get(src: T, path: Union[str, jsonpointer.JsonPointer]) -> V: + """Gets a value from an object. + + Args: + src (T): The source object. + path (Union[str, jsonpointer.JsonPointer]): The JSON pointer path to + resolve. + + Raises: + AttributeError: If the path cannot be fully resolved. + + Returns: + V: The resolved value. 
+ """ + try: + pointer = to_json_pointer(path) + + value = src + for name in pointer.parts: + try: + value = getattr(value, name) + continue + except AttributeError as attr_err: + if hasattr(value, "__getitem__"): + try: + value = value[name] + continue + except TypeError as err: + if "list indices must be integers or slices" in str( + err + ): + idx = int(name) + + if not 0 <= idx < len(value): + raise IndexError( + "List index out of range" + ) from err + + value = value[idx] + continue + + raise attr_err + + return value + except Exception as err: + raise AttributeError(f"Cannot resolve path: {path}: {err}") from err + + +def add( + src: T, path: Union[str, jsonpointer.JsonPointer], value: V +) -> Union[T, V]: + """The "add" operation performs one of the following functions, + depending upon what the target location references: + + o If the target location specifies an array index, a new value is + inserted into the array at the specified index. + o If the target location specifies an object member that does not + already exist, a new member is added to the object. + o If the target location specifies an object member that does exist, + that member's value is replaced. + + The operation object MUST contain a "value" member whose content + specifies the value to be added. + + When the operation is applied, the target location MUST reference one + of: + + o The root of the target document - whereupon the specified value + becomes the entire content of the target document. + + o A member to add to an existing object - whereupon the supplied + value is added to that object at the indicated location. If the + member already exists, it is replaced by the specified value. + + o An element to add to an existing array - whereupon the supplied + value is added to the array at the indicated location. Any + elements at or above the specified index are shifted one position + to the right. The specified index MUST NOT be greater than the + number of elements in the array. If the "-" character is used to + index the end of the array (see [RFC6901]), this has the effect of + appending the value to the array. + """ + + pointer = to_json_pointer(path) + if not pointer.parts: + return value + + target = get(src, jsonpointer.JsonPointer.from_parts(pointer.parts[:-1])) + name = pointer.parts[-1] + + try: + if hasattr(target, "__setitem__"): + try: + target[name] = value + except TypeError as type_err: + if "list indices must be integers or slices" not in str( + type_err + ): + raise type_err + + if isinstance(target, list) and name == "-": + target.append(value) + else: + try: + idx = int(name) + except ValueError as val_err: + raise val_err + + if not 0 <= idx <= len(target): + raise IndexError( + "List index out of range" + ) from type_err + + target.insert(idx, value) + else: + try: + setattr(target, name, value) + except AttributeError as err: + raise err + + except Exception as err: + raise ValueError( + f"Unable to add value with path: {pointer.path}" + ) from err + + return src + + +def copy( + src: T, + path: Union[str, jsonpointer.JsonPointer], + from_: Union[str, jsonpointer.JsonPointer], +) -> T: + """The "copy" operation copies the value at a specified location to the + target location. + + The operation object MUST contain a "from" member, which is a string + containing a JSON Pointer value that references the location in the + target document to copy the value from. + + The "from" location MUST exist for the operation to be successful. 
+
+    This operation is functionally identical to an "add" operation at the
+    target location using the value specified in the "from" member.
+    """
+
+    pointer = to_json_pointer(path)
+    from_pointer = to_json_pointer(from_)
+
+    value = get(src, from_pointer)
+    add(src, pointer, value)
+    return src
+
+
+def move(
+    src: T,
+    path: Union[str, jsonpointer.JsonPointer],
+    from_: Union[str, jsonpointer.JsonPointer],
+) -> T:
+    """The "move" operation removes the value at a specified location and
+    adds it to the target location.
+
+    The operation object MUST contain a "from" member, which is a string
+    containing a JSON Pointer value that references the location in the
+    target document to move the value from.
+
+    The "from" location MUST exist for the operation to be successful.
+
+    This operation is functionally identical to a "remove" operation on
+    the "from" location, followed immediately by an "add" operation at
+    the target location with the value that was just removed.
+    """
+
+    pointer = to_json_pointer(path)
+    from_pointer = to_json_pointer(from_)
+
+    value = get(src, from_pointer)
+    remove(src, from_pointer)
+    add(src, pointer, value)
+
+    return src
+
+
+def remove(src: T, path: Union[str, jsonpointer.JsonPointer]) -> T:
+    """The "remove" operation removes the value at the target location.
+
+    The target location MUST exist for the operation to be successful.
+
+    If removing an element from an array, any elements above the
+    specified index are shifted one position to the left.
+    """
+
+    pointer = to_json_pointer(path)
+    if not pointer.parts:
+        raise ValueError("Cannot remove the root document")
+
+    target = get(src, jsonpointer.JsonPointer.from_parts(pointer.parts[:-1]))
+    name = pointer.parts[-1]
+
+    # ensure value exists
+    get(target, jsonpointer.JsonPointer.from_parts([name]))
+
+    try:
+        if hasattr(target, "__delitem__"):
+            try:
+                del target[name]
+            except TypeError as err:
+                if "list indices must be integers or slices" not in str(err):
+                    raise err
+
+                target.pop(int(name))
+        else:
+            delattr(target, name)
+
+    except Exception as err:
+        raise ValueError(
+            f"Unable to remove value with path: {pointer.path}"
+        ) from err
+
+    return src
+
+
+def replace(src: T, path: Union[str, jsonpointer.JsonPointer], value: V) -> T:
+    """The "replace" operation replaces the value at the target location
+    with a new value. The operation object MUST contain a "value" member
+    whose content specifies the replacement value.
+
+    The target location MUST exist for the operation to be successful.
+
+    This operation is functionally identical to a "remove" operation for
+    a value, followed immediately by an "add" operation at the same
+    location with the replacement value.
+    """
+
+    pointer = to_json_pointer(path)
+
+    remove(src, pointer)
+    add(src, pointer, value)
+
+    return src
+
+
+def test(src: T, path: Union[str, jsonpointer.JsonPointer], value: V) -> T:
+    """The "test" operation tests that a value at the target location is
+    equal to a specified value.
+
+    The operation object MUST contain a "value" member that conveys the
+    value to be compared to the target location's value.
+
+    The target location MUST be equal to the "value" value for the
+    operation to be considered successful.
+
+    Here, "equal" means that the value at the target location and the
+    value conveyed by "value" are of the same JSON type, and that they
+    are considered equal by the following rules for that type:
+
+    o  strings: are considered equal if they contain the same number of
+       Unicode characters and their code points are byte-by-byte equal.
+
+    o  numbers: are considered equal if their values are numerically
+       equal.
+
+    o  arrays: are considered equal if they contain the same number of
+       values, and if each value can be considered equal to the value at
+       the corresponding position in the other array, using this list of
+       type-specific rules.
+    """
+
+    pointer = to_json_pointer(path)
+    target = src if not pointer.parts else get(src, pointer)
+
+    if value != target:
+        raise ValueError(f"Test operation failed for path: {pointer.path}")
+
+    return src
diff --git a/fiftyone/server/utils/jsonpatch/patch.py b/fiftyone/server/utils/jsonpatch/patch.py
new file mode 100644
index 00000000000..f43a32caff5
--- /dev/null
+++ b/fiftyone/server/utils/jsonpatch/patch.py
@@ -0,0 +1,142 @@
+"""
+Apply JSON patch to Python objects.
+
+| Copyright 2017-2025, Voxel51, Inc.
+| `voxel51.com <https://voxel51.com/>`_
+|
+"""
+
+import abc
+import enum
+import inspect
+from typing import Any, Generic, TypeVar, Union
+
+
+from fiftyone.server.utils.jsonpatch import methods
+
+T = TypeVar("T")
+V = TypeVar("V")
+
+
+class Operation(str, enum.Enum):
+    """The type of JSON Patch operation."""
+
+    ADD = "add"
+    REMOVE = "remove"
+    REPLACE = "replace"
+    MOVE = "move"
+    COPY = "copy"
+    TEST = "test"
+
+
+class Patch(abc.ABC):
+    """A JSON Patch operation.
+
+    See: https://datatracker.ietf.org/doc/html/rfc6902
+    """
+
+    op: Operation
+
+    def __init_subclass__(cls):
+        if not inspect.isabstract(cls) and not isinstance(
+            getattr(cls, "op", None), Operation
+        ):
+            raise TypeError("Subclass must define 'op' class variable")
+
+    def __init__(self, path: str):
+        self._pointer = methods.to_json_pointer(path)
+
+    @property
+    def path(self) -> str:
+        """The JSON pointer path of the patch operation."""
+        return self._pointer.path
+
+    @abc.abstractmethod
+    def apply(self, src: Any) -> Any:
+        """Applies the patch operation to an object.
+
+        Args:
+            src (T): The source object.
+
+        Raises:
+            AttributeError: If the path cannot be fully resolved.
+            ValueError: If the patch operation fails.
+
+        Returns:
+            T: The patched source object.
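+
+        Example (an illustrative sketch; ``Replace`` is defined below)::
+
+            src = {"age": 30}
+            Replace(path="/age", value=31).apply(src)
+            # src == {"age": 31}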
+        """
+
+
+class PatchWithValue(Patch, Generic[T], abc.ABC):
+    """A JSON Patch operation that requires a value."""
+
+    def __init__(self, path: str, value: T):
+        super().__init__(path)
+        self.value = value
+
+
+class PatchWithFrom(Patch, abc.ABC):
+    """A JSON Patch operation that requires a from path."""
+
+    def __init__(self, path: str, from_: str):
+        super().__init__(path)
+        self._from_pointer = methods.to_json_pointer(from_)
+
+    @property
+    def from_(self) -> str:
+        """The JSON pointer 'from' path of the patch operation."""
+        return self._from_pointer.path
+
+
+class Add(PatchWithValue):
+    """Helper class for JSON Patch "add" operation."""
+
+    op = Operation.ADD
+
+    def apply(self, src: T) -> Union[T, V]:
+        return methods.add(src, self._pointer, self.value)
+
+
+class Copy(PatchWithFrom):
+    """Helper class for JSON Patch "copy" operation."""
+
+    op = Operation.COPY
+
+    def apply(self, src: T) -> T:
+        return methods.copy(src, self._pointer, self._from_pointer)
+
+
+class Move(PatchWithFrom):
+    """Helper class for JSON Patch "move" operation."""
+
+    op = Operation.MOVE
+
+    def apply(self, src: T) -> T:
+        return methods.move(src, self._pointer, self._from_pointer)
+
+
+class Remove(Patch):
+    """Helper class for JSON Patch "remove" operation."""
+
+    op = Operation.REMOVE
+
+    def apply(self, src: T) -> T:
+        return methods.remove(src, self._pointer)
+
+
+class Replace(PatchWithValue):
+    """Helper class for JSON Patch "replace" operation."""
+
+    op = Operation.REPLACE
+
+    def apply(self, src: T) -> T:
+        return methods.replace(src, self._pointer, self.value)
+
+
+class Test(PatchWithValue):
+    """Helper class for JSON Patch "test" operation."""
+
+    op = Operation.TEST
+
+    def apply(self, src: T) -> T:
+        return methods.test(src, self._pointer, self.value)
diff --git a/setup.py b/setup.py
index b4cf1865a66..2d91a7eae9b 100644
--- a/setup.py
+++ b/setup.py
@@ -44,6 +44,7 @@ def get_version():
         "humanize",
         "hypercorn>=0.13.2",
         "Jinja2>=3",
+        "jsonpatch",
         "matplotlib",
         "mongoengine~=0.29.1",
         "motor~=3.6.0",
diff --git a/tests/unittests/operators/delegated_tests.py b/tests/unittests/operators/delegated_tests.py
index ce689fea7cd..cb74480c890 100644
--- a/tests/unittests/operators/delegated_tests.py
+++ b/tests/unittests/operators/delegated_tests.py
@@ -1671,3 +1671,34 @@ def test_set_queue_remote_service(self, mock_get_operator):
         #####
         self.assertRaises(PermissionError, dos.set_queued, op_id)
         #####
+
+    def test_queue_panel_delegated_op(self, mock_get_operator):
+        """Queue a delegated operation that comes from a panel"""
+        self.mock_is_remote_service.return_value = True
+        db = delegated_operation.MongoDelegatedOperationRepo()
+        dos = DelegatedOperationService(repo=db)
+        ctx = ExecutionContext(
+            request_params={
+                "params": {
+                    "panel_id": bson.ObjectId(),
+                    "panel_state": {"foo2": "bar2"},
+                }
+            }
+        )
+        ctx.request_params = {"foo": "bar"}
+        ctx.params = {
+            "panel_id": bson.ObjectId(),
+            "panel_state": {"foo2": "bar2"},
+        }
+
+        #####
+        doc = dos.queue_operation(
+            operator=f"{TEST_DO_PREFIX}/operator/foo",
+            label=mock_get_operator.return_value.name,
+            delegation_target="test_target",
+            context=ctx,
+        )
+        #####
+
+        self.docs_to_delete.append(doc)
+        self.assertEqual(doc.run_state, ExecutionRunState.SCHEDULED)
diff --git a/tests/unittests/sample_route_tests.py b/tests/unittests/sample_route_tests.py
index 5aa2fa72953..38bde0dfea8 100644
--- a/tests/unittests/sample_route_tests.py
+++ b/tests/unittests/sample_route_tests.py
@@ -53,6 +53,20 @@ def tearDown(self):
         if self.dataset and fo.dataset_exists(self.dataset.name):
             fo.delete_dataset(self.dataset.name)
 
+    def _create_mock_request(self, payload, content_type="application/json"):
+        """Helper to create a mock request object."""
+        mock_request = MagicMock()
+        mock_request.path_params = {
+            "dataset_id": self.dataset_id,
+            "sample_id": str(self.sample.id),
+        }
+        mock_request.headers = {"Content-Type": content_type}
+
+        mock_request.body = AsyncMock(
+            return_value=json_util.dumps(payload).encode("utf-8")
+        )
+        return mock_request
+
     async def test_update_detection(self):
         """
         Tests updating an existing detection
@@ -77,15 +91,9 @@ async def test_update_detection(self):
             "tags": None,
         }
 
-        mock_request = MagicMock()
-        mock_request.path_params = {
-            "dataset_id": self.dataset_id,
-            "sample_id": str(self.sample.id),
-        }
-        mock_request.body = AsyncMock(
-            return_value=json_util.dumps(patch_payload).encode("utf-8")
+        response = await self.mutator.patch(
+            self._create_mock_request(patch_payload)
         )
-        response = await self.mutator.patch(mock_request)
         response_dict = json.loads(response.body)
         self.assertIsInstance(response, Response)
         self.assertEqual(response.status_code, 200)
@@ -136,15 +144,9 @@ async def test_add_detection(self):
             },
         }
 
-        mock_request = MagicMock()
-        mock_request.path_params = {
-            "dataset_id": self.dataset_id,
-            "sample_id": str(self.sample.id),
-        }
-        mock_request.body = AsyncMock(
-            return_value=json_util.dumps(patch_payload).encode("utf-8")
+        response = await self.mutator.patch(
+            self._create_mock_request(patch_payload)
         )
-        response = await self.mutator.patch(mock_request)
         response_dict = json.loads(response.body)
         self.assertIsInstance(response_dict, dict)
         updated_detection = self.sample.ground_truth_2.detections[0]
@@ -165,15 +167,9 @@ async def test_add_classification(self):
             },
         }
 
-        mock_request = MagicMock()
-        mock_request.path_params = {
-            "dataset_id": self.dataset_id,
-            "sample_id": str(self.sample.id),
-        }
-        mock_request.body = AsyncMock(
-            return_value=json_util.dumps(patch_payload).encode("utf-8")
+        response = await self.mutator.patch(
+            self._create_mock_request(patch_payload)
         )
-        response = await self.mutator.patch(mock_request)
         response_dict = json.loads(response.body)
         self.assertIsInstance(response_dict, dict)
         updated_detection = self.sample.weather
@@ -228,22 +224,13 @@ async def test_unsupported_label_class(self):
                 "label": "invalid",
             }
         }
-        mock_request = MagicMock()
-        mock_request.path_params = {
-            "dataset_id": self.dataset_id,
-            "sample_id": str(self.sample.id),
-        }
-
-        mock_request.body = AsyncMock(
-            return_value=json_util.dumps(patch_payload).encode("utf-8")
-        )
         with self.assertRaises(HTTPException) as cm:
-            await self.mutator.patch(mock_request)
+            await self.mutator.patch(self._create_mock_request(patch_payload))
         self.assertEqual(cm.exception.status_code, 400)
         self.assertEqual(
             cm.exception.detail["bad_label"],
-            "Unsupported label class 'NonExistentLabelType'",
+            "No transform registered for class 'NonExistentLabelType'",
         )
 
         # Verify the sample was not modified
@@ -263,17 +250,8 @@ async def test_malformed_label_data(self):
             }
         }
 
-        mock_request = MagicMock()
-        mock_request.path_params = {
-            "dataset_id": self.dataset_id,
-            "sample_id": str(self.sample.id),
-        }
-
-        mock_request.body = AsyncMock(
-            return_value=json_util.dumps(patch_payload).encode("utf-8")
-        )
         with self.assertRaises(HTTPException) as cm:
-            await self.mutator.patch(mock_request)
+            await self.mutator.patch(self._create_mock_request(patch_payload))
 
         self.assertEqual(cm.exception.status_code, 400)
         response_dict = cm.exception.detail
@@ -291,6 +269,325 @@ async def test_malformed_label_data(self):
             str(self.initial_detection_id),
         )
 
+    async def test_patch_replace_primitive_field(self):
+        """Tests 'replace' on a primitive field with json-patch."""
+        new_value = "updated_value"
+        patch_payload = [
+            {"op": "replace", "path": "/primitive_field", "value": new_value}
+        ]
+
+        mock_request = self._create_mock_request(
+            patch_payload, content_type="application/json-patch+json"
+        )
+
+        response = await self.mutator.patch(mock_request)
+        response_dict = json.loads(response.body)
+        self.assertEqual(response_dict["primitive_field"], new_value)
+
+        self.sample.reload()
+        self.assertEqual(self.sample.primitive_field, new_value)
+
+    async def test_patch_replace_nested_label_attribute(self):
+        """Tests 'replace' on a nested attribute of a label with json-patch."""
+        new_label = "dog"
+        patch_payload = [
+            {
+                "op": "replace",
+                "path": "/ground_truth/detections/0/label",
+                "value": new_label,
+            }
+        ]
+        mock_request = self._create_mock_request(
+            patch_payload, content_type="application/json-patch+json"
+        )
+        await self.mutator.patch(mock_request)
+
+        self.sample.reload()
+        self.assertEqual(
+            self.sample.ground_truth.detections[0].label, new_label
+        )
+
+    async def test_patch_add_detection_to_list(self):
+        """Tests 'add' to a list of labels, testing the transform function."""
+        new_detection = {
+            "_cls": "Detection",
+            "label": "dog",
+            "bounding_box": [0.5, 0.5, 0.2, 0.2],
+        }
+        patch_payload = [
+            {
+                "op": "add",
+                "path": "/ground_truth/detections/-",  # Path to the list
+                "value": new_detection,
+            }
+        ]
+        mock_request = self._create_mock_request(
+            patch_payload, content_type="application/json-patch+json"
+        )
+
+        await self.mutator.patch(mock_request)
+
+        self.sample.reload()
+        self.assertEqual(len(self.sample.ground_truth.detections), 2)
+        self.assertIsInstance(
+            self.sample.ground_truth.detections[1], fol.Detection
+        )
+        self.assertEqual(self.sample.ground_truth.detections[1].label, "dog")
+
+    async def test_patch_remove_detection_from_list(self):
+        """Tests 'remove' from a list of labels."""
+        self.assertEqual(len(self.sample.ground_truth.detections), 1)
+
+        patch_payload = [
+            {"op": "remove", "path": "/ground_truth/detections/0"}
+        ]
+        mock_request = self._create_mock_request(
+            patch_payload, content_type="application/json-patch+json"
+        )
+
+        await self.mutator.patch(mock_request)
+
+        self.sample.reload()
+        self.assertEqual(len(self.sample.ground_truth.detections), 0)
+
+    async def test_patch_multiple_operations(self):
+        """Tests a patch request with multiple operations."""
+        patch_payload = [
+            {"op": "replace", "path": "/primitive_field", "value": "multi-op"},
+            {"op": "remove", "path": "/ground_truth/detections/0"},
+        ]
+        mock_request = self._create_mock_request(
+            patch_payload, content_type="application/json-patch+json"
+        )
+
+        await self.mutator.patch(mock_request)
+
+        self.sample.reload()
+        self.assertEqual(self.sample.primitive_field, "multi-op")
+        self.assertEqual(len(self.sample.ground_truth.detections), 0)
+
+    async def test_patch_invalid_path(self):
+        """Tests that a 400 is raised for an invalid path."""
+        patch_payload = [
+            {"op": "replace", "path": "/non_existent_field", "value": "test"}
+        ]
+        mock_request = self._create_mock_request(
+            patch_payload, content_type="application/json-patch+json"
+        )
+
+        with self.assertRaises(HTTPException) as cm:
+            await self.mutator.patch(mock_request)
+
+        self.assertEqual(cm.exception.status_code, 400)
+        self.assertIn(str(patch_payload[0]), cm.exception.detail)
+
+    async def test_patch_invalid_format(self):
+        """Tests that a 400 is raised for a malformed patch operation."""
+        patch_payload = [
+            {"path": "/primitive_field", "value": "test"}
+        ]  # missing 'op'
+        mock_request = self._create_mock_request(
+            patch_payload, content_type="application/json-patch+json"
+        )
+
+        with self.assertRaises(HTTPException) as cm:
+            await self.mutator.patch(mock_request)
+
+        self.assertEqual(cm.exception.status_code, 400)
+        self.assertIn(
+            "Failed to parse patches due to",
+            cm.exception.detail,
+        )
+
+
+class SampleFieldRouteTests(unittest.IsolatedAsyncioTestCase):
+    def setUp(self):
+        """Sets up a persistent dataset with a sample for each test."""
+        self.mutator = fors.SampleField(
+            scope={"type": "http"},
+            receive=AsyncMock(),
+            send=AsyncMock(),
+        )
+        self.dataset = fo.Dataset()
+        self.dataset.persistent = True
+        self.dataset_id = self.dataset._doc.id
+
+        sample = fo.Sample(filepath="/tmp/test_sample_field.jpg")
+
+        self.detection_id_1 = ObjectId()
+        self.detection_id_2 = ObjectId()
+        sample["ground_truth"] = fol.Detections(
+            detections=[
+                fol.Detection(
+                    id=self.detection_id_1,
+                    label="cat",
+                    bounding_box=[0.1, 0.1, 0.2, 0.2],
+                    confidence=0.9,
+                ),
+                fol.Detection(
+                    id=self.detection_id_2,
+                    label="dog",
+                    bounding_box=[0.4, 0.4, 0.3, 0.3],
+                    confidence=0.8,
+                ),
+            ]
+        )
+        sample["scalar_field"] = "not a list"
+
+        self.dataset.add_sample(sample)
+        self.sample = sample
+
+    def tearDown(self):
+        """Deletes the dataset after each test."""
+        if self.dataset and fo.dataset_exists(self.dataset.name):
+            fo.delete_dataset(self.dataset.name)
+
+    def _create_mock_request(self, payload, field_path, field_id):
+        """Helper to create a mock request object for SampleField."""
+        mock_request = MagicMock()
+        mock_request.path_params = {
+            "dataset_id": self.dataset_id,
+            "sample_id": str(self.sample.id),
+            "field_path": field_path,
+            "field_id": str(field_id),
+        }
+        mock_request.headers = {"Content-Type": "application/json"}
+
+        mock_request.body = AsyncMock(
+            return_value=json_util.dumps(payload).encode("utf-8")
+        )
+        return mock_request
+
+    async def test_update_label_in_list(self):
+        """Tests updating a label within a list field."""
+        new_label = "person"
+        patch_payload = [
+            {"op": "replace", "path": "/label", "value": new_label}
+        ]
+        field_path = "ground_truth.detections"
+        field_id = self.detection_id_1
+
+        request = self._create_mock_request(
+            patch_payload, field_path, field_id
+        )
+        response = await self.mutator.patch(request)
+        response_dict = json.loads(response.body)
+
+        self.assertIsInstance(response, Response)
+        self.assertEqual(response.status_code, 200)
+        # check response body
+        self.assertEqual(response_dict["label"], new_label)
+        self.assertEqual(response_dict["_id"]["$oid"], str(field_id))
+
+        # check database state
+        self.sample.reload()
+        detection1 = self.sample.ground_truth.detections[0]
+        detection2 = self.sample.ground_truth.detections[1]
+
+        self.assertEqual(detection1.id, str(field_id))
+        self.assertEqual(detection1.label, new_label)
+        self.assertEqual(
+            detection2.id, str(self.detection_id_2)
+        )  # ensure other item is not modified
+        self.assertEqual(detection2.label, "dog")
+
+    async def test_dataset_not_found(self):
+        """Tests that a 404 is raised for a non-existent dataset."""
+        request = self._create_mock_request(
+            [], "ground_truth.detections", self.detection_id_1
+        )
+        request.path_params["dataset_id"] = "non-existent-dataset"
+
+        with self.assertRaises(HTTPException) as cm:
+            await self.mutator.patch(request)
+
+        self.assertEqual(cm.exception.status_code, 404)
+        self.assertEqual(
+            cm.exception.detail, "Dataset 'non-existent-dataset' not found"
+        )
+
+    async def test_sample_not_found(self):
+        """Tests that a 404 is raised for a non-existent sample."""
+        bad_id = str(ObjectId())
+        request = self._create_mock_request(
+            [], "ground_truth.detections", self.detection_id_1
+        )
+        request.path_params["sample_id"] = bad_id
+
+        with self.assertRaises(HTTPException) as cm:
+            await self.mutator.patch(request)
+
+        self.assertEqual(cm.exception.status_code, 404)
+        self.assertEqual(
+            cm.exception.detail,
+            f"Sample '{bad_id}' not found in dataset '{self.dataset_id}'",
+        )
+
+    async def test_field_path_not_found(self):
+        """Tests that a 404 is raised for a non-existent field path."""
+        bad_path = "non_existent.path"
+        request = self._create_mock_request([], bad_path, self.detection_id_1)
+
+        with self.assertRaises(HTTPException) as cm:
+            await self.mutator.patch(request)
+
+        self.assertEqual(cm.exception.status_code, 404)
+        self.assertEqual(
+            cm.exception.detail,
+            f"Field '{bad_path}' not found in sample '{self.sample.id}'",
+        )
+
+    async def test_field_is_not_a_list(self):
+        """Tests that a 400 is raised if the field path does not point to a list."""
+        field_path = "scalar_field"
+        request = self._create_mock_request(
+            [], field_path, self.detection_id_1
+        )
+
+        with self.assertRaises(HTTPException) as cm:
+            await self.mutator.patch(request)
+
+        self.assertEqual(cm.exception.status_code, 400)
+        self.assertEqual(
+            cm.exception.detail,
+            f"Field '{field_path}' is not a list",
+        )
+
+    async def test_field_id_not_found_in_list(self):
+        """Tests that a 404 is raised if the field ID is not in the list."""
+        bad_id = str(ObjectId())
+        field_path = "ground_truth.detections"
+        request = self._create_mock_request([], field_path, bad_id)
+
+        with self.assertRaises(HTTPException) as cm:
+            await self.mutator.patch(request)
+
+        self.assertEqual(cm.exception.status_code, 404)
+        self.assertEqual(
+            cm.exception.detail,
+            f"Field with id '{bad_id}' not found in field '{field_path}'",
+        )
+
+    async def test_invalid_patch_operation(self):
+        """Tests that a 400 is raised for an invalid patch operation."""
+        patch_payload = [
+            {"op": "replace", "path": "/non_existent_attr", "value": "test"}
+        ]
+        field_path = "ground_truth.detections"
+        field_id = self.detection_id_1
+        request = self._create_mock_request(
+            patch_payload, field_path, field_id
+        )
+
+        with self.assertRaises(HTTPException) as cm:
+            await self.mutator.patch(request)
+
+        self.assertEqual(cm.exception.status_code, 400)
+        self.assertIn(str(patch_payload[0]), cm.exception.detail)
+        self.assertIn(
+            "non_existent_attr", cm.exception.detail[str(patch_payload[0])]
+        )
+
 
 if __name__ == "__main__":
     unittest.main(verbosity=2)
diff --git a/tests/unittests/server/utils/jsonpatch/test_json_patch_patch.py b/tests/unittests/server/utils/jsonpatch/test_json_patch_patch.py
new file mode 100644
index 00000000000..6ef9b5d7a09
--- /dev/null
+++ b/tests/unittests/server/utils/jsonpatch/test_json_patch_patch.py
@@ -0,0 +1,49 @@
+"""
+| Copyright 2017-2025, Voxel51, Inc.
+| `voxel51.com <https://voxel51.com/>`_
+|
+"""
+
+from unittest import mock
+
+
+import pytest
+
+from fiftyone.server.utils.jsonpatch import methods, patch
+
+
+@pytest.mark.parametrize(
+    "patch_instance",
+    [
+        pytest.param(instance, id=instance.__class__.__name__)
+        for instance in (
+            patch.Add(path="/a/b/c/", value=mock.Mock()),
+            patch.Copy(path="/a/b/c/", from_="/d/e/f"),
+            patch.Move(path="/a/b/c/", from_="/d/e/f"),
+            patch.Remove(path="/a/b/c/"),
+            patch.Replace(path="/a/b/c/", value=mock.Mock()),
+            patch.Test(path="/a/b/c/", value=mock.Mock()),
+        )
+    ],
+)
def test_helper_classes(patch_instance: patch.Patch):
+    """Tests that each helper class delegates to its corresponding method
+    with the expected arguments."""
+    with mock.patch.object(
+        methods, patch_instance.__class__.__name__.lower()
+    ) as m:
+        #####
+        res = patch_instance.apply(src := mock.Mock())
+        #####
+
+        # pylint: disable-next=protected-access
+        args = [src, patch_instance._pointer]
+
+        if hasattr(patch_instance, "value"):
+            args.append(patch_instance.value)
+
+        if hasattr(patch_instance, "from_"):
+            # pylint: disable-next=protected-access
+            args.append(patch_instance._from_pointer)
+
+        m.assert_called_once_with(*args)
+        assert res == m.return_value
diff --git a/tests/unittests/server/utils/jsonpatch/test_jsonpatch.py b/tests/unittests/server/utils/jsonpatch/test_jsonpatch.py
new file mode 100644
index 00000000000..c94033009e7
--- /dev/null
+++ b/tests/unittests/server/utils/jsonpatch/test_jsonpatch.py
@@ -0,0 +1,175 @@
+"""
+| Copyright 2017-2025, Voxel51, Inc.
+| `voxel51.com <https://voxel51.com/>`_
+|
+"""
+
+from unittest import mock
+
+
+import pytest
+
+from fiftyone.server.utils import jsonpatch
+
+
+class TestParse:
+    """Tests for jsonpatch.parse."""
+
+    @pytest.fixture(name="patches")
+    def fixture_patches(self):
+        """Returns a list of example patch dicts."""
+        return [
+            {"op": "add", "path": "/a/b/c", "value": mock.Mock()},
+            {"op": "copy", "path": "/d/e/f", "from": "/a/b/c"},
+            {"op": "move", "path": "/d/e/f", "from": "/a/b/c"},
+            {"op": "remove", "path": "/x/y/0"},
+            {"op": "replace", "path": "/x/y/0", "value": mock.Mock()},
+            {"op": "test", "path": "/x/y/0", "value": mock.Mock()},
+        ]
+
+    @staticmethod
+    def test_missing_common_fields(patches):
+        """Tests that 'op' and 'path' are required fields."""
+        for key in ("op", "path"):
+            patch = patches[0].copy()
+            patch.pop(key)
+
+            with pytest.raises(ValueError):
+                #####
+                jsonpatch.parse(patch)
+                #####
+
+    @staticmethod
+    def test_unsupported(patches):
+        """Tests that unsupported operations raise TypeError."""
+
+        invalid_patch = patches[0].copy()
+        invalid_patch["op"] = "invalid"
+
+        with pytest.raises(TypeError):
+            #####
+            jsonpatch.parse(invalid_patch)
+            #####
+
+    @staticmethod
+    def test_missing_required_values(patches):
+        """Tests that patches missing required 'value' or 'from' fields
+        raise ValueError."""
+
+        for patch in patches:
+            if patch["op"] == "remove":
+                continue
+
+            if patch["op"] in ("add", "replace", "test"):
+                patch.pop("value")
+
+            if patch["op"] in ("copy", "move"):
+                patch.pop("from")
+
+            with pytest.raises(ValueError):
+                #####
+                jsonpatch.parse(patch)
+                #####
+
+    @staticmethod
+    def test_invalid_path(patches):
+        """Tests that invalid paths raise ValueError."""
+
+        patch = patches[0]
+        patch["path"] = patch["path"][1:]
+
+        with pytest.raises(ValueError):
+            #####
+            jsonpatch.parse(patch)
+            #####
+
+    @staticmethod
+    def test_bad_transform(patches):
+        """Tests that exceptions in transform_fn raise ValueError."""
+
+        transform_fn = mock.Mock()
+        transform_fn.side_effect = Exception("Uh oh!")
+
+        for patch in patches:
+            if patch["op"] not in ("add", "replace", "test"):
+                continue
+
+            with pytest.raises(ValueError):
+                #####
+                jsonpatch.parse(patch, transform_fn=transform_fn)
+                #####
+
+            transform_fn.assert_called_with(patch["value"])
+
+        assert transform_fn.call_count == 3
+
+    @staticmethod
+    @pytest.mark.parametrize(
+        "transform_fn",
+        (
+            pytest.param(None, id=""),
+            pytest.param(mock.Mock(), id="transform"),
+        ),
+    )
+    def test_ok(transform_fn, patches):
+        """Tests that valid patches are parsed correctly."""
+
+        for patch in patches:
+            #####
+            res = jsonpatch.parse(patch, transform_fn=transform_fn)
+            #####
+
+            assert isinstance(res, jsonpatch.Patch)
+            res: jsonpatch.Patch
+            assert res.op == patch["op"]
+            assert res.path == patch["path"]
+
+            if patch["op"] in ("add", "replace", "test"):
+                if transform_fn is None:
+                    assert res.value == patch["value"]
+                else:
+                    transform_fn.assert_called_with(patch["value"])
+                    assert res.value == transform_fn.return_value
+
+            if patch["op"] in ("copy", "move"):
+                assert res.from_ == patch["from"]
+
+    @staticmethod
+    @pytest.mark.parametrize(
+        "transform_fn",
+        (
+            pytest.param(None, id=""),
+            pytest.param(mock.Mock(), id="transform"),
+        ),
+    )
+    def test_ok_multi(transform_fn, patches):
+        """Tests that a list of valid patches is parsed correctly."""
+
+        #####
+        res = jsonpatch.parse(patches, transform_fn=transform_fn)
+        #####
+
+        assert isinstance(res, list)
+
+        for i, patch in enumerate(res):
+            assert isinstance(patch, jsonpatch.Patch)
+
+            assert patch.op == patches[i]["op"]
+            assert patch.path == patches[i]["path"]
+
+            if hasattr(patch, "value"):
+                if transform_fn is None:
+                    assert patch.value == patches[i]["value"]
+                else:
+                    assert patch.value == transform_fn.return_value
+
+            if hasattr(patch, "from_"):
+                assert patch.from_ == patches[i]["from"]
+
+        if transform_fn is not None:
+            transform_fn.assert_has_calls(
+                [
+                    mock.call(patch["value"])
+                    for patch in patches
+                    if "value" in patch
+                ]
+            )
diff --git a/tests/unittests/server/utils/jsonpatch/test_jsonpatch_methods.py b/tests/unittests/server/utils/jsonpatch/test_jsonpatch_methods.py
new file mode 100644
index 00000000000..ea2d65e8784
--- /dev/null
+++ b/tests/unittests/server/utils/jsonpatch/test_jsonpatch_methods.py
@@ -0,0 +1,429 @@
+"""
+| Copyright 2017-2025, Voxel51, Inc.
+| `voxel51.com <https://voxel51.com/>`_
+|
+"""
+
+import dataclasses
+from typing import Any, Literal, Union
+from unittest import mock
+
+import jsonpointer
+import pytest
+
+from fiftyone.server.utils.jsonpatch import methods
+from fiftyone.server.utils.jsonpatch.methods import (
+    get,
+    add,
+    remove,
+    test as _test,  # avoid pytest collecting 'test' as a test case
+    replace,
+    copy,
+    move,
+)
+
+
+@dataclasses.dataclass
+class Name:
+    """A person's name."""
+
+    given: str
+    family: str
+
+
+@dataclasses.dataclass
+class Pet:
+    """A pet."""
+
+    name: str
+    type: Literal["cat", "dog", "fish"]
+    meta: dict[str, Any] = dataclasses.field(default_factory=lambda: {})
+
+
+@dataclasses.dataclass
+class Person:
+    """A person."""
+
+    name: Name
+    age: int
+    pets: list[Pet]
+
+    def __setattr__(self, name, value):
+        if name not in [
+            field.name for field in dataclasses.fields(self.__class__)
+        ]:
+            raise AttributeError(f"Cannot set attribute {name}")
+        super().__setattr__(name, value)
+
+    def __delattr__(self, name):
+        raise AttributeError(f"Deletion of '{name}' is not allowed.")
+
+
+@pytest.fixture(name="person")
+def fixture_person():
+    """Returns an example Person."""
+    return Person(
+        name=Name(given="Alice", family="Smith"),
+        age=30,
+        pets=[
+            Pet(name="Fluffy", type="cat", meta={"color": "white"}),
+            Pet(name="Spot", type="dog"),
+        ],
+    )
+
+
+class TestGet:
+    """Tests for get."""
+
+    @staticmethod
+    def test_attribute_err(person: Person):
+        """Tests error is raised for invalid attributes."""
+
+        with pytest.raises(AttributeError):
+            #####
+            get(person, "/random")
+            #####
+
+    @staticmethod
+    @pytest.mark.parametrize("key", ["not_an_index"])
+    def test_value_err(key: str, person: Person):
+        """Tests error is raised for invalid list indices."""
+
+        with pytest.raises(AttributeError):
+            #####
+            get(person, f"/pets/{key}")
+            #####
+
+    @staticmethod
+    @pytest.mark.parametrize("idx", [-1, 2])
+    def test_index_err(idx: int, person: Person):
+        """Tests error is raised for out-of-bounds indices."""
+
+        with pytest.raises(AttributeError):
+            #####
+            get(person, f"/pets/{str(idx)}")
+            #####
+
+    @staticmethod
+    def test_getattr(person: Person):
+        """Tests that attributes are retrieved."""
+        #####
+        res = get(person, "/pets/0/name")
+        #####
+
+        assert res == person.pets[0].name
+
+    @staticmethod
+    def test_getitem(person: Person):
+        """Tests that dict items are retrieved."""
+
+        #####
+        res = get(person, "/pets/0/meta/color")
+        #####
+
+        assert res == person.pets[0].meta["color"]
+
+    @staticmethod
+    @pytest.mark.parametrize("idx", [0, 1])
+    def test_list_getitem(idx: int, person: Person):
+        """Tests that list items are retrieved."""
+
+        #####
+        res = get(person, f"/pets/{idx}")
+        #####
+
+        assert res == person.pets[idx]
+
+
+class TestAdd:
+    """Tests for add."""
+
+    @staticmethod
+    def test_path_is_root(person: Person):
+        """Tests that adding at the root replaces the object."""
+
+        #####
+        res = add(person, "", value := mock.Mock())
+        #####
+
+        assert res == value
+
+    @staticmethod
+    def test_path_err(person: Person):
+        """Tests that AttributeError is raised for invalid paths."""
+
+        with pytest.raises(AttributeError):
+            #####
+            add(person, "/a/b/c", mock.Mock())
+            #####
+
+    @staticmethod
+    def test_cannot_set_attribute(person: Person):
+        """Tests error is raised for invalid attributes."""
+
+        with pytest.raises(ValueError):
+            #####
+            add(person, "/random", mock.Mock())
+            #####
+
+    @staticmethod
+    @pytest.mark.parametrize("key", ["not_an_index"])
+    def test_bad_index(key: str, person: Person):
+        """Tests error is raised for invalid list indices."""
+
+        with pytest.raises(ValueError):
+            #####
+            add(person, f"/pets/{key}", mock.Mock())
+            #####
+
+    @staticmethod
+    @pytest.mark.parametrize("idx", [-1, 3])
+    def test_index_out_of_bounds(idx: int, person: Person):
+        """Tests error is raised for out-of-bounds indices."""
+
+        with pytest.raises(ValueError):
+            #####
+            add(person, f"/pets/{str(idx)}", mock.Mock())
+            #####
+
+    @staticmethod
+    def test_set_attribute(person: Person):
+        """Tests that new attributes are set."""
+
+        assert not hasattr(person.pets[0], "random")
+
+        #####
+        res = add(person, "/pets/0/random", value := mock.Mock())
+        #####
+
+        assert person.pets[0].random == value
+        assert res == person
+
+    @staticmethod
+    def test_set_item_existing(person: Person):
+        """Tests that existing dict items are set."""
+        original_value = person.pets[0].meta["color"]
+
+        #####
+        res = add(person, "/pets/0/meta/color", value := mock.Mock())
+        #####
+
+        assert person.pets[0].meta["color"] != original_value
+        assert person.pets[0].meta["color"] == value
+        assert res == person
+
+    @staticmethod
+    def test_set_item_new(person: Person):
+        """Tests that new dict items are set."""
+        assert "random" not in person.pets[0].meta
+
+        #####
+        res = add(person, "/pets/0/meta/random", value := mock.Mock())
+        #####
+
+        assert "random" in person.pets[0].meta
+        assert person.pets[0].meta["random"] == value
+        assert res == person
+
+    @staticmethod
+    @pytest.mark.parametrize("idx", [0, 1])
+    def test_list_set_item(idx: int, person: Person):
+        """Tests that list items are inserted at the given index."""
+        length = len(person.pets)
+
+        #####
+        res = add(person, f"/pets/{str(idx)}", value := mock.Mock())
+        #####
+
+        assert len(person.pets) == length + 1
+        assert person.pets[idx] == value
+        assert res == person
+
+    @staticmethod
+    @pytest.mark.parametrize("idx", ["-", 2])
+    def test_append_list_item(idx: Union[str, int], person: Person):
+        """Tests that list items are appended when using "-" or the
+        end-of-list index."""
+
+        #####
+        res = add(person, f"/pets/{idx}", value := mock.Mock())
+        #####
+
+        assert len(person.pets) == 3
+        assert person.pets[-1] == value
+        assert res == person
+
+
+class TestRemove:
+    """Tests for remove."""
+
+    @staticmethod
+    def test_path_is_root(person: Person):
+        """Tests that attempting to remove the root raises ValueError."""
+
+        with pytest.raises(ValueError):
+            #####
+            remove(person, "")
+            #####
+
+    @staticmethod
+    def test_path_err(person: Person):
+        """Tests that AttributeError is raised for invalid paths."""
+
+        with pytest.raises(AttributeError):
+            #####
+            remove(person, "/a/b/c")
+            #####
+
+    @staticmethod
+    def test_delete_attribute_forbidden(person: Person):
+        """Tests error is raised when deleting an attribute is forbidden."""
+
+        with pytest.raises(ValueError):
+            #####
+            remove(person, "/name")
+            #####
+
+    @staticmethod
+    def test_delete_attribute(person: Person):
+        """Tests that attributes are deleted."""
+
+        assert hasattr(person.pets[0], "type")
+
+        #####
+        res = remove(person, "/pets/0/type")
+        #####
+
+        assert not hasattr(person.pets[0], "type")
+        assert res == person
+
+    @staticmethod
+    def test_delete_item(person: Person):
+        """Tests that dict items are deleted."""
+
+        assert "color" in person.pets[0].meta
+
+        #####
+        res = remove(person, "/pets/0/meta/color")
+        #####
+
+        assert "color" not in person.pets[0].meta
+        assert res == person
+
+    @staticmethod
+    @pytest.mark.parametrize("idx", [0, 1])
+    def test_list_delitem(idx: int, person: Person):
+        """Tests that list items are deleted."""
+
+        length = len(person.pets)
+        value = person.pets[idx]
+
+        #####
+        res = remove(person, f"/pets/{str(idx)}")
+        #####
+
+        assert len(person.pets) == length - 1
+        assert value not in person.pets
+        assert res == person
+
+
+class TestTest:
+    """Tests for test."""
+
+    @staticmethod
+    def test_err(person: Person):
+        """Tests error is raised when the test fails."""
+
+        for path in (
+            "",
+            "/name/family",
+            "/age",
+            "/pets",
+            "/pets/0",
+            "/pets/0/name",
+        ):
+            with pytest.raises(ValueError):
+                #####
+                _test(person, path, mock.Mock())
+                #####
+
+    @staticmethod
+    def test_ok(person: Person):
+        """Tests that valid Test operations succeed."""
+
+        for path, value in (
+            ("", person),
+            ("/name/family", person.name.family),
+            ("/age", person.age),
+            ("/pets", person.pets),
+            ("/pets/0", person.pets[0]),
+            ("/pets/0/name", person.pets[0].name),
+        ):
+            #####
+            res = _test(person, path, value)
+            #####
+
+            assert res == person
+
+
+def test_copy():
+    """Tests for copy."""
+
+    src = mock.Mock()
+
+    with (
+        mock.patch.object(methods, "get") as mock_get,
+        mock.patch.object(methods, "add") as mock_add,
+    ):
+        pointer = jsonpointer.JsonPointer("/a/b/c")
+        from_pointer = jsonpointer.JsonPointer("/d/e/f")
+
+        #####
+        res = copy(src, pointer, from_pointer)
+        #####
+
+        mock_get.assert_called_once_with(src, from_pointer)
+        mock_add.assert_called_once_with(src, pointer, mock_get.return_value)
+        assert res == src
+
+
+def test_move():
+    """Tests for move."""
+
+    src = mock.Mock()
+
+    with (
+        mock.patch.object(methods, "get") as mock_get,
+        mock.patch.object(methods, "remove") as mock_remove,
+        mock.patch.object(methods, "add") as mock_add,
+    ):
+        pointer = jsonpointer.JsonPointer("/a/b/c")
+        from_pointer = jsonpointer.JsonPointer("/d/e/f")
+
+        #####
+        res = move(src, pointer, from_pointer)
+        #####
+
+        mock_get.assert_called_once_with(src, from_pointer)
+        mock_remove.assert_called_once_with(src, from_pointer)
+        mock_add.assert_called_once_with(src, pointer, mock_get.return_value)
+        assert res == src
+
+
+def test_replace():
+    """Tests for replace."""
+
+    src = mock.Mock()
+
+    with (
+        mock.patch.object(methods, "remove") as mock_remove,
+        mock.patch.object(methods, "add") as mock_add,
+    ):
+        pointer = jsonpointer.JsonPointer("/a/b/c")
+
+        #####
+        res = replace(src, pointer, value := mock.Mock())
+        #####
+
+        mock_remove.assert_called_once_with(src, pointer)
+        mock_add.assert_called_once_with(src, pointer, value)
+        assert res == src
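+
+
+# Illustrative end-to-end sketch (an editor-added example, not part of the
+# mocked tests above): parse raw patch dicts and apply them to a plain dict
+# target, exercising parse() and the Patch helpers together.
+def test_parse_and_apply_end_to_end():
+    """Tests parsing and applying patches end-to-end on a plain dict."""
+    from fiftyone.server.utils import jsonpatch
+
+    src = {"age": 30, "pets": [{"name": "Fluffy"}, {"name": "Spot"}]}
+
+    patches = jsonpatch.parse(
+        [
+            {"op": "replace", "path": "/age", "value": 31},
+            {"op": "remove", "path": "/pets/1"},
+        ]
+    )
+
+    # each parsed Patch mutates ``src`` in place and returns it
+    for p in patches:
+        p.apply(src)
+
+    assert src == {"age": 31, "pets": [{"name": "Fluffy"}]}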