From 8a954b8b0b9d9079b8deccc97e36f03463d803ac Mon Sep 17 00:00:00 2001 From: Austin Sullivan Date: Thu, 14 Nov 2024 17:59:43 -0800 Subject: [PATCH 01/11] bare bones spec for MLTensor --- index.bs | 330 ++++++++++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 329 insertions(+), 1 deletion(-) diff --git a/index.bs b/index.bs index da1bf59a..7d726c9a 100644 --- a/index.bs +++ b/index.bs @@ -861,6 +861,7 @@ The {{MLContext}} interface represents a global state of neural network compute @@ -888,6 +897,11 @@ interface MLContext { : \[[powerPreference]] of type {{MLPowerPreference}}. :: The {{MLContext}}'s {{MLPowerPreference}}. + : \[[timeline]] + :: + A timeline associated with the execution of operations on the compute units of the {{MLContext}}. These operations include inferencing on [=computational graphs=] and modifying the {{MLTensor/[[data]]}} of {{MLTensor}}s. + + Issue(520): More rigorously define this timeline. @@ -919,6 +933,17 @@ When the {{MLContext/[[contextType]]}} is set to [=context type/default=] with t 1. If |bufferView|.\[[ByteLength]] is not equal to |descriptor|'s [=MLOperandDescriptor/byte length=], return false. +
+ + To validate tensors with descriptors given an {{MLNamedTensors}} |namedTensors| with [=record=]<{{USVString}}, {{MLOperandDescriptor}}> |namedDescriptors|: + + 1. If |namedTensors|'s [=map/size=] is not equal to |namedDescriptors|'s [=map/size=], then return false. + 1. [=map/For each=] |name| → |tensor| of |namedTensors|: + 1. If |namedDescriptors|[|name|] does not [=map/exist=], then return false. + 1. If |tensor|'s {{MLTensor/[[descriptor]]}} is not equal to |namedDescriptors|[|name|], then return false. + 1. Return true. +
+
To execute graph, given {{MLGraph}} |graph|, {{MLNamedArrayBufferViews}} |inputs| and {{MLNamedArrayBufferViews}} |outputs|, run the following steps. They return {{undefined}}, or an error. @@ -1049,6 +1074,200 @@ Note: Invocations of {{MLContext/compute()}} will fail if any of the {{MLContext
+### {{MLContext/dispatch()}} ### {#api-mlcontext-dispatch} + +Schedules the computational workload of a compiled {{MLGraph}} on the {{MLContext}}'s {{MLContext/[[timeline]]}}. + +
+ **Arguments:** + - graph: an {{MLGraph}}. The computational graph to be executed. + - inputs: an {{MLNamedTensors}}. The inputs to the computational graph. + - outputs: an {{MLNamedTensors}}. The outputs to the computational graph. + + **Returns:** {{undefined}}. +
+ +
+ + The dispatch(|graph|, |inputs|, |outputs|) method steps are: + + 1. Let |allTensors| be a [=/list=] of {{MLTensor}}s consisting of |inputs|'s [=map/values=] [=list/extended=] by |outputs|'s [=map/values=]. + 1. If |allTensors| contains any duplicate [=list/items=], then [=exception/throw=] a {{TypeError}}. + 1. [=list/For each=] |tensor| of |allTensors|: + 1. If |tensor|'s {{MLTensor/[[context]]}} is not [=this=], then [=exception/throw=] a {{TypeError}}. + 1. If |tensor|'s {{MLTensor/[[isDestroyed]]}} is true, then [=exception/throw=] a {{TypeError}}. + 1. If [=validating tensors with descriptors=] given |inputs| and |graph|'s {{MLGraph/[[inputDescriptors]]}} returns false, then [=exception/throw=] a {{TypeError}}. + 1. If [=validating tensors with descriptors=] given |outputs| and |graph|'s {{MLGraph/[[outputDescriptors]]}} returns false, then [=exception/throw=] a {{TypeError}}. + 1. Enqueue the following steps to |graph|'s {{MLGraph/[[context]]}}'s {{MLContext/[[timeline]]}}: + 1. Issue a compute request to |graph|'s {{MLGraph/[[implementation]]}} given |inputs| and |outputs|. + + Issue(778): Add a mechanism for reporting errors during graph execution. + + 1. Return {{undefined}}. +
+ +#### Examples #### {#api-mlcontext-dispatch-examples} +
+
+ + The following code showcases executing an {{MLGraph}} using {{MLTensor}}s. + +
+    const descriptor = {dataType: 'float32', shape: [2, 2]};
+    const context = await navigator.ml.createContext();
+    const builder = new MLGraphBuilder(context);
+
+    // 1. Create a computational graph 'C = 0.2 * A + B'.
+    const constant = builder.constant(descriptor, new Float32Array(4).fill(0.2));
+    const A = builder.input('A', descriptor);
+    const B = builder.input('B', descriptor);
+    const C = builder.add(builder.mul(A, constant), B);
+
+    // 2. Compile the graph.
+    const graph = await builder.build({'C': C});
+
+    // 3. Create reusable input and output tensors.
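+    // createTensor() allocates each tensor on the context timeline and
+    // zero-initializes its contents before the returned promise resolves.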
+    const [inputTensorA, inputTensorB, outputTensorC] =
+        await Promise.all([
+          context.createTensor({
+            dataType: 'float32', shape: [2, 2], writable: true
+          }),
+          context.createTensor({
+            dataType: 'float32', shape: [2, 2], writable: true
+          }),
+          context.createTensor({
+            dataType: 'float32', shape: [2, 2], readable: true
+          })
+        ]);
+
+    // 4. Initialize the inputs.
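+    // writeTensor() queues the write on the context timeline and provides no
+    // completion signal; reading the tensor back is how its contents are observed.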
+    context.writeTensor(inputTensorA, new Float32Array(4).fill(1.0));
+    context.writeTensor(inputTensorB, new Float32Array(4).fill(0.8));
+
+    // 5. Execute the graph.
+    const inputs = {
+      'A': inputTensorA,
+      'B': inputTensorB
+    };
+    const outputs = {
+      'C': outputTensorC
+    };
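+    // dispatch() queues execution on the context timeline and returns without
+    // waiting for results; completion is observed by reading back an output tensor.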
+    context.dispatch(graph, inputs, outputs);
+    
+    // 6. Read back the computed result.
+    const result = await context.readTensor(outputTensorC);
+    console.log('Output value:', new Float32Array(result));  // [1, 1, 1, 1] (0.2 * 1.0 + 0.8 = 1.0)
+  
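+
+  Because the tensors are reusable, subsequent inferences may dispatch the same
+  graph with the same tensors. The following sketch, which reuses the variables
+  declared above, updates an input and runs the graph again.
+
+  <pre highlight="js">
+    // 7. Update an input and re-dispatch, reusing the same tensors.
+    context.writeTensor(inputTensorA, new Float32Array(4).fill(2.0));
+    context.dispatch(graph, inputs, outputs);
+
+    const nextResult = await context.readTensor(outputTensorC);
+    // Each element of the output is now 0.2 * 2.0 + 0.8, i.e. about 1.2.
+    console.log('Output value:', new Float32Array(nextResult));
+  </pre>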
+
+
+ +### {{MLContext/createTensor()}} ### {#api-mlcontext-createtensor} + +Creates an {{MLTensor}} associated with this {{MLContext}}. + +
+ **Arguments:** + - descriptor: an {{MLTensorDescriptor}}. + + **Returns:** Promise. +
+ +
+ + The createTensor(|descriptor|) method steps are: + + 1. Let |global| be [=this=]'s [=relevant global object=]. + 1. Let |tensor| be the result of [=creating an MLTensor=] given [=this=], and |descriptor|. + 1. Let |promise| be [=a new promise=]. + 1. Enqueue the following steps to [=this=]'s {{MLContext/[[timeline]]}}: + 1. Create |tensor|'s {{MLTensor/[[data]]}} given |descriptor| and initialize all bytes to zeros. + 1. If that fails, then [=queue an ML task=] with |global| to [=reject=] |promise| with an "{{UnknownError}}" {{DOMException}}, and abort these steps. + 1. Otherwise, [=queue an ML task=] with |global| to [=resolve=] |promise| with |tensor|. + 1. Return |promise|. +
+ +### {{MLContext/readTensor()}} ### {#api-mlcontext-readtensor} + +Reads back the {{MLTensor/[[data]]}} of an {{MLTensor}} from the {{MLContext}}'s {{MLContext/[[timeline]]}} to script. + +
+ **Arguments:** + - tensor: an {{MLTensor}}. The tensor to be read. + + **Returns:** Promise. A buffer containing the result of the read. +
+ +
+ **Arguments:** + - tensor: an {{MLTensor}}. The tensor to be read. + - outputData: an {{AllowSharedBufferSource}}. The buffer to read the result into. + + **Returns:** Promise. +
+ +
+ + The readTensor(|tensor|) method steps are: + + 1. Let |global| be [=this=]'s [=relevant global object=]. + 1. Let |realm| be [=this=]'s [=relevant realm=]. + 1. If |tensor|'s {{MLGraph/[[context]]}} is not [=this=], then return [=a new promise=] [=rejected=] with a {{TypeError}}. + 1. If |tensor|'s {{MLTensor/[[isDestroyed]]}} is true, then return [=a new promise=] [=rejected=] with a {{TypeError}}. + 1. If |tensor|'s {{MLTensor/readable}} is false, then return [=a new promise=] [=rejected=] with a {{TypeError}}. + 1. Let |promise| be [=a new promise=]. + 1. Enqueue the following steps to |tensor|'s {{MLGraph/[[context]]}}'s {{MLContext/[[timeline]]}}: + 1. Let |bytes| be a [=/byte sequence=] containing a copy of |tensor|'s {{MLTensor/[[data]]}}. + 1. [=Queue an ML task=] with |global| to [=ArrayBuffer/create=] an {{ArrayBuffer}} |result| given |bytes| and |realm| and then [=resolve=] |promise| with |result|. + 1. Return |promise|. +
+ +
+ + The readTensor(|tensor|, |outputData|) method steps are: + + 1. Let |global| be [=this=]'s [=relevant global object=]. + 1. If |tensor|'s {{MLGraph/[[context]]}} is not [=this=], then return [=a new promise=] [=rejected=] with a {{TypeError}}. + 1. If |tensor|'s {{MLTensor/[[isDestroyed]]}} is true, then return [=a new promise=] [=rejected=] with a {{TypeError}}. + 1. If |tensor|'s {{MLTensor/readable}} is false, then return [=a new promise=] [=rejected=] with a {{TypeError}}. + 1. If [=validating buffer with descriptor=] given |outputData| and |tensor|'s {{MLTensor/[[descriptor]]}} returns false, then return [=a new promise=] [=rejected=] with a {{TypeError}}. + 1. Let |promise| be [=a new promise=]. + 1. Enqueue the following steps to |tensor|'s {{MLGraph/[[context]]}}'s {{MLContext/[[timeline]]}}: + 1. Let |bytes| be a [=/byte sequence=] containing a copy of |tensor|'s {{MLTensor/[[data]]}}. + 1. [=Queue an ML task=] with |global| to [=ArrayBuffer/write=] |bytes| to |outputData| and then [=resolve=] |promise| with {{undefined}}. + 1. Return |promise|. +
+ + +### {{MLContext/writeTensor()}} ### {#api-mlcontext-writetensor} + +Writes data to the {{MLTensor/[[data]]}} of an {{MLTensor}} on the {{MLContext}}'s {{MLContext/[[timeline]]}}. + +
+ **Arguments:** + - tensor: an {{MLTensor}}. The tensor to be written to. + - inputData: an {{AllowSharedBufferSource}}. The buffer whose bytes will be written into the tensor. + + **Returns:** {{undefined}}. +
+ +
+ + The writeTensor(|tensor|, |inputData|) method steps are: + + 1. If |tensor|'s {{MLGraph/[[context]]}} is not [=this=], then [=exception/throw=] a {{TypeError}}. + 1. If |tensor|'s {{MLTensor/[[isDestroyed]]}} is true, then [=exception/throw=] a {{TypeError}}. + 1. If |tensor|'s {{MLTensor/writable}} is false, then [=exception/throw=] a {{TypeError}}. + 1. If [=validating buffer with descriptor=] given |inputData| and |tensor|'s {{MLTensor/[[descriptor]]}} returns false, then [=exception/throw=] a {{TypeError}}. + 1. Let |bytes| be the result of [=getting a copy of the bytes held by the buffer source=] given |inputData|. + 1. [=Assert=]: |bytes|'s [=byte sequence/length=] is equal to |tensor|'s {{MLTensor/[[descriptor]]}}'s [=MLOperandDescriptor/byte length=]. + 1. Enqueue the following steps to |tensor|'s {{MLGraph/[[context]]}}'s {{MLContext/[[timeline]]}}: + 1. Copy |bytes| to |tensor|'s {{MLTensor/[[data]]}}. + + Issue(778): Add a mechanism for reporting errors while writing to a tensor. + + 1. Return {{undefined}}. +
+ ### {{MLContext/opSupportLimits()}} ### {#api-mlcontext-opsupportlimits} The {{MLContext/opSupportLimits()}} exposes level of support that differs across implementations at operator level. Consumers of the WebNN API are encouraged to probe feature support level by using {{MLContext/opSupportLimits()}} to determine the optimal model architecture to be deployed for each target platform. @@ -1325,6 +1544,115 @@ To validate operand given {{MLGraphBuilder}} |bu Issue(whatwg/webidl#1388): Support for unions of {{bigint}} and [=numeric types=] is new in [[WEBIDL]], and implementation support is also limited. Prototype implementations are encouraged to provide feedback for this approach. +## {{MLTensorDescriptor}} dictionary ## {#api-mltensordescriptor} + +An {{MLTensorDescriptor}} describes the characteristics and capabilities of an {{MLTensor}}. + + + +
+ : readable + :: Whether the tensor's contents can be read via {{MLContext/readTensor()}}. + + : writable + :: Whether the tensor's contents can be written to via {{MLContext/writeTensor()}}. +
+ +## {{MLTensor}} interface ## {#api-mltensor} + +The {{MLTensor}} interface represents a tensor which may be used as an input or output to an {{MLGraph}}. The memory backing an {{MLTensor}} should be allocated in an [=implementation-defined=] fashion according to the requirements of the {{MLContext}} and the {{MLTensorDescriptor}} used to create it. Operations involving the {{MLTensor/[[data]]}} of an {{MLTensor}} occur on the {{MLContext/[[timeline]]}} of its associated {{MLContext}}. + +Note: The [=implementation-defined=] requirements of how an {{MLTensor}} is allocated may include constraints such as that the memory is allocated with a particular byte alignment or in a particular memory pool. + + + +
+{{MLTensor}} has the following internal slots: +
+ : \[[context]] of type {{MLContext}} + :: + The {{MLTensor}}'s associated context. + + : \[[descriptor]] of type {{MLTensorDescriptor}} + :: + The {{MLTensor}}'s descriptor. + + : \[[isDestroyed]] of type {{boolean}} + :: + Whether {{MLTensor}}.{{MLTensor/destroy()}} has been called. Once destroyed, the {{MLTensor}} can no longer be used. + + : \[[data]] of an [=implementation-defined=] type + :: + The bytes backing the {{MLTensor}}. This data may only be modified from the {{MLTensor/[[context]]}}'s {{MLContext/[[timeline]]}}. +
+
+ +An {{MLTensor}}'s dataType is its {{MLTensor/[[descriptor]]}}'s {{MLOperandDescriptor/dataType}}. + +An {{MLTensor}}'s shape is its {{MLTensor/[[descriptor]]}}'s {{MLOperandDescriptor/shape}}. + +An {{MLTensor}} readable if its {{MLTensor/[[descriptor]]}}'s {{MLTensorDescriptor/readable}} is true. + +An {{MLTensor}} writable if its {{MLTensor/[[descriptor]]}}'s {{MLTensorDescriptor/writable}} is true. + +The dataType [=getter steps=] are to return [=this=]'s [=MLTensor/dataType=]. + +The shape [=getter steps=] are to return [=this=]'s [=MLTensor/shape=]. + +The readable [=getter steps=] are to return [=this=]'s [=MLTensor/readable=]. + +The writable [=getter steps=] are to return [=this=]'s [=MLTensor/writable=]. + +### Creating an {{MLTensor}} ### {#api-mltensor-create} + +An {{MLTensor}} is created with by its associated {{MLContext}}. Note that creating an {{MLTensor}} does not include initializing its {{MLTensor/[[data]]}}. This {{MLTensor/[[data]]}} should be initialized soon afterwards. + +
+ + To create an MLTensor given {{MLContext}} |context| and {{MLTensorDescriptor}} |descriptor|, run the following steps: + + 1. Let |tensor| be a new {{MLTensor}}. + 1. Set |tensor|'s {{MLTensor/[[context]]}} to |context|. + 1. Set |tensor|'s {{MLTensor/[[descriptor]]}} to |descriptor|. + 1. Set |tensor|'s {{MLTensor/[[isDestroyed]]}} to false. + 1. Return |tensor|. +
+ +### {{MLTensor/destroy()}} ### {#api-mltensor-destroy} + +Destroys the {{MLTensor}}. This method is idempotent. + +
+ **Returns:** {{undefined}}. +
+ +
+ + The destroy() method steps are: + + 1. Set [=this=]'s {{MLTensor/[[isDestroyed]]}} to true. + 1. Enqueue the following steps to [=this=]'s {{MLTensor/[[context]]}}'s {{MLContext/[[timeline]]}}: + 1. Free [=this=]'s {{MLTensor/[[data]]}}. + 1. Return {{undefined}}. +
+ +Note: Since no further operations can be enqueued using this tensor, implementations can free resource allocations associated with this tensor. + ## {{MLGraphBuilder}} interface ## {#api-mlgraphbuilder} The {{MLGraphBuilder}} interface defines a set of operations as identified by the [[#usecases]] that can be composed into a computational graph. It also represents the intermediate state of a graph building session. From 2d5d05ec23f0bc330ff0ee4855d249eb4ee9bfcf Mon Sep 17 00:00:00 2001 From: Austin Sullivan Date: Fri, 15 Nov 2024 10:34:54 -0800 Subject: [PATCH 02/11] fix ambiguous ref error + split out BYOB readBuffer variant --- index.bs | 25 ++++++++++++++----------- 1 file changed, 14 insertions(+), 11 deletions(-) diff --git a/index.bs b/index.bs index 7d726c9a..fed886ca 100644 --- a/index.bs +++ b/index.bs @@ -1186,7 +1186,7 @@ Creates an {{MLTensor}} associated with this {{MLContext}}. 1. Return |promise|. -### {{MLContext/readTensor()}} ### {#api-mlcontext-readtensor} +### {{MLContext/readTensor(tensor)}} ### {#api-mlcontext-readtensor} Reads back the {{MLTensor/[[data]]}} of an {{MLTensor}} from the {{MLContext}}'s {{MLContext/[[timeline]]}} to script. @@ -1197,14 +1197,6 @@ Reads back the {{MLTensor/[[data]]}} of an {{MLTensor}} from the {{MLContext}}'s **Returns:** Promise. A buffer containing the result of the read. -
- **Arguments:** - - tensor: an {{MLTensor}}. The tensor to be read. - - outputData: an {{AllowSharedBufferSource}}. The buffer to read the result into. - - **Returns:** Promise. -
-
The readTensor(|tensor|) method steps are: @@ -1221,6 +1213,18 @@ Reads back the {{MLTensor/[[data]]}} of an {{MLTensor}} from the {{MLContext}}'s 1. Return |promise|.
+### {{MLContext/readTensor(tensor, outputData)}} ### {#api-mlcontext-readtensor-byob} + +Bring-your-own-buffer variant of {{MLContext/readTensor(tensor)}}. Reads back the {{MLTensor/[[data]]}} of an {{MLTensor}} into the provided buffer. + +
+ **Arguments:** + - tensor: an {{MLTensor}}. The tensor to be read. + - outputData: an {{AllowSharedBufferSource}}. The buffer to read the result into. + + **Returns:** Promise. +
+
The readTensor(|tensor|, |outputData|) method steps are: @@ -1237,7 +1241,6 @@ Reads back the {{MLTensor/[[data]]}} of an {{MLTensor}} from the {{MLContext}}'s 1. Return |promise|.
- ### {{MLContext/writeTensor()}} ### {#api-mlcontext-writetensor} Writes data to the {{MLTensor/[[data]]}} of an {{MLTensor}} on the {{MLContext}}'s {{MLContext/[[timeline]]}}. @@ -1557,7 +1560,7 @@ dictionary MLTensorDescriptor : MLOperandDescriptor {
: readable - :: Whether the tensor's contents can be read via {{MLContext/readTensor()}}. + :: Whether the tensor's contents can be read via {{MLContext/readTensor(tensor)}} or {{MLContext/readTensor(tensor, outputData)}}. : writable :: Whether the tensor's contents can be written to via {{MLContext/writeTensor()}}. From e6772a37b9e0f771b1bf861c802398c4eaf20ad6 Mon Sep 17 00:00:00 2001 From: Austin Sullivan Date: Fri, 15 Nov 2024 10:40:44 -0800 Subject: [PATCH 03/11] add missing semicolon --- index.bs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/index.bs b/index.bs index fed886ca..08aec414 100644 --- a/index.bs +++ b/index.bs @@ -935,7 +935,7 @@ When the {{MLContext/[[contextType]]}} is set to [=context type/default=] with t
- To validate tensors with descriptors given an {{MLNamedTensors}} |namedTensors| with [=record=]<{{USVString}}, {{MLOperandDescriptor}}> |namedDescriptors|: + To validate tensors with descriptors given an {{MLNamedTensors}} |namedTensors| with [=record=]<{{USVString}}, {{MLOperandDescriptor}}> |namedDescriptors|: 1. If |namedTensors|'s [=map/size=] is not equal to |namedDescriptors|'s [=map/size=], then return false. 1. [=map/For each=] |name| → |tensor| of |namedTensors|: From 814f084afb79155c7d09b6817523302a2a42ce42 Mon Sep 17 00:00:00 2001 From: Austin Sullivan Date: Fri, 15 Nov 2024 10:54:39 -0800 Subject: [PATCH 04/11] linkify **Returns:** --- index.bs | 102 +++++++++++++++++++++++++++++-------------------------- 1 file changed, 53 insertions(+), 49 deletions(-) diff --git a/index.bs b/index.bs index 08aec414..d7397e45 100644 --- a/index.bs +++ b/index.bs @@ -901,7 +901,7 @@ interface MLContext { :: A timeline associated with the execution of operations on the compute units of the {{MLContext}}. These operations include inferencing on [=computational graphs=] and modifying the {{MLTensor/[[data]]}} of {{MLTensor}}s. - Issue(520): More rigorously define this timeline. + Issue(529): More rigorously define this timeline.
@@ -940,7 +940,7 @@ When the {{MLContext/[[contextType]]}} is set to [=context type/default=] with t 1. If |namedTensors|'s [=map/size=] is not equal to |namedDescriptors|'s [=map/size=], then return false. 1. [=map/For each=] |name| → |tensor| of |namedTensors|: 1. If |namedDescriptors|[|name|] does not [=map/exist=], then return false. - 1. If |tensor|'s {{MLTensor/[[descriptor]]}} is not equal to |namedDescriptors|[|name|], then return false. + 1. If |tensor|.{{MLTensor/[[descriptor]]}} is not equal to |namedDescriptors|[|name|], then return false. 1. Return true. @@ -964,7 +964,7 @@ When the {{MLContext/[[contextType]]}} is set to [=context type/default=] with t 1. If the byte length of |outputTensor| is not equal to |outputDesc|'s [=MLOperandDescriptor/byte length=], then return a {{TypeError}}. 1. If |outputTensor|'s [=element type=] doesn't match |outputValue|'s [=element type=], then return a {{TypeError}}. 1. Request the underlying implementation of |graph| to set the values of elements in |outputValue| to the values of elements in |outputTensor|. - 1. Return {{undefined}}. + 1. Return undefined. ### {{MLNamedArrayBufferViews}} transfer algorithm ### {#mlnamedarraybufferviews-transfer-alg} @@ -1082,11 +1082,13 @@ Schedules the computational workload of a compiled {{MLGraph}} on the {{MLContex **Arguments:** - graph: an {{MLGraph}}. The computational graph to be executed. - inputs: an {{MLNamedTensors}}. The inputs to the computational graph. - - outputs: an {{MLNamedTensors}}. The outputs to the computational graph. + - outputs: an {{MLNamedTensors}}. The outputs of the computational graph. **Returns:** {{undefined}}. +Note: `dispatch()` itself provides no signal that graph execution has completed. Rather, callers should await the results of reading back the output tensors. See [[#api-mlcontext-dispatch-examples]] below. +
The dispatch(|graph|, |inputs|, |outputs|) method steps are: @@ -1094,16 +1096,16 @@ Schedules the computational workload of a compiled {{MLGraph}} on the {{MLContex 1. Let |allTensors| be a [=/list=] of {{MLTensor}}s consisting of |inputs|'s [=map/values=] [=list/extended=] by |outputs|'s [=map/values=]. 1. If |allTensors| contains any duplicate [=list/items=], then [=exception/throw=] a {{TypeError}}. 1. [=list/For each=] |tensor| of |allTensors|: - 1. If |tensor|'s {{MLTensor/[[context]]}} is not [=this=], then [=exception/throw=] a {{TypeError}}. - 1. If |tensor|'s {{MLTensor/[[isDestroyed]]}} is true, then [=exception/throw=] a {{TypeError}}. - 1. If [=validating tensors with descriptors=] given |inputs| and |graph|'s {{MLGraph/[[inputDescriptors]]}} returns false, then [=exception/throw=] a {{TypeError}}. - 1. If [=validating tensors with descriptors=] given |outputs| and |graph|'s {{MLGraph/[[outputDescriptors]]}} returns false, then [=exception/throw=] a {{TypeError}}. - 1. Enqueue the following steps to |graph|'s {{MLGraph/[[context]]}}'s {{MLContext/[[timeline]]}}: - 1. Issue a compute request to |graph|'s {{MLGraph/[[implementation]]}} given |inputs| and |outputs|. + 1. If |tensor|.{{MLTensor/[[context]]}} is not [=this=], then [=exception/throw=] a {{TypeError}}. + 1. If |tensor|.{{MLTensor/[[isDestroyed]]}} is true, then [=exception/throw=] a {{TypeError}}. + 1. If [=validating tensors with descriptors=] given |inputs| and |graph|. {{MLGraph/[[inputDescriptors]]}} returns false, then [=exception/throw=] a {{TypeError}}. + 1. If [=validating tensors with descriptors=] given |outputs| and |graph|. {{MLGraph/[[outputDescriptors]]}} returns false, then [=exception/throw=] a {{TypeError}}. + 1. Enqueue the following steps to |graph|.{{MLGraph/[[context]]}}.{{MLContext/[[timeline]]}}: + 1. Issue a compute request to |graph|.{{MLGraph/[[implementation]]}} given |inputs| and |outputs|. Issue(778): Add a mechanism for reporting errors during graph execution. - 1. Return {{undefined}}. + 1. Return undefined.
#### Examples #### {#api-mlcontext-dispatch-examples} @@ -1169,7 +1171,7 @@ Creates an {{MLTensor}} associated with this {{MLContext}}. **Arguments:** - descriptor: an {{MLTensorDescriptor}}. - **Returns:** Promise. + **Returns:** {{Promise}}<{{MLTensor}}>.
@@ -1179,8 +1181,8 @@ Creates an {{MLTensor}} associated with this {{MLContext}}. 1. Let |global| be [=this=]'s [=relevant global object=]. 1. Let |tensor| be the result of [=creating an MLTensor=] given [=this=], and |descriptor|. 1. Let |promise| be [=a new promise=]. - 1. Enqueue the following steps to [=this=]'s {{MLContext/[[timeline]]}}: - 1. Create |tensor|'s {{MLTensor/[[data]]}} given |descriptor| and initialize all bytes to zeros. + 1. Enqueue the following steps to [=this=].{{MLContext/[[timeline]]}}: + 1. Create |tensor|.{{MLTensor/[[data]]}} given |descriptor| and initialize all bytes to zeros. 1. If that fails, then [=queue an ML task=] with |global| to [=reject=] |promise| with an "{{UnknownError}}" {{DOMException}}, and abort these steps. 1. Otherwise, [=queue an ML task=] with |global| to [=resolve=] |promise| with |tensor|. 1. Return |promise|. @@ -1188,13 +1190,13 @@ Creates an {{MLTensor}} associated with this {{MLContext}}. ### {{MLContext/readTensor(tensor)}} ### {#api-mlcontext-readtensor} -Reads back the {{MLTensor/[[data]]}} of an {{MLTensor}} from the {{MLContext}}'s {{MLContext/[[timeline]]}} to script. +Reads back the {{MLTensor/[[data]]}} of an {{MLTensor}} from the {{MLContext}}.{{MLContext/[[timeline]]}} to script.
**Arguments:** - tensor: an {{MLTensor}}. The tensor to be read. - **Returns:** Promise. A buffer containing the result of the read. + **Returns:** {{Promise}}<{{ArrayBuffer}}>. A buffer containing the result of the read.
@@ -1203,13 +1205,14 @@ Reads back the {{MLTensor/[[data]]}} of an {{MLTensor}} from the {{MLContext}}'s 1. Let |global| be [=this=]'s [=relevant global object=]. 1. Let |realm| be [=this=]'s [=relevant realm=]. - 1. If |tensor|'s {{MLGraph/[[context]]}} is not [=this=], then return [=a new promise=] [=rejected=] with a {{TypeError}}. - 1. If |tensor|'s {{MLTensor/[[isDestroyed]]}} is true, then return [=a new promise=] [=rejected=] with a {{TypeError}}. + 1. If |tensor|.{{MLGraph/[[context]]}} is not [=this=], then return [=a new promise=] [=rejected=] with a {{TypeError}}. + 1. If |tensor|.{{MLTensor/[[isDestroyed]]}} is true, then return [=a new promise=] [=rejected=] with a {{TypeError}}. 1. If |tensor|'s {{MLTensor/readable}} is false, then return [=a new promise=] [=rejected=] with a {{TypeError}}. 1. Let |promise| be [=a new promise=]. - 1. Enqueue the following steps to |tensor|'s {{MLGraph/[[context]]}}'s {{MLContext/[[timeline]]}}: - 1. Let |bytes| be a [=/byte sequence=] containing a copy of |tensor|'s {{MLTensor/[[data]]}}. - 1. [=Queue an ML task=] with |global| to [=ArrayBuffer/create=] an {{ArrayBuffer}} |result| given |bytes| and |realm| and then [=resolve=] |promise| with |result|. + 1. Enqueue the following steps to |tensor|.{{MLGraph/[[context]]}}.{{MLContext/[[timeline]]}}: + 1. Let |bytes| be a [=/byte sequence=] containing a copy of |tensor|.{{MLTensor/[[data]]}}. + 1. If that fails, then [=queue an ML task=] with |global| to [=reject=] |promise| with an "{{UnknownError}}" {{DOMException}}, and abort these steps. + 1. Otherwise, [=queue an ML task=] with |global| to [=ArrayBuffer/create=] an {{ArrayBuffer}} |result| given |bytes| and |realm| and then [=resolve=] |promise| with |result|. 1. Return |promise|.
@@ -1222,7 +1225,7 @@ Bring-your-own-buffer variant of {{MLContext/readTensor(tensor)}}. Reads back th - tensor: an {{MLTensor}}. The tensor to be read. - outputData: an {{AllowSharedBufferSource}}. The buffer to read the result into. - **Returns:** Promise. + **Returns:** {{Promise}}<{{undefined}}>.
@@ -1230,14 +1233,15 @@ Bring-your-own-buffer variant of {{MLContext/readTensor(tensor)}}. Reads back th The readTensor(|tensor|, |outputData|) method steps are: 1. Let |global| be [=this=]'s [=relevant global object=]. - 1. If |tensor|'s {{MLGraph/[[context]]}} is not [=this=], then return [=a new promise=] [=rejected=] with a {{TypeError}}. - 1. If |tensor|'s {{MLTensor/[[isDestroyed]]}} is true, then return [=a new promise=] [=rejected=] with a {{TypeError}}. + 1. If |tensor|.{{MLGraph/[[context]]}} is not [=this=], then return [=a new promise=] [=rejected=] with a {{TypeError}}. + 1. If |tensor|.{{MLTensor/[[isDestroyed]]}} is true, then return [=a new promise=] [=rejected=] with a {{TypeError}}. 1. If |tensor|'s {{MLTensor/readable}} is false, then return [=a new promise=] [=rejected=] with a {{TypeError}}. - 1. If [=validating buffer with descriptor=] given |outputData| and |tensor|'s {{MLTensor/[[descriptor]]}} returns false, then return [=a new promise=] [=rejected=] with a {{TypeError}}. + 1. If [=validating buffer with descriptor=] given |outputData| and |tensor|.{{MLTensor/[[descriptor]]}} returns false, then return [=a new promise=] [=rejected=] with a {{TypeError}}. 1. Let |promise| be [=a new promise=]. - 1. Enqueue the following steps to |tensor|'s {{MLGraph/[[context]]}}'s {{MLContext/[[timeline]]}}: - 1. Let |bytes| be a [=/byte sequence=] containing a copy of |tensor|'s {{MLTensor/[[data]]}}. - 1. [=Queue an ML task=] with |global| to [=ArrayBuffer/write=] |bytes| to |outputData| and then [=resolve=] |promise| with {{undefined}}. + 1. Enqueue the following steps to |tensor|.{{MLGraph/[[context]]}}'s {{MLContext/[[timeline]]}}: + 1. Let |bytes| be a [=/byte sequence=] containing a copy of |tensor|.{{MLTensor/[[data]]}}. + 1. If that fails, then [=queue an ML task=] with |global| to [=reject=] |promise| with an "{{UnknownError}}" {{DOMException}}, and abort these steps. + 1. Otherwise, [=queue an ML task=] with |global| to [=ArrayBuffer/write=] |bytes| to |outputData| and then [=resolve=] |promise| with {{undefined}}. 1. Return |promise|.
@@ -1257,18 +1261,18 @@ Writes data to the {{MLTensor/[[data]]}} of an {{MLTensor}} on the {{MLContext}} The writeTensor(|tensor|, |inputData|) method steps are: - 1. If |tensor|'s {{MLGraph/[[context]]}} is not [=this=], then [=exception/throw=] a {{TypeError}}. - 1. If |tensor|'s {{MLTensor/[[isDestroyed]]}} is true, then [=exception/throw=] a {{TypeError}}. - 1. If |tensor|'s {{MLTensor/writable}} is false, then [=exception/throw=] a {{TypeError}}. - 1. If [=validating buffer with descriptor=] given |inputData| and |tensor|'s {{MLTensor/[[descriptor]]}} returns false, then [=exception/throw=] a {{TypeError}}. + 1. If |tensor|.{{MLGraph/[[context]]}} is not [=this=], then [=exception/throw=] a {{TypeError}}. + 1. If |tensor|.{{MLTensor/[[isDestroyed]]}} is true, then [=exception/throw=] a {{TypeError}}. + 1. If |tensor|.{{MLTensor/writable}} is false, then [=exception/throw=] a {{TypeError}}. + 1. If [=validating buffer with descriptor=] given |inputData| and |tensor|.{{MLTensor/[[descriptor]]}} returns false, then [=exception/throw=] a {{TypeError}}. 1. Let |bytes| be the result of [=getting a copy of the bytes held by the buffer source=] given |inputData|. - 1. [=Assert=]: |bytes|'s [=byte sequence/length=] is equal to |tensor|'s {{MLTensor/[[descriptor]]}}'s [=MLOperandDescriptor/byte length=]. - 1. Enqueue the following steps to |tensor|'s {{MLGraph/[[context]]}}'s {{MLContext/[[timeline]]}}: - 1. Copy |bytes| to |tensor|'s {{MLTensor/[[data]]}}. + 1. [=Assert=]: |bytes|'s [=byte sequence/length=] is equal to |tensor|.{{MLTensor/[[descriptor]]}}'s [=MLOperandDescriptor/byte length=]. + 1. Enqueue the following steps to |tensor|.{{MLGraph/[[context]]}}.{{MLContext/[[timeline]]}}: + 1. Copy |bytes| to |tensor|.{{MLTensor/[[data]]}}. Issue(778): Add a mechanism for reporting errors while writing to a tensor. - 1. Return {{undefined}}. + 1. Return undefined.
### {{MLContext/opSupportLimits()}} ### {#api-mlcontext-opsupportlimits} @@ -1601,7 +1605,7 @@ interface MLTensor { : \[[data]] of an [=implementation-defined=] type :: - The bytes backing the {{MLTensor}}. This data may only be modified from the {{MLTensor/[[context]]}}'s {{MLContext/[[timeline]]}}. + The bytes backing the {{MLTensor}}. This data may only be accessed or modified from the {{MLTensor/[[context]]}}'s {{MLContext/[[timeline]]}}. @@ -1609,9 +1613,9 @@ An {{MLTensor}}'s dataType is its {{MLTensor/[[descripto An {{MLTensor}}'s shape is its {{MLTensor/[[descriptor]]}}'s {{MLOperandDescriptor/shape}}. -An {{MLTensor}} readable if its {{MLTensor/[[descriptor]]}}'s {{MLTensorDescriptor/readable}} is true. +An {{MLTensor}} is readable if its {{MLTensor/[[descriptor]]}}'s {{MLTensorDescriptor/readable}} is true. -An {{MLTensor}} writable if its {{MLTensor/[[descriptor]]}}'s {{MLTensorDescriptor/writable}} is true. +An {{MLTensor}} is writable if its {{MLTensor/[[descriptor]]}}'s {{MLTensorDescriptor/writable}} is true. The dataType [=getter steps=] are to return [=this=]'s [=MLTensor/dataType=]. @@ -1623,22 +1627,22 @@ The writable [=getter steps=] are to return [= ### Creating an {{MLTensor}} ### {#api-mltensor-create} -An {{MLTensor}} is created with by its associated {{MLContext}}. Note that creating an {{MLTensor}} does not include initializing its {{MLTensor/[[data]]}}. This {{MLTensor/[[data]]}} should be initialized soon afterwards. +An {{MLTensor}} is created by its associated {{MLContext}}.
To create an MLTensor given {{MLContext}} |context| and {{MLTensorDescriptor}} |descriptor|, run the following steps: 1. Let |tensor| be a new {{MLTensor}}. - 1. Set |tensor|'s {{MLTensor/[[context]]}} to |context|. - 1. Set |tensor|'s {{MLTensor/[[descriptor]]}} to |descriptor|. - 1. Set |tensor|'s {{MLTensor/[[isDestroyed]]}} to false. + 1. Set |tensor|.{{MLTensor/[[context]]}} to |context|. + 1. Set |tensor|.{{MLTensor/[[descriptor]]}} to |descriptor|. + 1. Set |tensor|.{{MLTensor/[[isDestroyed]]}} to false. 1. Return |tensor|.
### {{MLTensor/destroy()}} ### {#api-mltensor-destroy} -Destroys the {{MLTensor}}. This method is idempotent. +Frees the resources associated with the {{MLTensor}}. This method is idempotent.
**Returns:** {{undefined}}. @@ -1648,13 +1652,13 @@ Destroys the {{MLTensor}}. This method is idempotent. The destroy() method steps are: - 1. Set [=this=]'s {{MLTensor/[[isDestroyed]]}} to true. - 1. Enqueue the following steps to [=this=]'s {{MLTensor/[[context]]}}'s {{MLContext/[[timeline]]}}: - 1. Free [=this=]'s {{MLTensor/[[data]]}}. - 1. Return {{undefined}}. + 1. Set [=this=].{{MLTensor/[[isDestroyed]]}} to true. + 1. Enqueue the following steps to [=this=].{{MLTensor/[[context]]}}'s {{MLContext/[[timeline]]}}: + 1. Free [=this=].{{MLTensor/[[data]]}}. + 1. Return undefined. -Note: Since no further operations can be enqueued using this tensor, implementations can free resource allocations associated with this tensor. +Note: Since no further operations can be enqueued using this tensor, implementations can free any additional resource allocations associated with this tensor. ## {{MLGraphBuilder}} interface ## {#api-mlgraphbuilder} From 8b91ea1fbfa587ce8c73eede8c84642f69539ab1 Mon Sep 17 00:00:00 2001 From: Austin Sullivan Date: Fri, 15 Nov 2024 15:59:42 -0800 Subject: [PATCH 05/11] address inexorabletash feedback --- index.bs | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/index.bs b/index.bs index d7397e45..d491c355 100644 --- a/index.bs +++ b/index.bs @@ -1207,7 +1207,7 @@ Reads back the {{MLTensor/[[data]]}} of an {{MLTensor}} from the {{MLContext}}.{ 1. Let |realm| be [=this=]'s [=relevant realm=]. 1. If |tensor|.{{MLGraph/[[context]]}} is not [=this=], then return [=a new promise=] [=rejected=] with a {{TypeError}}. 1. If |tensor|.{{MLTensor/[[isDestroyed]]}} is true, then return [=a new promise=] [=rejected=] with a {{TypeError}}. - 1. If |tensor|'s {{MLTensor/readable}} is false, then return [=a new promise=] [=rejected=] with a {{TypeError}}. + 1. If |tensor|.{{MLTensor/[[descriptor]]}}["{{MLTensorDescriptor/readable}}"] is false, then return [=a new promise=] [=rejected=] with a {{TypeError}}. 1. Let |promise| be [=a new promise=]. 1. Enqueue the following steps to |tensor|.{{MLGraph/[[context]]}}.{{MLContext/[[timeline]]}}: 1. Let |bytes| be a [=/byte sequence=] containing a copy of |tensor|.{{MLTensor/[[data]]}}. @@ -1235,7 +1235,7 @@ Bring-your-own-buffer variant of {{MLContext/readTensor(tensor)}}. Reads back th 1. Let |global| be [=this=]'s [=relevant global object=]. 1. If |tensor|.{{MLGraph/[[context]]}} is not [=this=], then return [=a new promise=] [=rejected=] with a {{TypeError}}. 1. If |tensor|.{{MLTensor/[[isDestroyed]]}} is true, then return [=a new promise=] [=rejected=] with a {{TypeError}}. - 1. If |tensor|'s {{MLTensor/readable}} is false, then return [=a new promise=] [=rejected=] with a {{TypeError}}. + 1. If |tensor|{{MLTensor/[[descriptor]]}}["{{MLTensorDescriptor/readable}}"] is false, then return [=a new promise=] [=rejected=] with a {{TypeError}}. 1. If [=validating buffer with descriptor=] given |outputData| and |tensor|.{{MLTensor/[[descriptor]]}} returns false, then return [=a new promise=] [=rejected=] with a {{TypeError}}. 1. Let |promise| be [=a new promise=]. 1. Enqueue the following steps to |tensor|.{{MLGraph/[[context]]}}'s {{MLContext/[[timeline]]}}: @@ -1263,7 +1263,7 @@ Writes data to the {{MLTensor/[[data]]}} of an {{MLTensor}} on the {{MLContext}} 1. If |tensor|.{{MLGraph/[[context]]}} is not [=this=], then [=exception/throw=] a {{TypeError}}. 1. If |tensor|.{{MLTensor/[[isDestroyed]]}} is true, then [=exception/throw=] a {{TypeError}}. - 1. 
If |tensor|.{{MLTensor/writable}} is false, then [=exception/throw=] a {{TypeError}}. + 1. If |tensor|.{{MLTensor/[[descriptor]]}}["{{MLTensorDescriptor/writable}}"] is false, then [=exception/throw=] a {{TypeError}}. 1. If [=validating buffer with descriptor=] given |inputData| and |tensor|.{{MLTensor/[[descriptor]]}} returns false, then [=exception/throw=] a {{TypeError}}. 1. Let |bytes| be the result of [=getting a copy of the bytes held by the buffer source=] given |inputData|. 1. [=Assert=]: |bytes|'s [=byte sequence/length=] is equal to |tensor|.{{MLTensor/[[descriptor]]}}'s [=MLOperandDescriptor/byte length=]. @@ -1613,17 +1613,13 @@ An {{MLTensor}}'s dataType is its {{MLTensor/[[descripto An {{MLTensor}}'s shape is its {{MLTensor/[[descriptor]]}}'s {{MLOperandDescriptor/shape}}. -An {{MLTensor}} is readable if its {{MLTensor/[[descriptor]]}}'s {{MLTensorDescriptor/readable}} is true. - -An {{MLTensor}} is writable if its {{MLTensor/[[descriptor]]}}'s {{MLTensorDescriptor/writable}} is true. - The dataType [=getter steps=] are to return [=this=]'s [=MLTensor/dataType=]. The shape [=getter steps=] are to return [=this=]'s [=MLTensor/shape=]. -The readable [=getter steps=] are to return [=this=]'s [=MLTensor/readable=]. +The readable [=getter steps=] are to return [=this=].{{MLTensor/[[descriptor]]}}["{{MLTensorDescriptor/readable}}"]. -The writable [=getter steps=] are to return [=this=]'s [=MLTensor/writable=]. +The writable [=getter steps=] are to return [=this=].{{MLTensor/[[descriptor]]}}["{{MLTensorDescriptor/writable}}"]. ### Creating an {{MLTensor}} ### {#api-mltensor-create} From 4043982383266702bd777c0d94fdbd31326c1e8b Mon Sep 17 00:00:00 2001 From: Austin Sullivan Date: Wed, 20 Nov 2024 09:21:14 -0800 Subject: [PATCH 06/11] validate AllowSharedBufferSource + other nits --- index.bs | 53 +++++++++++++++++++++++++++++++++++------------------ 1 file changed, 35 insertions(+), 18 deletions(-) diff --git a/index.bs b/index.bs index d491c355..03252759 100644 --- a/index.bs +++ b/index.bs @@ -927,10 +927,18 @@ When the {{MLContext/[[contextType]]}} is set to [=context type/default=] with t
- To validate buffer with descriptor given {{ArrayBufferView}} |bufferView| and {{MLOperandDescriptor}} |descriptor|, run the following steps: + To validate buffer with descriptor given {{AllowSharedBufferSource}} |bufferSource| and {{MLOperandDescriptor}} |descriptor|, run the following steps: - 1. If |bufferView|'s [=element type=] does not match to |descriptor|.{{MLOperandDescriptor/dataType}} according to [this table](#appendices-mloperanddatatype-arraybufferview-compatibility), return false. - 1. If |bufferView|.\[[ByteLength]] is not equal to |descriptor|'s [=MLOperandDescriptor/byte length=], return false. + 1. If |bufferSource|'s [=BufferSource/byte length=] is not equal to |descriptor|'s [=MLOperandDescriptor/byte length=] return false. + 1. Switch on |bufferSource|: +
+ : {{ArrayBuffer}} + :: Return true. + : {{SharedArrayBuffer}} + :: Return true. + : {{ArrayBufferView}} + :: If |bufferSource|'s [=element type=] matches |descriptor|'s {{MLOperandDescriptor/dataType}} according to [this table](#appendices-mloperanddatatype-arraybufferview-compatibility) return true, otherwise return false. +
@@ -1087,7 +1095,7 @@ Schedules the computational workload of a compiled {{MLGraph}} on the {{MLContex **Returns:** {{undefined}}.
-Note: `dispatch()` itself provides no signal that graph execution has completed. Rather, callers should await the results of reading back the output tensors. See [[#api-mlcontext-dispatch-examples]] below. +Note: similar to `dispatch()`, `writeTensor()` itself provides no signal that the write has completed. To inspect the contents of a tensor, callers should await the results of reading back the tensor.
@@ -1103,7 +1111,7 @@ Note: `dispatch()` itself provides no signal that graph execution has completed. 1. Enqueue the following steps to |graph|.{{MLGraph/[[context]]}}.{{MLContext/[[timeline]]}}: 1. Issue a compute request to |graph|.{{MLGraph/[[implementation]]}} given |inputs| and |outputs|. - Issue(778): Add a mechanism for reporting errors during graph execution. + Issue(778): Add a mechanism for reporting errors during graph execution. 1. Return undefined.
@@ -1132,13 +1140,13 @@ Note: `dispatch()` itself provides no signal that graph execution has completed. const [inputTensorA, inputTensorB, outputTensorC] = await Promise.all([ context.createTensor({ - dataType: 'float32', shape: [2, 2], writable: true + dataType: A.dataType, shape: A.shape, writable: true }), context.createTensor({ - dataType: 'float32', shape: [2, 2], writable: true + dataType: B.dataType, shape: B.shape, writable: true }), context.createTensor({ - dataType: 'float32', shape: [2, 2], readable: true + dataType: C.dataType, shape: C.shape, readable: true }) ]); @@ -1207,7 +1215,7 @@ Reads back the {{MLTensor/[[data]]}} of an {{MLTensor}} from the {{MLContext}}.{ 1. Let |realm| be [=this=]'s [=relevant realm=]. 1. If |tensor|.{{MLGraph/[[context]]}} is not [=this=], then return [=a new promise=] [=rejected=] with a {{TypeError}}. 1. If |tensor|.{{MLTensor/[[isDestroyed]]}} is true, then return [=a new promise=] [=rejected=] with a {{TypeError}}. - 1. If |tensor|.{{MLTensor/[[descriptor]]}}["{{MLTensorDescriptor/readable}}"] is false, then return [=a new promise=] [=rejected=] with a {{TypeError}}. + 1. If |tensor|'s {{MLTensor/readable}} returns false, then return [=a new promise=] [=rejected=] with a {{TypeError}}. 1. Let |promise| be [=a new promise=]. 1. Enqueue the following steps to |tensor|.{{MLGraph/[[context]]}}.{{MLContext/[[timeline]]}}: 1. Let |bytes| be a [=/byte sequence=] containing a copy of |tensor|.{{MLTensor/[[data]]}}. @@ -1235,13 +1243,19 @@ Bring-your-own-buffer variant of {{MLContext/readTensor(tensor)}}. Reads back th 1. Let |global| be [=this=]'s [=relevant global object=]. 1. If |tensor|.{{MLGraph/[[context]]}} is not [=this=], then return [=a new promise=] [=rejected=] with a {{TypeError}}. 1. If |tensor|.{{MLTensor/[[isDestroyed]]}} is true, then return [=a new promise=] [=rejected=] with a {{TypeError}}. - 1. If |tensor|{{MLTensor/[[descriptor]]}}["{{MLTensorDescriptor/readable}}"] is false, then return [=a new promise=] [=rejected=] with a {{TypeError}}. + 1. If |tensor|'s {{MLTensor/readable}} returns false, then return [=a new promise=] [=rejected=] with a {{TypeError}}. 1. If [=validating buffer with descriptor=] given |outputData| and |tensor|.{{MLTensor/[[descriptor]]}} returns false, then return [=a new promise=] [=rejected=] with a {{TypeError}}. 1. Let |promise| be [=a new promise=]. - 1. Enqueue the following steps to |tensor|.{{MLGraph/[[context]]}}'s {{MLContext/[[timeline]]}}: + 1. Enqueue the following steps to |tensor|.{{MLGraph/[[context]]}}.{{MLContext/[[timeline]]}}: 1. Let |bytes| be a [=/byte sequence=] containing a copy of |tensor|.{{MLTensor/[[data]]}}. 1. If that fails, then [=queue an ML task=] with |global| to [=reject=] |promise| with an "{{UnknownError}}" {{DOMException}}, and abort these steps. - 1. Otherwise, [=queue an ML task=] with |global| to [=ArrayBuffer/write=] |bytes| to |outputData| and then [=resolve=] |promise| with {{undefined}}. + 1. Otherwise, [=queue an ML task=] with |global| and the following steps: + 1. [=ArrayBuffer/write=] |bytes| to |outputData|. + 1. If that fails, [=reject=] |promise| with an "{{UnknownError}}" {{DOMException}}, and abort these steps. + + Note: Writing to |outputData| may fail if the buffer has been detached between when it was [=validating buffer with descriptor|validated=] above and these steps. + + 1. Otherwise, [=resolve=] |promise| with {{undefined}}. 1. Return |promise|. 
@@ -1263,18 +1277,21 @@ Writes data to the {{MLTensor/[[data]]}} of an {{MLTensor}} on the {{MLContext}} 1. If |tensor|.{{MLGraph/[[context]]}} is not [=this=], then [=exception/throw=] a {{TypeError}}. 1. If |tensor|.{{MLTensor/[[isDestroyed]]}} is true, then [=exception/throw=] a {{TypeError}}. - 1. If |tensor|.{{MLTensor/[[descriptor]]}}["{{MLTensorDescriptor/writable}}"] is false, then [=exception/throw=] a {{TypeError}}. + 1. If |tensor|'s {{MLTensor/writable}} returns false, then [=exception/throw=] a {{TypeError}}. 1. If [=validating buffer with descriptor=] given |inputData| and |tensor|.{{MLTensor/[[descriptor]]}} returns false, then [=exception/throw=] a {{TypeError}}. 1. Let |bytes| be the result of [=getting a copy of the bytes held by the buffer source=] given |inputData|. 1. [=Assert=]: |bytes|'s [=byte sequence/length=] is equal to |tensor|.{{MLTensor/[[descriptor]]}}'s [=MLOperandDescriptor/byte length=]. 1. Enqueue the following steps to |tensor|.{{MLGraph/[[context]]}}.{{MLContext/[[timeline]]}}: 1. Copy |bytes| to |tensor|.{{MLTensor/[[data]]}}. - Issue(778): Add a mechanism for reporting errors while writing to a tensor. + Issue(778): Add a mechanism for reporting errors while writing to a tensor. 1. Return undefined. +Note: `dispatch()` itself provides no signal that graph execution has completed. Rather, callers should await the results of reading back the output tensors. See [[#api-mlcontext-dispatch-examples]] below. + + ### {{MLContext/opSupportLimits()}} ### {#api-mlcontext-opsupportlimits} The {{MLContext/opSupportLimits()}} exposes level of support that differs across implementations at operator level. Consumers of the WebNN API are encouraged to probe feature support level by using {{MLContext/opSupportLimits()}} to determine the optimal model architecture to be deployed for each target platform. @@ -1605,7 +1622,7 @@ interface MLTensor { : \[[data]] of an [=implementation-defined=] type :: - The bytes backing the {{MLTensor}}. This data may only be accessed or modified from the {{MLTensor/[[context]]}}'s {{MLContext/[[timeline]]}}. + The bytes backing the {{MLTensor}}. This data may only be accessed or modified from the {{MLTensor/[[context]]}}.{{MLContext/[[timeline]]}}. @@ -1617,9 +1634,9 @@ The dataType [=getter steps=] are to return [= The shape [=getter steps=] are to return [=this=]'s [=MLTensor/shape=]. -The readable [=getter steps=] are to return [=this=].{{MLTensor/[[descriptor]]}}["{{MLTensorDescriptor/readable}}"]. +The readable [=getter steps=] are to return [=this=].{{MLTensor/[[descriptor]]}}.{{MLTensorDescriptor/readable}}. -The writable [=getter steps=] are to return [=this=].{{MLTensor/[[descriptor]]}}["{{MLTensorDescriptor/writable}}"]. +The writable [=getter steps=] are to return [=this=].{{MLTensor/[[descriptor]]}}.{{MLTensorDescriptor/writable}}. ### Creating an {{MLTensor}} ### {#api-mltensor-create} @@ -1649,7 +1666,7 @@ Frees the resources associated with the {{MLTensor}}. This method is idempotent. The destroy() method steps are: 1. Set [=this=].{{MLTensor/[[isDestroyed]]}} to true. - 1. Enqueue the following steps to [=this=].{{MLTensor/[[context]]}}'s {{MLContext/[[timeline]]}}: + 1. Enqueue the following steps to [=this=].{{MLTensor/[[context]]}}.{{MLContext/[[timeline]]}}: 1. Free [=this=].{{MLTensor/[[data]]}}. 1. Return undefined. 
From 45bf38f840938d85c41eedef6c479f1637f780c3 Mon Sep 17 00:00:00 2001 From: Austin Sullivan Date: Wed, 20 Nov 2024 11:24:04 -0800 Subject: [PATCH 07/11] handle detaching a buffer during readTensor --- index.bs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/index.bs b/index.bs index 03252759..c77bf839 100644 --- a/index.bs +++ b/index.bs @@ -1250,12 +1250,12 @@ Bring-your-own-buffer variant of {{MLContext/readTensor(tensor)}}. Reads back th 1. Let |bytes| be a [=/byte sequence=] containing a copy of |tensor|.{{MLTensor/[[data]]}}. 1. If that fails, then [=queue an ML task=] with |global| to [=reject=] |promise| with an "{{UnknownError}}" {{DOMException}}, and abort these steps. 1. Otherwise, [=queue an ML task=] with |global| and the following steps: - 1. [=ArrayBuffer/write=] |bytes| to |outputData|. - 1. If that fails, [=reject=] |promise| with an "{{UnknownError}}" {{DOMException}}, and abort these steps. + 1. If |outputData| is [=BufferSource/detached=], [=reject=] |promise| with a {{TypeError}}, and abort these steps. - Note: Writing to |outputData| may fail if the buffer has been detached between when it was [=validating buffer with descriptor|validated=] above and these steps. + Note: [=Validating buffer with descriptor=] above will fail if |outputData| is detached, but it's possible |outputData| may detach between then and now. - 1. Otherwise, [=resolve=] |promise| with {{undefined}}. + 1. [=ArrayBuffer/write=] |bytes| to |outputData|. + 1. [=Resolve=] |promise| with {{undefined}}. 1. Return |promise|. From 71184cae0e2dafe82997af75b55b6cd822737918 Mon Sep 17 00:00:00 2001 From: Austin Sullivan Date: Wed, 20 Nov 2024 22:18:27 -0800 Subject: [PATCH 08/11] whoopsies --- index.bs | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/index.bs b/index.bs index c77bf839..9b7b6d8c 100644 --- a/index.bs +++ b/index.bs @@ -1095,7 +1095,7 @@ Schedules the computational workload of a compiled {{MLGraph}} on the {{MLContex **Returns:** {{undefined}}. -Note: similar to `dispatch()`, `writeTensor()` itself provides no signal that the write has completed. To inspect the contents of a tensor, callers should await the results of reading back the tensor. +Note: `dispatch()` itself provides no signal that graph execution has completed. Rather, callers should await the results of reading back the output tensors. See [[#api-mlcontext-dispatch-examples]] below.
@@ -1289,8 +1289,7 @@ Writes data to the {{MLTensor/[[data]]}} of an {{MLTensor}} on the {{MLContext}} 1. Return undefined.
-Note: `dispatch()` itself provides no signal that graph execution has completed. Rather, callers should await the results of reading back the output tensors. See [[#api-mlcontext-dispatch-examples]] below. - +Note: similar to `dispatch()`, `writeTensor()` itself provides no signal that the write has completed. To inspect the contents of a tensor, callers should await the results of reading back the tensor. ### {{MLContext/opSupportLimits()}} ### {#api-mlcontext-opsupportlimits} The {{MLContext/opSupportLimits()}} exposes level of support that differs across implementations at operator level. Consumers of the WebNN API are encouraged to probe feature support level by using {{MLContext/opSupportLimits()}} to determine the optimal model architecture to be deployed for each target platform. From 2c9ddfb7c0808095cbfcd298c9186d2123c1f9ac Mon Sep 17 00:00:00 2001 From: Austin Sullivan Date: Thu, 21 Nov 2024 14:31:01 -0800 Subject: [PATCH 09/11] do not use getters in algorithms --- index.bs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/index.bs b/index.bs index 9b7b6d8c..1f74200b 100644 --- a/index.bs +++ b/index.bs @@ -1215,7 +1215,7 @@ Reads back the {{MLTensor/[[data]]}} of an {{MLTensor}} from the {{MLContext}}.{ 1. Let |realm| be [=this=]'s [=relevant realm=]. 1. If |tensor|.{{MLGraph/[[context]]}} is not [=this=], then return [=a new promise=] [=rejected=] with a {{TypeError}}. 1. If |tensor|.{{MLTensor/[[isDestroyed]]}} is true, then return [=a new promise=] [=rejected=] with a {{TypeError}}. - 1. If |tensor|'s {{MLTensor/readable}} returns false, then return [=a new promise=] [=rejected=] with a {{TypeError}}. + 1. If |tensor|.{{MLTensor/[[descriptor]]}}.{{MLTensorDescriptor/readable}} is false, then return [=a new promise=] [=rejected=] with a {{TypeError}}. 1. Let |promise| be [=a new promise=]. 1. Enqueue the following steps to |tensor|.{{MLGraph/[[context]]}}.{{MLContext/[[timeline]]}}: 1. Let |bytes| be a [=/byte sequence=] containing a copy of |tensor|.{{MLTensor/[[data]]}}. @@ -1243,7 +1243,7 @@ Bring-your-own-buffer variant of {{MLContext/readTensor(tensor)}}. Reads back th 1. Let |global| be [=this=]'s [=relevant global object=]. 1. If |tensor|.{{MLGraph/[[context]]}} is not [=this=], then return [=a new promise=] [=rejected=] with a {{TypeError}}. 1. If |tensor|.{{MLTensor/[[isDestroyed]]}} is true, then return [=a new promise=] [=rejected=] with a {{TypeError}}. - 1. If |tensor|'s {{MLTensor/readable}} returns false, then return [=a new promise=] [=rejected=] with a {{TypeError}}. + 1. If |tensor|.{{MLTensor/[[descriptor]]}}.{{MLTensorDescriptor/readable}} is false, then return [=a new promise=] [=rejected=] with a {{TypeError}}. 1. If [=validating buffer with descriptor=] given |outputData| and |tensor|.{{MLTensor/[[descriptor]]}} returns false, then return [=a new promise=] [=rejected=] with a {{TypeError}}. 1. Let |promise| be [=a new promise=]. 1. Enqueue the following steps to |tensor|.{{MLGraph/[[context]]}}.{{MLContext/[[timeline]]}}: @@ -1277,7 +1277,7 @@ Writes data to the {{MLTensor/[[data]]}} of an {{MLTensor}} on the {{MLContext}} 1. If |tensor|.{{MLGraph/[[context]]}} is not [=this=], then [=exception/throw=] a {{TypeError}}. 1. If |tensor|.{{MLTensor/[[isDestroyed]]}} is true, then [=exception/throw=] a {{TypeError}}. - 1. If |tensor|'s {{MLTensor/writable}} returns false, then [=exception/throw=] a {{TypeError}}. + 1. 
If |tensor|.{{MLTensor/[[descriptor]]}}.{{MLTensorDescriptor/writable}} is false, then [=exception/throw=] a {{TypeError}}. 1. If [=validating buffer with descriptor=] given |inputData| and |tensor|.{{MLTensor/[[descriptor]]}} returns false, then [=exception/throw=] a {{TypeError}}. 1. Let |bytes| be the result of [=getting a copy of the bytes held by the buffer source=] given |inputData|. 1. [=Assert=]: |bytes|'s [=byte sequence/length=] is equal to |tensor|.{{MLTensor/[[descriptor]]}}'s [=MLOperandDescriptor/byte length=]. From b8a7609a3ecbe5880255271ab354571367ba5901 Mon Sep 17 00:00:00 2001 From: Austin Sullivan Date: Thu, 21 Nov 2024 20:29:35 -0800 Subject: [PATCH 10/11] s/free/release --- index.bs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/index.bs b/index.bs index 1f74200b..01a50da9 100644 --- a/index.bs +++ b/index.bs @@ -1654,7 +1654,7 @@ An {{MLTensor}} is created by its associated {{MLContext}}. ### {{MLTensor/destroy()}} ### {#api-mltensor-destroy} -Frees the resources associated with the {{MLTensor}}. This method is idempotent. +Releases the resources associated with the {{MLTensor}}. This method is idempotent.
**Returns:** {{undefined}}. @@ -1666,11 +1666,11 @@ Frees the resources associated with the {{MLTensor}}. This method is idempotent. 1. Set [=this=].{{MLTensor/[[isDestroyed]]}} to true. 1. Enqueue the following steps to [=this=].{{MLTensor/[[context]]}}.{{MLContext/[[timeline]]}}: - 1. Free [=this=].{{MLTensor/[[data]]}}. + 1. Release [=this=].{{MLTensor/[[data]]}}. 1. Return undefined. -Note: Since no further operations can be enqueued using this tensor, implementations can free any additional resource allocations associated with this tensor. +Note: Since no further operations can be enqueued using this tensor, implementations can free any additional resource allocations associated with this tensor once all previously submitted operations using it are complete. ## {{MLGraphBuilder}} interface ## {#api-mlgraphbuilder} From 493ede36a375a28b2283d7742430d09be95a16fb Mon Sep 17 00:00:00 2001 From: Austin Sullivan Date: Fri, 22 Nov 2024 11:47:57 -0800 Subject: [PATCH 11/11] address more inexorabletash feedback --- index.bs | 27 ++++++++++++++------------- 1 file changed, 14 insertions(+), 13 deletions(-) diff --git a/index.bs b/index.bs index 01a50da9..011a6a25 100644 --- a/index.bs +++ b/index.bs @@ -861,7 +861,7 @@ The {{MLContext}} interface represents a global state of neural network compute