diff --git a/.eslintrc.js b/.eslintrc.js
index a859b3926..a8f243f53 100644
--- a/.eslintrc.js
+++ b/.eslintrc.js
@@ -28,6 +28,10 @@ module.exports = {
"chrome": true,
"ga": true,
"getPolicyFromBooleans": true,
+ "importScripts": true,
+ // From WebGPU specification
+ "GPUBufferUsage": true,
+ "GPUTextureUsage": true,
// From Streams specification
"TransformStream": true,
// From WebCodec specification
diff --git a/index.html b/index.html
index b322c7ba1..d752f200f 100644
--- a/index.html
+++ b/index.html
@@ -205,7 +205,8 @@
Insertable Streams:
Video processing using MediaStream Insertable Streams (Experimental)
Audio processing using MediaStream Insertable Streams (Experimental)
Video cropping using MediaStream Insertable Streams in a Worker (Experimental)
-
+      <li><a href="content/insertable-streams/webgpu/">Integrations with WebGPU for custom video rendering</a> (Experimental)</li>
+
diff --git a/src/content/insertable-streams/webgpu/css/main.css b/src/content/insertable-streams/webgpu/css/main.css
new file mode 100644
index 000000000..63d5c346a
--- /dev/null
+++ b/src/content/insertable-streams/webgpu/css/main.css
@@ -0,0 +1,23 @@
+/*
+ * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree.
+ */
+
+video {
+ width: 480px;
+ height: 270px;
+}
+
+.output {
+ width: 960px;
+ height: 540px;
+ margin: 0;
+}
+
+.error {
+ font-size: 20px;
+ color: red;
+}
diff --git a/src/content/insertable-streams/webgpu/index.html b/src/content/insertable-streams/webgpu/index.html
new file mode 100644
index 000000000..f42e0f72b
--- /dev/null
+++ b/src/content/insertable-streams/webgpu/index.html
@@ -0,0 +1,77 @@
+<!DOCTYPE html>
+<html>
+<head>
+    <meta charset="utf-8">
+    <meta name="viewport" content="width=device-width, initial-scale=1">
+    <link rel="stylesheet" href="../../../css/main.css">
+    <link rel="stylesheet" href="css/main.css">
+    <title>Integrations with WebGPU for custom video rendering</title>
+</head>
+<body>
+<div id="container">
+    <h1><a href="//webrtc.github.io/samples/" title="WebRTC samples homepage">WebRTC samples</a>
+        <span>Integrations with WebGPU for custom video rendering</span></h1>
+
+    <p>This sample shows how to render multiple video streams to a canvas using the Insertable Streams and
+        WebGPU APIs. The rendering can be done either on the main thread or on a worker thread.</p>
+
+    <div>
+        <label for="sourceSelector">Choose the type of rendering:</label>
+        <select id="sourceSelector">
+            <option value="stopped">(stopped)</option>
+            <option value="main">Main thread</option>
+            <option value="worker">Worker thread</option>
+        </select>
+    </div>
+
+    <p>Input:</p>
+    <video id="inputVideo" playsinline></video>
+    <video id="gumInputVideo" playsinline></video>
+
+    <p>Output:</p>
+    <div id="outputVideo" class="output"></div>
+
+    <p id="errorMsg" class="error"></p>
+
+    <p><b>Note</b>: This sample uses the WebGPU API, which is in Origin Trial as of 2021-09-21 and is
+        available in Chrome M94 if experimental code is enabled on the command line with
+        <code>--enable-unsafe-webgpu</code>.</p>
+
+    <a href="https://github.com/webrtc/samples/tree/gh-pages/src/content/insertable-streams/webgpu"
+       title="View source for this page on GitHub" id="viewSource">View source on GitHub</a>
+</div>
+
+<script src="js/multi_video_main.js"></script>
+<script src="js/multi_video_worker_manager.js"></script>
+<script src="js/main.js"></script>
+</body>
+</html>
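The sample page above describes the pipeline at a high level: MediaStreamTrackProcessor exposes each video track as a ReadableStream of VideoFrame objects, and the WebGPU code renders those frames to a canvas, either on the main thread or in a worker. As a reading aid only (not part of this patch), here is a minimal sketch of the Insertable Streams half in isolation; `readFramesFromCamera` is a hypothetical helper name and the frame count is arbitrary.

```js
// Minimal sketch: read a few VideoFrames from a camera track via
// MediaStreamTrackProcessor (Chrome M94+), then release everything.
async function readFramesFromCamera() {
  const stream = await navigator.mediaDevices.getUserMedia({video: true});
  const [track] = stream.getVideoTracks();
  const processor = new MediaStreamTrackProcessor({track});
  const reader = processor.readable.getReader();
  for (let i = 0; i < 10; i++) {
    const {value: frame, done} = await reader.read();
    if (done) break;
    console.log(`frame ${i}: ${frame.displayWidth}x${frame.displayHeight}`);
    frame.close(); // VideoFrames hold real resources and must be closed.
  }
  await reader.cancel();
  track.stop();
}
```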
diff --git a/src/content/insertable-streams/webgpu/js/main.js b/src/content/insertable-streams/webgpu/js/main.js
new file mode 100644
index 000000000..4fc1a8482
--- /dev/null
+++ b/src/content/insertable-streams/webgpu/js/main.js
@@ -0,0 +1,115 @@
+'use strict';
+
+/* global MediaStreamTrackProcessor, MediaStreamTrackGenerator */
+if (typeof MediaStreamTrackProcessor === 'undefined' ||
+ typeof MediaStreamTrackGenerator === 'undefined') {
+ const errorMessage = 'Your browser does not support the MediaStreamTrack ' +
+ 'API for Insertable Streams of Media which was shipped in M94.';
+ document.getElementById('errorMsg').innerText = errorMessage;
+ console.log(errorMessage);
+}
+
+/* global WebGPUTransform */ // defined in multi_video_main.js
+/* global WebGPUWorker */ // defined in multi_video_worker_manager.js
+
+let videoElement;
+
+async function getMediaStream(src) {
+ videoElement = document.getElementById('inputVideo');
+ videoElement.controls = true;
+ videoElement.loop = true;
+ videoElement.muted = true;
+ videoElement.src = src;
+ videoElement.load();
+ videoElement.play();
+
+ let sourceStream;
+ const mediaPromise = new Promise((resolve, reject) => {
+ videoElement.oncanplay = () => {
+ if (!resolve || !reject) return;
+ console.log('Obtaining video capture stream');
+ if (videoElement.captureStream) {
+ sourceStream = videoElement.captureStream();
+ resolve();
+ } else if (videoElement.mozCaptureStream) {
+ sourceStream = videoElement.mozCaptureStream();
+ resolve();
+ } else {
+ reject(new Error('Stream capture is not supported'));
+ }
+ resolve = null;
+ reject = null;
+ };
+ });
+ await mediaPromise;
+ console.log(
+ 'Received source video stream.', sourceStream);
+ return sourceStream;
+}
+
+function getUserMediaStream() {
+ return navigator.mediaDevices.getUserMedia({
+ audio: false,
+ video: {width: 480, height: 270}
+ }).catch(err => {
+ throw new Error('Unable to fetch getUserMedia stream ' + err);
+ });
+}
+
+let gpuTransform;
+let gumTrack;
+let gumVideo;
+
+async function main(sourceType) {
+ const gumStream = await getUserMediaStream();
+ gumTrack = gumStream.getVideoTracks()[0];
+ const gumProcessor = new MediaStreamTrackProcessor({track: gumTrack});
+
+ gumVideo = document.getElementById('gumInputVideo');
+ gumVideo.srcObject = gumStream;
+ gumVideo.play();
+
+ const videoStream = await getMediaStream('../../../video/chrome.webm');
+ const videoTrack = videoStream.getVideoTracks()[0];
+ const videoProcessor = new MediaStreamTrackProcessor({track: videoTrack});
+
+ if (sourceType === 'main') {
+ gpuTransform = new WebGPUTransform();
+ }
+ if (sourceType === 'worker') {
+ gpuTransform = new WebGPUWorker();
+ }
+ await gpuTransform.init();
+ await gpuTransform.transform(videoProcessor.readable, gumProcessor.readable);
+}
+
+function destroySource() {
+ if (videoElement) {
+ console.log('Stopping source video');
+ videoElement.pause();
+ }
+ if (gumVideo) {
+ console.log('Stopping gUM stream');
+ gumVideo.pause();
+ gumVideo.srcObject = null;
+ }
+ if (gumTrack) gumTrack.stop();
+}
+
+const sourceSelector = document.getElementById('sourceSelector');
+
+function updateSource() {
+ if (gpuTransform) {
+ gpuTransform.destroy();
+ }
+ gpuTransform = null;
+ destroySource();
+ const sourceType = sourceSelector.options[sourceSelector.selectedIndex].value;
+
+ console.log('New source is', sourceType);
+ if (sourceType !== 'stopped') {
+ main(sourceType);
+ }
+}
+
+sourceSelector.oninput = updateSource;
diff --git a/src/content/insertable-streams/webgpu/js/multi_video_main.js b/src/content/insertable-streams/webgpu/js/multi_video_main.js
new file mode 100644
index 000000000..dd3147eb7
--- /dev/null
+++ b/src/content/insertable-streams/webgpu/js/multi_video_main.js
@@ -0,0 +1,263 @@
+
+'use strict';
+
+const wgslShaders = {
+ vertex: `
+struct VertexInput {
+ [[location(0)]] position : vec3<f32>;
+ [[location(1)]] uv : vec2<f32>;
+};
+
+struct VertexOutput {
+ [[builtin(position)]] Position : vec4<f32>;
+ [[location(0)]] fragUV : vec2<f32>;
+};
+
+[[stage(vertex)]]
+fn main(input : VertexInput) -> VertexOutput {
+ var output : VertexOutput;
+ output.Position = vec4<f32>(input.position, 1.0);
+ output.fragUV = vec2<f32>(-0.5, -0.0) + input.uv;
+ return output;
+}
+`,
+
+ fragment: `
+[[binding(0), group(0)]] var mySampler: sampler;
+[[binding(1), group(0)]] var myTexture: texture_2d<f32>;
+
+[[stage(fragment)]]
+fn main([[location(0)]] fragUV : vec2<f32>) -> [[location(0)]] vec4<f32> {
+ return textureSample(myTexture, mySampler, fragUV);
+}
+`,
+};
+
+class WebGPUTransform { // eslint-disable-line no-unused-vars
+ constructor() {
+ this.canvas_ = null;
+ this.context_ = null;
+ this.device_ = null;
+ this.renderPipeline_ = null;
+ this.sampler_ = null;
+ this.videoTexture_ = null;
+ this.vertexBuffer_ = null;
+ }
+
+ async init(inputCanvas) {
+ console.log('[WebGPUTransform] Initializing WebGPU.');
+ this.canvas_ = inputCanvas;
+ let errorElement;
+ if (!this.canvas_) {
+ this.canvas_ = document.createElement('canvas');
+ document.getElementById('outputVideo').append(this.canvas_);
+ this.canvas_.width = 960;
+ this.canvas_.height = 540;
+ errorElement = document.getElementById('errorMsg');
+ }
+
+ const canvas = this.canvas_;
+ const context = canvas.getContext('webgpu');
+ if (!context) {
+ const errorMessage = 'Your browser does not support the WebGPU API.' +
+ ' Please see the note at the bottom of the page.';
+ if (errorElement) errorElement.innerText = errorMessage;
+ return errorMessage;
+ }
+ this.context_ = context;
+ const adapter = await navigator.gpu.requestAdapter();
+ const device = adapter ? await adapter.requestDevice() : null;
+ this.device_ = device;
+ if (!this.device_) {
+ console.log('[WebGPUTransform] requestDevice failed.');
+ return;
+ }
+ const swapChainFormat = 'bgra8unorm';
+
+ const rectVerts = new Float32Array([
+ 1.0, 1.0, 0.0, 1.0, 0.0,
+ 1.0, -1.0, 0.0, 1.0, 1.0,
+ -1.0, -1.0, 0.0, 0.0, 1.0,
+ 1.0, 1.0, 0.0, 1.0, 0.0,
+ -1.0, -1.0, 0.0, 0.0, 1.0,
+ -1.0, 1.0, 0.0, 0.0, 0.0,
+ ]);
+ // Creates a GPU buffer.
+ const vertexBuffer = device.createBuffer({
+ size: rectVerts.byteLength,
+ usage: GPUBufferUsage.VERTEX,
+ mappedAtCreation: true,
+ });
+ // Copies rectVerts to vertexBuffer
+ new Float32Array(vertexBuffer.getMappedRange()).set(rectVerts);
+ vertexBuffer.unmap();
+ this.vertexBuffer_ = vertexBuffer;
+
+ context.configure({
+ device,
+ format: swapChainFormat
+ });
+
+ this.renderPipeline_ = device.createRenderPipeline({
+ vertex: {
+ module: device.createShaderModule({
+ code: wgslShaders.vertex,
+ }),
+ entryPoint: 'main',
+ buffers: [
+ {
+ arrayStride: 20,
+ attributes: [
+ {
+ // position
+ shaderLocation: 0,
+ offset: 0,
+ format: 'float32x3',
+ },
+ {
+ // uv
+ shaderLocation: 1,
+ offset: 12,
+ format: 'float32x2',
+ },
+ ],
+ },
+ ],
+ },
+ fragment: {
+ module: device.createShaderModule({
+ code: wgslShaders.fragment,
+ }),
+ entryPoint: 'main',
+ targets: [
+ {
+ format: swapChainFormat,
+ },
+ ],
+ },
+ primitive: {
+ topology: 'triangle-list',
+ },
+ });
+
+ this.videoTexture_ = device.createTexture({
+ size: [480 * 2, 270 * 2],
+ format: 'rgba8unorm',
+ usage: GPUTextureUsage.COPY_DST | GPUTextureUsage.TEXTURE_BINDING |
+ GPUTextureUsage.RENDER_ATTACHMENT,
+ });
+
+ this.sampler_ = device.createSampler({
+ addressModeU: 'repeat',
+ addressModeV: 'repeat',
+ addressModeW: 'repeat',
+ magFilter: 'linear',
+ minFilter: 'linear',
+ });
+ }
+
+ async copyOnTexture(device, videoTexture, frame, xcorr, ycorr) {
+ if (!frame) {
+ return;
+ }
+ // Using GPUExternalTexture (once it is implemented for Breakout Box frames)
+ // would avoid making extra copies through ImageBitmap.
+ const videoBitmap = await createImageBitmap(frame, {resizeWidth: 480, resizeHeight: 270});
+ device.queue.copyExternalImageToTexture(
+ {source: videoBitmap, origin: {x: 0, y: 0}},
+ {texture: videoTexture, origin: {x: xcorr, y: ycorr}},
+ {
+ // the width of the image being copied
+ width: videoBitmap.width,
+ height: videoBitmap.height,
+ }
+ );
+ videoBitmap.close();
+ frame.close();
+ }
+
+ async renderOnScreen(videoSource, gumSource) {
+ const device = this.device_;
+ const videoTexture = this.videoTexture_;
+ if (!device) {
+ console.log('[WebGPUTransform] device is undefined or null.');
+ return false;
+ }
+
+ const videoPromise = videoSource.read().then(({value}) => {
+ return this.copyOnTexture(device, videoTexture, value, 0, 270);
+ });
+ const gumPromise = gumSource.read().then(({value}) => {
+ return this.copyOnTexture(device, videoTexture, value, 480, 0);
+ });
+ await Promise.all([videoPromise, gumPromise]);
+
+ if (!this.device_) {
+ console.log('[WebGPUTransform] Device destroyed while copying frames; stopping render.');
+ return false;
+ }
+
+ const uniformBindGroup = device.createBindGroup({
+ layout: this.renderPipeline_.getBindGroupLayout(0),
+ entries: [
+ {
+ binding: 0,
+ resource: this.sampler_,
+ },
+ {
+ binding: 1,
+ resource: videoTexture.createView(),
+ },
+ ],
+ });
+
+ const commandEncoder = device.createCommandEncoder();
+ const textureView = this.context_.getCurrentTexture().createView();
+
+ const renderPassDescriptor = {
+ colorAttachments: [
+ {
+ view: textureView,
+ loadValue: {r: 0.0, g: 0.0, b: 0.0, a: 1.0},
+ storeOp: 'store',
+ },
+ ],
+ };
+ const passEncoder = commandEncoder.beginRenderPass(renderPassDescriptor);
+ passEncoder.setPipeline(this.renderPipeline_);
+ passEncoder.setVertexBuffer(0, this.vertexBuffer_);
+ passEncoder.setBindGroup(0, uniformBindGroup);
+ passEncoder.draw(6, 1, 0, 0);
+ passEncoder.endPass();
+ device.queue.submit([commandEncoder.finish()]);
+ return true;
+ }
+
+
+ async transform(videoStream, gumStream) {
+ const videoSource = videoStream.getReader();
+ const gumSource = gumStream.getReader();
+ while (true) {
+ const rendered = await this.renderOnScreen(videoSource, gumSource);
+ if (!rendered) {
+ break;
+ }
+ }
+ videoSource.cancel();
+ gumSource.cancel();
+ }
+
+ destroy() {
+ if (this.device_) {
+ // Currently being implemented.
+ // await this.device_.destroy();
+ this.device_ = null;
+ this.vertexBuffer_.destroy();
+ this.videoTexture_.destroy();
+ if (this.canvas_.parentNode) {
+ this.canvas_.parentNode.removeChild(this.canvas_);
+ }
+ console.log('[WebGPUTransform] Context destroyed.');
+ }
+ }
+}
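The comment in copyOnTexture above points out that GPUExternalTexture, once it works with Breakout Box frames, would remove the ImageBitmap copy. The sketch below is not part of this patch and assumes the importExternalTexture API with VideoFrame sources and the texture_external WGSL binding from later revisions of the WebGPU spec; it only illustrates the shape such a zero-copy path could take, with makeFrameBindGroup as a hypothetical helper.

```js
// Hypothetical zero-copy variant: wrap the VideoFrame directly instead of
// going through createImageBitmap + copyExternalImageToTexture.
function makeFrameBindGroup(device, pipeline, sampler, frame) {
  // The external texture is only valid for the current frame/task.
  const externalTexture = device.importExternalTexture({source: frame});
  return device.createBindGroup({
    layout: pipeline.getBindGroupLayout(0),
    entries: [
      {binding: 0, resource: sampler},
      {binding: 1, resource: externalTexture},
    ],
  });
}
// The pipeline's fragment shader would declare the binding as
// `var myTexture: texture_external;` instead of `texture_2d<f32>`.
```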
diff --git a/src/content/insertable-streams/webgpu/js/multi_video_worker.js b/src/content/insertable-streams/webgpu/js/multi_video_worker.js
new file mode 100644
index 000000000..728377b63
--- /dev/null
+++ b/src/content/insertable-streams/webgpu/js/multi_video_worker.js
@@ -0,0 +1,23 @@
+'use strict';
+importScripts('./multi_video_main.js');
+
+let mainTransform = null;
+
+/* global WebGPUTransform */ // defined in multi_video_main.js
+
+onmessage = async (event) => {
+ const {operation} = event.data;
+ if (operation === 'init') {
+ mainTransform = new WebGPUTransform();
+ const {canvas} = event.data;
+ const msg = await mainTransform.init(canvas);
+ if (msg) {
+ postMessage({error: msg});
+ } else {
+ postMessage({result: 'Done'});
+ }
+ } else if (operation === 'transform') {
+ const {videoStream, gumStream} = event.data;
+ mainTransform.transform(videoStream, gumStream);
+ }
+};
diff --git a/src/content/insertable-streams/webgpu/js/multi_video_worker_manager.js b/src/content/insertable-streams/webgpu/js/multi_video_worker_manager.js
new file mode 100644
index 000000000..3952c08be
--- /dev/null
+++ b/src/content/insertable-streams/webgpu/js/multi_video_worker_manager.js
@@ -0,0 +1,57 @@
+
+'use strict';
+
+let worker;
+let screenCanvas;
+
+// eslint-disable-next-line no-unused-vars
+class WebGPUWorker {
+ async init() {
+ screenCanvas = document.createElement('canvas');
+ document.getElementById('outputVideo').append(screenCanvas);
+ screenCanvas.width = 960;
+ screenCanvas.height = 540;
+
+ worker = new Worker('./js/multi_video_worker.js');
+ console.log('Created a worker thread.');
+ const offScreen = screenCanvas.transferControlToOffscreen();
+
+ const onMessage = new Promise((resolve, reject) => {
+ worker.addEventListener('message', function handleMsgFromWorker(msg) {
+ if (msg.data.error) {
+ document.getElementById('errorMsg').innerText = msg.data.error;
+ reject(msg.data.error);
+ }
+ if (msg.data.result === 'Done') {
+ resolve();
+ }
+ });
+ });
+ worker.postMessage(
+ {
+ operation: 'init',
+ canvas: offScreen,
+ }, [offScreen]);
+
+ await onMessage;
+ }
+
+ transform(videoStream, gumStream) {
+ if (videoStream && gumStream) {
+ worker.postMessage(
+ {
+ operation: 'transform',
+ videoStream: videoStream,
+ gumStream: gumStream,
+ }, [videoStream, gumStream]);
+ }
+ }
+
+ destroy() {
+ if (screenCanvas.parentNode) {
+ screenCanvas.parentNode.removeChild(screenCanvas);
+ }
+ worker.terminate();
+ console.log('Worker thread destroyed.');
+ }
+}