WebGPU sample pages that use Insertable Streams (webrtc#1481)
* WebGPU sample pages that use Insertable Streams

* Changes to the main index file (webrtc#7)

* Formatting changes (webrtc#8)

* Main (webrtc#9)

* Formatting changes

* Editing info on index page

* Adding errorMsg

* Adding errorMsg (webrtc#10)

* Changing the structure of worker processing (webrtc#11)

* Renaming files (webrtc#12)

* Formatting changes (webrtc#13)

* Formatting (webrtc#14)

* More changes (webrtc#15)

* Adding the break clause (webrtc#16)

* Main (webrtc#17)

* resolving comments

* resolving comments

* Minor changes (webrtc#18)

* Formatted with eslint (webrtc#19)

* Modifying eslint config (webrtc#20)
palak8669 authored Oct 5, 2021
1 parent 821b162 commit 89ef09b
Showing 8 changed files with 564 additions and 1 deletion.
4 changes: 4 additions & 0 deletions .eslintrc.js
@@ -28,6 +28,10 @@ module.exports = {
"chrome": true,
"ga": true,
"getPolicyFromBooleans": true,
"importScripts": true,
// From WebGPU specification
"GPUBufferUsage": true,
"GPUTextureUsage": true,
// From Streams specification
"TransformStream": true,
// From WebCodec specification
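The new ESLint globals above correspond to names that the WebGPU specification defines on the global scope (and `importScripts` to the worker global scope). As a hedged sketch only, not code from this commit, they typically show up in rendering code like the following, where `device` is assumed to be a GPUDevice obtained from `navigator.gpu.requestAdapter()` and `adapter.requestDevice()`:

// Illustrative sketch only; not part of this commit.
// `device` is assumed to be a GPUDevice obtained elsewhere.
const texture = device.createTexture({
  size: {width: 480, height: 270},
  format: 'rgba8unorm',
  // GPUTextureUsage is a global defined by the WebGPU spec,
  // hence the eslint "globals" entry above.
  usage: GPUTextureUsage.TEXTURE_BINDING | GPUTextureUsage.COPY_DST |
      GPUTextureUsage.RENDER_ATTACHMENT
});
const uniformBuffer = device.createBuffer({
  // Room for a 4x4 matrix of 32-bit floats.
  size: 16 * 4,
  // GPUBufferUsage is likewise a spec-defined global.
  usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST
});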
3 changes: 2 additions & 1 deletion index.html
@@ -205,7 +205,8 @@ <h2 id="capture">Insertable Streams:</h2>
<li><a href="src/content/insertable-streams/video-processing">Video processing using MediaStream Insertable Streams</a></li> (Experimental)
<li><a href="src/content/insertable-streams/audio-processing">Audio processing using MediaStream Insertable Streams</a></li> (Experimental)
<li><a href="src/content/insertable-streams/video-crop">Video cropping using MediaStream Insertable Streams in a Worker</a></li> (Experimental)
</ul>
<li><a href="src/content/insertable-streams/webgpu">Integrations with WebGPU for custom video rendering:</a></li> (Experimental)
</ul>

</section>

23 changes: 23 additions & 0 deletions src/content/insertable-streams/webgpu/css/main.css
@@ -0,0 +1,23 @@
/*
* Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree.
*/

video {
width: 480px;
height: 270px;
}

.output {
width: 960px;
height: 540px;
margin: 0px 0px 0px 0px;
}

.error {
font-size: 20px;
color:red;
}
77 changes: 77 additions & 0 deletions src/content/insertable-streams/webgpu/index.html
@@ -0,0 +1,77 @@
<!DOCTYPE html>
<!--
* Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree.
-->
<html>

<head>

<meta charset="utf-8">
<meta name="description" content="WebRTC code samples">
<meta name="viewport" content="width=device-width, user-scalable=yes, initial-scale=1, maximum-scale=1">
<meta itemprop="description" content="Client-side WebRTC code samples">
<meta itemprop="image" content="../../../images/webrtc-icon-192x192.png">
<meta itemprop="name" content="WebRTC code samples">
<meta name="mobile-web-app-capable" content="yes">
<meta id="theme-color" name="theme-color" content="#ffffff">

<base target="_blank">

<title>Integrations with WebGPU for custom video rendering</title>

<link rel="icon" sizes="192x192" href="../../../images/webrtc-icon-192x192.png">
<link href="//fonts.googleapis.com/css?family=Roboto:300,400,500,700" rel="stylesheet" type="text/css">
<link rel="stylesheet" href="../../../css/main.css" />
<link rel="stylesheet" href="css/main.css" />

</head>

<body>

<div id="container">
<h1><a href="//webrtc.github.io/samples/" title="WebRTC samples homepage">WebRTC samples</a>
<span>Integrations with WebGPU for custom video rendering</span>
</h1>

<p>This sample shows how to render multiple video streams to a canvas using the <a
href="https://github.com/w3c/mediacapture-transform">insertable streams</a> and <a
href="https://gpuweb.github.io/gpuweb/">WebGPU</a> APIs. The rendering can be done either on the
main thread or on a worker thread.
</p>
<div id="errorMsg" class="error"></div>
<br>
<div class="box">
<span>Choose type of rendering:</span>
<select id="sourceSelector">
<option selected value="stopped">(stopped)</option>
<option value="main">Main thread</option>
<option value="worker">Worker thread</option>
</select>
</div>

<div>Input:</div>
<video id="inputVideo" class="input" width="480" height="270"></video>
<video id="gumInputVideo" width="480" height="270"></video>
<div>Output:</div>
<div id="outputVideo" class="output"></div>
<p>
<b>Note</b>: This sample uses the WebGPU API, which is in Origin Trial as
of 2021-09-21 and is available in Chrome M94 if the experimental code is enabled on
the command line with
<code>--enable-unsafe-webgpu</code>.
</p>
<a href="https://github.com/webrtc/samples/tree/gh-pages/src/content/insertable-streams/webgpu"
title="View source for this page on GitHub" id="viewSource">View source on GitHub</a>

</div>

<script src="js/main.js" async></script>
<script src="js/multi_video_main.js" async></script>
<script src="js/multi_video_worker_manager.js" async></script>
</body>

</html>
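The WebGPU rendering itself lives in js/multi_video_main.js and the worker files, which are collapsed in this diff view. As a hedged sketch only (the function and parameter names below are illustrative, not taken from this commit), the per-stream work amounts to pulling VideoFrames off an insertable-streams readable and copying each one into a GPU texture before drawing:

// Illustrative sketch, not the committed implementation.
async function copyFramesToTexture(readable, device, texture) {
  const reader = readable.getReader();
  while (true) {
    const {value: videoFrame, done} = await reader.read();
    if (done) break;
    // VideoFrame is accepted as a copy source in recent Chrome builds;
    // older builds may require an ImageBitmap created from the frame.
    device.queue.copyExternalImageToTexture(
        {source: videoFrame},
        {texture},
        [videoFrame.displayWidth, videoFrame.displayHeight]);
    videoFrame.close();
    // A render pass that samples `texture` onto the output canvas
    // would be recorded and submitted here.
  }
}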
115 changes: 115 additions & 0 deletions src/content/insertable-streams/webgpu/js/main.js
@@ -0,0 +1,115 @@
'use strict';

/* global MediaStreamTrackProcessor, MediaStreamTrackGenerator */
if (typeof MediaStreamTrackProcessor === 'undefined' ||
typeof MediaStreamTrackGenerator === 'undefined') {
const errorMessage = 'Your browser does not support the MediaStreamTrack ' +
'API for Insertable Streams of Media which was shipped in M94.';
document.getElementById('errorMsg').innerText = errorMessage;
console.log(errorMessage);
}

/* global WebGPUTransform */ // defined in multi_video_main.js
/* global WebGPUWorker */ // defined in multi_video_worker_manager.js

let videoElement;

async function getMediaStream(src) {
videoElement = document.getElementById('inputVideo');
videoElement.controls = true;
videoElement.loop = true;
videoElement.muted = true;
videoElement.src = src;
videoElement.load();
videoElement.play();

let sourceStream;
const mediaPromise = new Promise((resolve, reject) => {
videoElement.oncanplay = () => {
if (!resolve || !reject) return;
console.log('Obtaining video capture stream');
if (videoElement.captureStream) {
sourceStream = videoElement.captureStream();
resolve();
} else if (videoElement.mozCaptureStream) {
sourceStream = videoElement.mozCaptureStream();
resolve();
} else {
reject(new Error('Stream capture is not supported'));
}
resolve = null;
reject = null;
};
});
await mediaPromise;
console.log(
'Received source video stream.', sourceStream);
return sourceStream;
}

function getUserMediaStream() {
return navigator.mediaDevices.getUserMedia({
audio: false,
video: {width: 480, height: 270}
}).catch(err => {
throw new Error('Unable to fetch getUserMedia stream ' + err);
});
}

let gpuTransform;
let gumTrack;
let gumVideo;

async function main(sourceType) {
const gumStream = await getUserMediaStream();
gumTrack = gumStream.getVideoTracks()[0];
const gumProcessor = new MediaStreamTrackProcessor({track: gumTrack});

gumVideo = document.getElementById('gumInputVideo');
gumVideo.srcObject = gumStream;
gumVideo.play();

const videoStream = await getMediaStream('../../../video/chrome.webm');
const videoTrack = videoStream.getVideoTracks()[0];
const videoProcessor = new MediaStreamTrackProcessor({track: videoTrack});

if (sourceType === 'main') {
gpuTransform = new WebGPUTransform();
}
if (sourceType === 'worker') {
gpuTransform = new WebGPUWorker();
}
await gpuTransform.init();
await gpuTransform.transform(videoProcessor.readable, gumProcessor.readable);
}

function destroy_source() {
if (videoElement) {
console.log('Stopping source video');
videoElement.pause();
}
if (gumVideo) {
console.log('Stopping gUM stream');
gumVideo.pause();
gumVideo.srcObject = null;
}
if (gumTrack) gumTrack.stop();
}

const sourceSelector = document.getElementById('sourceSelector');

function updateSource() {
if (gpuTransform) {
gpuTransform.destroy();
}
gpuTransform = null;
destroy_source();
const sourceType = sourceSelector.options[sourceSelector.selectedIndex].value;

console.log('New source is', sourceType);
if (sourceType !== 'stopped') {
main(sourceType);
}
}

sourceSelector.oninput = updateSource;
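main.js depends on WebGPUTransform (defined in js/multi_video_main.js) and WebGPUWorker (defined in js/multi_video_worker_manager.js), neither of which is expanded in this view. As a hedged sketch of the worker-thread path only — the class, file, and message names below are assumptions, not the committed code — a manager like WebGPUWorker can transfer the two readables to a worker, since ReadableStreams are transferable in Chrome:

// Illustrative sketch; the committed WebGPUWorker may differ.
class WebGPUWorkerSketch {
  async init() {
    // Worker script name is an assumption for this sketch.
    this.worker_ = new Worker('./js/multi_video_worker.js');
  }
  async transform(videoStream, gumStream) {
    // Transferring the readables avoids copying VideoFrames through
    // structured clone on every message.
    this.worker_.postMessage(
        {operation: 'transform', videoStream, gumStream},
        [videoStream, gumStream]);
  }
  destroy() {
    if (this.worker_) {
      this.worker_.postMessage({operation: 'destroy'});
      this.worker_.terminate();
      this.worker_ = null;
    }
  }
}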