Add Video Uploading with WebCodecs sample
beaufortfrancois committed Apr 7, 2023
1 parent fd1ccb2 commit 245d806
Showing 5 changed files with 189 additions and 0 deletions.
14 changes: 14 additions & 0 deletions package-lock.json

Some generated files are not rendered by default.

1 change: 1 addition & 0 deletions package.json
@@ -16,6 +16,7 @@
"export": "next export"
},
"dependencies": {
"@types/dom-mediacapture-transform": "^0.1.5",
"codemirror": "^5.58.2",
"dat.gui": "^0.7.6",
"file-loader": "^6.2.0",
1 change: 1 addition & 0 deletions src/pages/samples/[slug].tsx
@@ -24,6 +24,7 @@ export const pages = {
computeBoids: dynamic(() => import('../../sample/computeBoids/main')),
animometer: dynamic(() => import('../../sample/animometer/main')),
videoUploading: dynamic(() => import('../../sample/videoUploading/main')),
videoUploadingWebCodecs: dynamic(() => import('../../sample/videoUploadingWebCodecs/main')),
imageBlur: dynamic(() => import('../../sample/imageBlur/main')),
shadowMapping: dynamic(() => import('../../sample/shadowMapping/main')),
reversedZ: dynamic(() => import('../../sample/reversedZ/main')),
169 changes: 169 additions & 0 deletions src/sample/videoUploadingWebCodecs/main.ts
@@ -0,0 +1,169 @@
import { makeSample, SampleInit } from '../../components/SampleLayout';

import fullscreenTexturedQuadWGSL from '../../shaders/fullscreenTexturedQuad.wgsl';
import sampleExternalTextureWGSL from '../../shaders/sampleExternalTexture.frag.wgsl';

const init: SampleInit = async ({ canvas, pageState }) => {
// Set up the video element and start playback.
const video = document.createElement('video');
video.loop = true;
video.autoplay = true;
video.muted = true;
video.src = new URL(
'../../../assets/video/pano.webm',
import.meta.url
).toString();
await video.play();

const adapter = await navigator.gpu.requestAdapter();
const device = await adapter.requestDevice();

if (!pageState.active) return;

const context = canvas.getContext('webgpu') as GPUCanvasContext;
const devicePixelRatio = window.devicePixelRatio || 1;
canvas.width = canvas.clientWidth * devicePixelRatio;
canvas.height = canvas.clientHeight * devicePixelRatio;
const presentationFormat = navigator.gpu.getPreferredCanvasFormat();

context.configure({
device,
format: presentationFormat,
alphaMode: 'premultiplied',
});

const pipeline = device.createRenderPipeline({
layout: 'auto',
vertex: {
module: device.createShaderModule({
code: fullscreenTexturedQuadWGSL,
}),
entryPoint: 'vert_main',
},
fragment: {
module: device.createShaderModule({
code: sampleExternalTextureWGSL,
}),
entryPoint: 'main',
targets: [
{
format: presentationFormat,
},
],
},
primitive: {
topology: 'triangle-list',
},
});

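// Sampler used by the fragment shader when sampling the imported external texture.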
const sampler = device.createSampler({
magFilter: 'linear',
minFilter: 'linear',
});

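// Captures a single WebCodecs VideoFrame from the playing <video> element:
// captureStream() mirrors playback into a MediaStream, MediaStreamTrackProcessor
// exposes its video track as a stream of VideoFrames, and the first frame to
// arrive resolves the promise (the track is stopped right after).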
function getVideoFrameFromVideoElement(video): Promise<VideoFrame> {
return new Promise<VideoFrame>((resolve) => {
const videoTrack = video.captureStream().getVideoTracks()[0];
const trackProcessor = new MediaStreamTrackProcessor({
track: videoTrack,
});
const transformer = new TransformStream({
transform(videoFrame) {
videoTrack.stop();
resolve(videoFrame);
},
flush(controller) {
controller.terminate();
},
});
const trackGenerator = new MediaStreamTrackGenerator({
kind: 'video',
});
trackProcessor.readable
.pipeThrough(transformer)
.pipeTo(trackGenerator.writable);
});
}

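// Per-frame work: capture a VideoFrame, import it as an external texture,
// and draw a fullscreen textured quad that samples it.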
async function frame() {
// Sample is no longer the active page.
if (!pageState.active) return;

const videoFrame = await getVideoFrameFromVideoElement(video);

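// Textures imported with importExternalTexture() are only valid for a short
// time, so a new external texture is imported and a new bind group is built
// on every frame.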
const uniformBindGroup = device.createBindGroup({
layout: pipeline.getBindGroupLayout(0),
entries: [
{
binding: 1,
resource: sampler,
},
{
binding: 2,
resource: device.importExternalTexture({
source: videoFrame,
}),
},
],
});

const commandEncoder = device.createCommandEncoder();
const textureView = context.getCurrentTexture().createView();

const renderPassDescriptor: GPURenderPassDescriptor = {
colorAttachments: [
{
view: textureView,
clearValue: { r: 0.0, g: 0.0, b: 0.0, a: 1.0 },
loadOp: 'clear',
storeOp: 'store',
},
],
};

const passEncoder = commandEncoder.beginRenderPass(renderPassDescriptor);
passEncoder.setPipeline(pipeline);
passEncoder.setBindGroup(0, uniformBindGroup);
passEncoder.draw(6, 1, 0, 0);
passEncoder.end();
device.queue.submit([commandEncoder.finish()]);

if ('requestVideoFrameCallback' in video) {
video.requestVideoFrameCallback(frame);
} else {
requestAnimationFrame(frame);
}
}

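// Start the loop: requestVideoFrameCallback (when available) runs it in step
// with newly presented video frames; otherwise fall back to requestAnimationFrame.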
if ('requestVideoFrameCallback' in video) {
video.requestVideoFrameCallback(frame);
} else {
requestAnimationFrame(frame);
}
};

const VideoUploading: () => JSX.Element = () =>
makeSample({
name: 'Video Uploading with WebCodecs',
description: 'This example shows how to upload a video frame from WebCodecs to WebGPU.',
init,
sources: [
{
name: __filename.substring(__dirname.length + 1),
contents: __SOURCE__,
},
{
name: '../../shaders/fullscreenTexturedQuad.wgsl',
contents: fullscreenTexturedQuadWGSL,
editable: true,
},
{
name: '../../shaders/sampleExternalTexture.frag.wgsl',
contents: sampleExternalTextureWGSL,
editable: true,
},
],
filename: __filename,
});

export default VideoUploading;
4 changes: 4 additions & 0 deletions src/types.d.ts
@@ -7,6 +7,10 @@ declare module '*.module.css' {
interface HTMLCanvasElement extends HTMLElement {
getContext(contextId: 'webgpu'): GPUPresentationContext | null;
}
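// Augment the WebGPU typings so importExternalTexture() also accepts a
// WebCodecs VideoFrame as its source.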
interface GPUExternalTextureDescriptor extends GPUObjectDescriptorBase {
source: HTMLVideoElement | VideoFrame;
colorSpace?: PredefinedColorSpace;
}

declare const __SOURCE__: string;
