Cover GPUExternalTexture with WebCam Cases (#3874)
* Cover GPUExternalTexture with WebCam Cases

Add utilities to capture a frame from the camera when the environment supports it,
and compare the GPUExternalTexture rendering result against a 2d-context drawImage()
of the same frame, to cover uploading webcam frames (see the sketch below).

* Update src/webgpu/web_platform/external_texture/video.spec.ts

Co-authored-by: Corentin Wallez <[email protected]>

* Remove extra type declaration.

---------

Co-authored-by: Corentin Wallez <[email protected]>
shaoboyan and Kangz authored Jul 30, 2024
1 parent 6d5f2e7 commit 4cb7a88
Showing 10 changed files with 232 additions and 24 deletions.
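
Before the file-by-file diff, here is a minimal sketch of the comparison path the new test exercises. This is not code from the commit: the function name sampleWebcamFrameSketch, the bare device parameter, and the elided pipeline setup are illustrative assumptions; the actual implementation lives in video.spec.ts and util.ts below.

// Illustrative sketch only (not part of this commit). Assumes a WebGPU `device` and a
// browser exposing getUserMedia + MediaStreamTrackProcessor.
async function sampleWebcamFrameSketch(device: GPUDevice): Promise<ImageData> {
  // 1. Capture a single VideoFrame from the webcam.
  const stream = await navigator.mediaDevices.getUserMedia({ video: true });
  const track = stream.getVideoTracks()[0] as MediaStreamVideoTrack;
  const reader = new MediaStreamTrackProcessor({ track }).readable.getReader();
  const { value: frame } = await reader.read();
  if (!frame) throw new Error('Could not capture a frame from the webcam.');

  // 2. WebGPU path: import the frame as a GPUExternalTexture. A real test binds this in a
  //    render pass and samples it (pipeline setup elided; see video.spec.ts below).
  device.importExternalTexture({ source: frame });

  // 3. Reference path: draw the same frame with a 2d context and read back the pixels.
  const canvas = document.createElement('canvas');
  canvas.width = frame.displayWidth;
  canvas.height = frame.displayHeight;
  const ctx = canvas.getContext('2d')!;
  ctx.drawImage(frame, 0, 0);
  const expected = ctx.getImageData(0, 0, canvas.width, canvas.height);

  // 4. The test then compares the WebGPU rendering against these pixels, texel by texel.
  frame.close();
  track.stop();
  return expected;
}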
31 changes: 31 additions & 0 deletions package-lock.json

(Generated file; diff not rendered.)

1 change: 1 addition & 0 deletions package.json
@@ -47,6 +47,7 @@
"@types/offscreencanvas": "^2019.7.2",
"@types/pngjs": "^6.0.3",
"@types/serve-index": "^1.9.3",
"@types/w3c-image-capture": "^1.0.10",
"@typescript-eslint/eslint-plugin": "^6.9.1",
"@typescript-eslint/parser": "^6.9.1",
"@webgpu/types": "^0.1.43",
2 changes: 1 addition & 1 deletion src/webgpu/util/copy_to_texture.ts
@@ -55,7 +55,7 @@ export const kCopySubrectInfo = [
},
] as const;

export class CopyToTextureUtils extends TextureTestMixin(GPUTest) {
export class TextureUploadingUtils extends TextureTestMixin(GPUTest) {
doFlipY(
sourcePixels: Uint8ClampedArray,
width: number,
4 changes: 2 additions & 2 deletions src/webgpu/web_platform/copyToTexture/ImageBitmap.spec.ts
@@ -12,11 +12,11 @@ TODO: Test zero-sized copies from all sources (just make sure params cover it) (

import { makeTestGroup } from '../../../common/framework/test_group.js';
import { kTextureFormatInfo, kValidTextureFormatsForCopyE2T } from '../../format_info.js';
import { CopyToTextureUtils, kCopySubrectInfo } from '../../util/copy_to_texture.js';
import { TextureUploadingUtils, kCopySubrectInfo } from '../../util/copy_to_texture.js';

import { kTestColorsAll, kTestColorsOpaque, makeTestColorsTexelView } from './util.js';

export const g = makeTestGroup(CopyToTextureUtils);
export const g = makeTestGroup(TextureUploadingUtils);

g.test('from_ImageData')
.desc(
4 changes: 2 additions & 2 deletions src/webgpu/web_platform/copyToTexture/ImageData.spec.ts
@@ -4,11 +4,11 @@ copyExternalImageToTexture from ImageData source.

import { makeTestGroup } from '../../../common/framework/test_group.js';
import { kTextureFormatInfo, kValidTextureFormatsForCopyE2T } from '../../format_info.js';
import { CopyToTextureUtils, kCopySubrectInfo } from '../../util/copy_to_texture.js';
import { TextureUploadingUtils, kCopySubrectInfo } from '../../util/copy_to_texture.js';

import { kTestColorsAll, makeTestColorsTexelView } from './util.js';

export const g = makeTestGroup(CopyToTextureUtils);
export const g = makeTestGroup(TextureUploadingUtils);

g.test('from_ImageData')
.desc(
4 changes: 2 additions & 2 deletions src/webgpu/web_platform/copyToTexture/canvas.spec.ts
@@ -10,11 +10,11 @@ import {
kValidTextureFormatsForCopyE2T,
RegularTextureFormat,
} from '../../format_info.js';
import { CopyToTextureUtils } from '../../util/copy_to_texture.js';
import { TextureUploadingUtils } from '../../util/copy_to_texture.js';
import { CanvasType, kAllCanvasTypes, createCanvas } from '../../util/create_elements.js';
import { TexelCompareOptions } from '../../util/texture/texture_ok.js';

class F extends CopyToTextureUtils {
class F extends TextureUploadingUtils {
init2DCanvasContentWithColorSpace({
width,
height,
4 changes: 2 additions & 2 deletions src/webgpu/web_platform/copyToTexture/image.spec.ts
@@ -5,7 +5,7 @@ copyExternalImageToTexture from HTMLImageElement source.
import { makeTestGroup } from '../../../common/framework/test_group.js';
import { raceWithRejectOnTimeout } from '../../../common/util/util.js';
import { kTextureFormatInfo, kValidTextureFormatsForCopyE2T } from '../../format_info.js';
import { CopyToTextureUtils, kCopySubrectInfo } from '../../util/copy_to_texture.js';
import { TextureUploadingUtils, kCopySubrectInfo } from '../../util/copy_to_texture.js';

import { kTestColorsOpaque, makeTestColorsTexelView } from './util.js';

@@ -21,7 +21,7 @@ async function decodeImageFromCanvas(canvas: HTMLCanvasElement): Promise<HTMLIma
return image;
}

export const g = makeTestGroup(CopyToTextureUtils);
export const g = makeTestGroup(TextureUploadingUtils);

g.test('from_image')
.desc(
144 changes: 130 additions & 14 deletions src/webgpu/web_platform/external_texture/video.spec.ts
@@ -9,12 +9,14 @@ TODO(#3193): Test video in BT.2020 color space
`;

import { makeTestGroup } from '../../../common/framework/test_group.js';
import { GPUTest, TextureTestMixin } from '../../gpu_test.js';
import { GPUTest } from '../../gpu_test.js';
import { TextureUploadingUtils } from '../../util/copy_to_texture.js';
import { createCanvas } from '../../util/create_elements.js';
import {
startPlayingAndWaitForVideo,
getVideoFrameFromVideoElement,
getVideoElement,
captureCameraFrame,
convertToUnorm8,
kPredefinedColorSpace,
kVideoNames,
@@ -26,7 +28,7 @@ const kHeight = 16;
const kWidth = 16;
const kFormat = 'rgba8unorm';

export const g = makeTestGroup(TextureTestMixin(GPUTest));
export const g = makeTestGroup(TextureUploadingUtils);

function createExternalTextureSamplingTestPipeline(
t: GPUTest,
@@ -37,16 +39,34 @@ function createExternalTextureSamplingTestPipeline(
vertex: {
module: t.device.createShaderModule({
code: `
@vertex fn main(@builtin(vertex_index) VertexIndex : u32) -> @builtin(position) vec4<f32> {
var pos = array<vec4<f32>, 6>(
vec4<f32>( 1.0, 1.0, 0.0, 1.0),
vec4<f32>( 1.0, -1.0, 0.0, 1.0),
vec4<f32>(-1.0, -1.0, 0.0, 1.0),
vec4<f32>( 1.0, 1.0, 0.0, 1.0),
vec4<f32>(-1.0, -1.0, 0.0, 1.0),
vec4<f32>(-1.0, 1.0, 0.0, 1.0)
struct VertexOutput {
@builtin(position) Position : vec4f,
@location(0) fragUV : vec2f,
}
@vertex fn main(@builtin(vertex_index) VertexIndex : u32) -> VertexOutput {
const pos = array(
vec2( 1.0, 1.0),
vec2( 1.0, -1.0),
vec2(-1.0, -1.0),
vec2( 1.0, 1.0),
vec2(-1.0, -1.0),
vec2(-1.0, 1.0),
);
const uv = array(
vec2(1.0, 0.0),
vec2(1.0, 1.0),
vec2(0.0, 1.0),
vec2(1.0, 0.0),
vec2(0.0, 1.0),
vec2(0.0, 0.0),
);
return pos[VertexIndex];
var output : VertexOutput;
output.Position = vec4(pos[VertexIndex], 0.0, 1.0);
output.fragUV = uv[VertexIndex];
return output;
}
`,
}),
@@ -58,9 +78,9 @@ function createExternalTextureSamplingTestPipeline(
@group(0) @binding(0) var s : sampler;
@group(0) @binding(1) var t : texture_external;
@fragment fn main(@builtin(position) FragCoord : vec4<f32>)
-> @location(0) vec4<f32> {
return textureSampleBaseClampToEdge(t, s, FragCoord.xy / vec2<f32>(16.0, 16.0));
@fragment fn main(@location(0) fragUV : vec2f)
-> @location(0) vec4f {
return textureSampleBaseClampToEdge(t, s, fragUV);
}
`,
}),
@@ -603,3 +623,99 @@ compute shader, for several combinations of video format, video color spaces and
]);
});
});

g.test('importExternalTexture,cameraCapture')
.desc(
`
Tests that we can import a VideoFrame captured from the webcam into a GPUExternalTexture, sample
from it, and compare the result against a 2d canvas rendering of the same frame.
`
)
.params(u =>
u //
.combineWithParams(checkNonStandardIsZeroCopyIfAvailable())
.combine('dstColorSpace', kPredefinedColorSpace)
)
.fn(async t => {
const { dstColorSpace } = t.params;

const frame = await captureCameraFrame(t);

if (frame.displayHeight === 0 || frame.displayWidth === 0) {
t.skip('Captured video frame has 0 height or width.');
}

const frameWidth = frame.displayWidth;
const frameHeight = frame.displayHeight;

// Use WebGPU + GPUExternalTexture to render the captured frame.
const colorAttachment = t.createTextureTracked({
format: kFormat,
size: { width: frameWidth, height: frameHeight },
usage: GPUTextureUsage.COPY_SRC | GPUTextureUsage.RENDER_ATTACHMENT,
});

const pipeline = createExternalTextureSamplingTestPipeline(t);
const bindGroup = createExternalTextureSamplingTestBindGroup(
t,
t.params.checkNonStandardIsZeroCopy,
frame,
pipeline,
dstColorSpace
);

const commandEncoder = t.device.createCommandEncoder();
const passEncoder = commandEncoder.beginRenderPass({
colorAttachments: [
{
view: colorAttachment.createView(),
clearValue: { r: 0.0, g: 0.0, b: 0.0, a: 1.0 },
loadOp: 'clear',
storeOp: 'store',
},
],
});
passEncoder.setPipeline(pipeline);
passEncoder.setBindGroup(0, bindGroup);
passEncoder.draw(6);
passEncoder.end();
t.device.queue.submit([commandEncoder.finish()]);

// Use 2d context canvas as expected result.
const canvas = createCanvas(t, 'onscreen', frameWidth, frameHeight);

const canvasContext = canvas.getContext('2d', { colorSpace: dstColorSpace });

if (canvasContext === null) {
t.skip(' onscreen canvas 2d context not available');
}

const ctx = canvasContext as CanvasRenderingContext2D;
ctx.drawImage(frame, 0, 0, frameWidth, frameHeight);

const imageData = ctx.getImageData(0, 0, frameWidth, frameHeight, {
colorSpace: dstColorSpace,
});

const expectedView = t.getExpectedDstPixelsFromSrcPixels({
srcPixels: imageData.data,
srcOrigin: [0, 0],
srcSize: [frameWidth, frameHeight],
dstOrigin: [0, 0],
dstSize: [frameWidth, frameHeight],
subRectSize: [frameWidth, frameHeight],
format: 'rgba8unorm',
flipSrcBeforeCopy: false,
srcDoFlipYDuringCopy: false,
conversion: {
srcPremultiplied: false,
dstPremultiplied: true,
},
});

t.expectTexelViewComparisonIsOkInTexture({ texture: colorAttachment }, expectedView, [
frameWidth,
frameHeight,
1,
]);
});
61 changes: 60 additions & 1 deletion src/webgpu/web_platform/util.ts
@@ -466,7 +466,9 @@ export async function getVideoFrameFromVideoElement(

return raceWithRejectOnTimeout(
new Promise<VideoFrame>(resolve => {
const videoTrack: MediaStreamVideoTrack = video.captureStream().getVideoTracks()[0];
const videoTrack: MediaStreamVideoTrack = video
.captureStream()
.getVideoTracks()[0] as MediaStreamVideoTrack;
const trackProcessor: MediaStreamTrackProcessor<VideoFrame> = new MediaStreamTrackProcessor({
track: videoTrack,
});
@@ -547,3 +549,60 @@ function callbackHelper(
const promise = raceWithRejectOnTimeout(promiseWithoutTimeout, 2000, timeoutMessage);
return { promise, callbackAndResolve: callbackAndResolve! };
}

/**
* Create a VideoFrame from a camera-captured frame, skipping the test if the browser
* environment does not support camera capture.
* Returns a WebCodecs VideoFrame.
*
* @param test - the GPUTest that needs the VideoFrame
*
*/
export async function captureCameraFrame(test: GPUTest): Promise<VideoFrame> {
if (
typeof navigator.mediaDevices === 'undefined' ||
typeof navigator.mediaDevices.getUserMedia === 'undefined'
) {
test.skip("Browser doesn't support capture frame from camera.");
}

const stream = await navigator.mediaDevices.getUserMedia({ video: true });
const track = stream.getVideoTracks()[0] as MediaStreamVideoTrack;

if (!track) {
test.skip("Doesn't have valid camera captured stream for testing.");
}

// Use MediaStreamTrackProcessor and ReadableStream to generate video frame directly.
if (typeof MediaStreamTrackProcessor !== 'undefined') {
const trackProcessor = new MediaStreamTrackProcessor({ track });
const reader = trackProcessor.readable.getReader();
const result = await reader.read();
if (result.done) {
test.skip('MediaStreamTrackProcessor: Cannot get valid frame from readable stream.');
}

return result.value;
}

// Fall back to ImageCapture if MediaStreamTrackProcessor is not supported, using grabFrame() to
// produce an ImageBitmap and create a VideoFrame from it.
if (typeof ImageCapture !== 'undefined') {
const imageCapture = new ImageCapture(track);
const imageBitmap = await imageCapture.grabFrame();
return new VideoFrame(imageBitmap);
}

// Fall back to capturing via an HTMLVideoElement.
if (typeof HTMLVideoElement === 'undefined') {
test.skip('Tried to capture via HTMLVideoElement, but HTMLVideoElement is not available.');
}

const video = document.createElement('video');
video.srcObject = stream;

const frame = await getVideoFrameFromVideoElement(test, video);
test.trackForCleanup(frame);

return frame;
}
1 change: 1 addition & 0 deletions tsconfig.json
@@ -11,6 +11,7 @@
"@webgpu/types",
// Required for standalone.ts
"@types/jquery",
"@types/w3c-image-capture",
],
/* Output options */
"noEmit": true,
