Skip to content

Commit

Permalink
WGSL textureLoad tests for external texture (#3891)
Browse files Browse the repository at this point in the history
  • Loading branch information
greggman authored Aug 6, 2024
1 parent 28935e3 commit 5b1e902
Show file tree
Hide file tree
Showing 2 changed files with 104 additions and 9 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -54,8 +54,8 @@ import {
generateTextureBuiltinInputs2D,
generateTextureBuiltinInputs3D,
Dimensionality,
createVideoFrameWithRandomDataAndGetTexels,
} from './texture_utils.js';
import { generateCoordBoundaries } from './utils.js';

const kTestableColorFormats = [...kEncodableTextureFormats, ...kCompressedTextureFormats] as const;

Expand Down Expand Up @@ -469,13 +469,57 @@ fn textureLoad(t: texture_external, coords: vec2<C>) -> vec4<f32>
Parameters:
* t: The sampled texture to read from
* coords: The 0-based texel coordinate
* coords: The 0-based texel coordinate.
`
)
.paramsSubcasesOnly(u =>
u.combine('C', ['i32', 'u32'] as const).combine('coords', generateCoordBoundaries(2))
u
.combine('samplePoints', kSamplePointMethods)
.combine('C', ['i32', 'u32'] as const)
.combine('L', ['i32', 'u32'] as const)
)
.unimplemented();
.fn(async t => {
const { samplePoints, C, L } = t.params;

const size = [8, 8, 1];

// Note: external texture doesn't use this descriptor.
// It's used to pass to the softwareTextureRead functions.
const descriptor: GPUTextureDescriptor = {
format: 'rgba8unorm',
size,
usage: GPUTextureUsage.COPY_DST,
};
const { texels, videoFrame } = createVideoFrameWithRandomDataAndGetTexels(descriptor.size);
const texture = t.device.importExternalTexture({ source: videoFrame });

const calls: TextureCall<vec2>[] = generateTextureBuiltinInputs2D(50, {
method: samplePoints,
descriptor,
hashInputs: [samplePoints, C, L],
}).map(({ coords }) => {
return {
builtin: 'textureLoad',
coordType: C === 'i32' ? 'i' : 'u',
coords: normalizedCoordToTexelLoadTestCoord(descriptor, 0, C, coords),
};
});

const textureType = 'texture_external';
const viewDescriptor = {};
const sampler = undefined;
const results = await doTextureCalls(t, texture, viewDescriptor, textureType, sampler, calls);
const res = await checkCallResults(
t,
{ texels, descriptor, viewDescriptor },
textureType,
sampler,
calls,
results
);
t.expectOK(res);
videoFrame.close();
});

g.test('arrayed')
.specURL('https://www.w3.org/TR/WGSL/#textureload')
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1798,6 +1798,48 @@ function valueIfAllComponentsAreEqual(
return s.size === 1 ? s.values().next().value : undefined;
}

/**
 * Creates a VideoFrame with random data and a TexelView with the same data.
 *
 * The VideoFrame is produced by drawing deterministic pseudo-random RGBA data
 * into an OffscreenCanvas; the returned TexelView holds the premultiplied-alpha
 * expectation of that same data, for use as the reference when checking
 * texture reads of the imported external texture.
 *
 * @param textureSize - desired frame size; depthOrArrayLayers must be 1
 *   (VideoFrames are 2D).
 * @returns `{ videoFrame, texels }` where `texels` is a single-layer array of
 *   TexelView over the premultiplied data.
 */
export function createVideoFrameWithRandomDataAndGetTexels(textureSize: GPUExtent3D) {
  const size = reifyExtent3D(textureSize);
  assert(size.depthOrArrayLayers === 1);

  // Fill ImageData with random values.
  // Writing one u32 per texel covers all 4 RGBA channels at once; hashU32(i)
  // makes the pattern deterministic and position-dependent across runs.
  const imageData = new ImageData(size.width, size.height);
  const data = imageData.data;
  const asU32 = new Uint32Array(data.buffer);
  for (let i = 0; i < asU32.length; ++i) {
    asU32[i] = hashU32(i);
  }

  // Put the ImageData into a canvas and make a VideoFrame.
  // Ordering matters: putImageData consumes UNpremultiplied data, and the
  // VideoFrame snapshots the canvas here — so this must happen before the
  // in-place premultiplication of `data` below.
  const canvas = new OffscreenCanvas(size.width, size.height);
  const ctx = canvas.getContext('2d')!;
  ctx.putImageData(imageData, 0, 0);
  const videoFrame = new VideoFrame(canvas, { timestamp: 0 });

  // Premultiply the ImageData
  // (mutates `data` in place to build the expected values; the 2d canvas
  // stores premultiplied alpha internally, so the frame's effective contents
  // should match this — NOTE(review): rounding during the canvas's own
  // premultiply may differ by 1 ULP from this integer math; presumably the
  // comparison tolerance absorbs that — confirm against checkCallResults.)
  for (let i = 0; i < data.length; i += 4) {
    const alpha = data[i + 3] / 255;
    data[i + 0] = data[i + 0] * alpha;
    data[i + 1] = data[i + 1] * alpha;
    data[i + 2] = data[i + 2] * alpha;
  }

  // Create a TexelView from the premultiplied ImageData.
  // Wrapped in a 1-element array: one TexelView per mip level, and a
  // VideoFrame has exactly one.
  const texels = [
    TexelView.fromTextureDataByReference('rgba8unorm', data, {
      bytesPerRow: size.width * 4,
      rowsPerImage: size.height,
      subrectOrigin: [0, 0, 0],
      subrectSize: size,
    }),
  ];

  return { videoFrame, texels };
}

const kFaceNames = ['+x', '-x', '+y', '-y', '+z', '-z'] as const;

/**
Expand Down Expand Up @@ -2831,7 +2873,7 @@ const s_deviceToPipelines = new WeakMap<GPUDevice, Map<string, GPURenderPipeline
*/
export async function doTextureCalls<T extends Dimensionality>(
t: GPUTest,
gpuTexture: GPUTexture,
gpuTexture: GPUTexture | GPUExternalTexture,
viewDescriptor: GPUTextureViewDescriptor,
textureType: string,
sampler: GPUSamplerDescriptor | undefined,
Expand Down Expand Up @@ -2869,9 +2911,12 @@ export async function doTextureCalls<T extends Dimensionality>(
});
t.device.queue.writeBuffer(dataBuffer, 0, new Uint32Array(data));

const { resultType, resultFormat, componentType } = textureType.includes('depth')
? ({ resultType: 'f32', resultFormat: 'rgba32float', componentType: 'f32' } as const)
: getTextureFormatTypeInfo(gpuTexture.format);
const { resultType, resultFormat, componentType } =
gpuTexture instanceof GPUExternalTexture
? ({ resultType: 'vec4f', resultFormat: 'rgba32float', componentType: 'f32' } as const)
: textureType.includes('depth')
? ({ resultType: 'f32', resultFormat: 'rgba32float', componentType: 'f32' } as const)
: getTextureFormatTypeInfo(gpuTexture.format);
const returnType = `vec4<${componentType}>`;

const rtWidth = 256;
Expand Down Expand Up @@ -2936,7 +2981,13 @@ ${body}
const bindGroup = t.device.createBindGroup({
layout: pipeline.getBindGroupLayout(0),
entries: [
{ binding: 0, resource: gpuTexture.createView(viewDescriptor) },
{
binding: 0,
resource:
gpuTexture instanceof GPUExternalTexture
? gpuTexture
: gpuTexture.createView(viewDescriptor),
},
...(sampler ? [{ binding: 1, resource: gpuSampler! }] : []),
{ binding: 2, resource: { buffer: dataBuffer } },
],
Expand Down

0 comments on commit 5b1e902

Please sign in to comment.