Skip to content

Commit

Permalink
Add Occlusion Query Sample (#431)
Browse files Browse the repository at this point in the history
* Add Occlusion Query Sample

This is the simplest thing I could think of to
demonstrate how to use Occlusion Queries.
  • Loading branch information
greggman authored Jul 11, 2024
1 parent 464b104 commit 5a73b14
Show file tree
Hide file tree
Showing 5 changed files with 426 additions and 0 deletions.
40 changes: 40 additions & 0 deletions sample/occlusionQuery/index.html
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
<!-- Fixed: title previously said "wireframe", copied from another sample. -->
<title>webgpu-samples: occlusionQuery</title>
<style>
:root {
color-scheme: light dark;
}
html, body {
margin: 0; /* remove default margin */
height: 100%; /* make body fill the browser window */
display: flex;
place-content: center center;
}
canvas {
width: 600px;
height: 600px;
max-width: 100%;
display: block;
}
#info {
position: absolute;
left: 0;
top: 0;
padding: 1em;
margin: 0;
width: 12em;
height: 1.25em;
}
</style>
<script defer src="main.js" type="module"></script>
<script defer type="module" src="../../js/iframe-helper.js"></script>
</head>
<body>
<canvas></canvas>
<pre id="info"></pre>
</body>
</html>
346 changes: 346 additions & 0 deletions sample/occlusionQuery/main.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,346 @@
import { GUI } from 'dat.gui';
import { mat4 } from 'wgpu-matrix';
import solidColorLitWGSL from './solidColorLit.wgsl';

const settings = {
animate: true,
};
const gui = new GUI();
gui.add(settings, 'animate');

/**
 * Union of the concrete typed-array view types accepted as source data
 * by `createBufferWithData` below.
 */
type TypedArrayView =
  | Int8Array
  | Uint8Array
  | Int16Array
  | Uint16Array
  | Int32Array
  | Uint32Array
  | Float32Array
  | Float64Array;

/**
 * Matching union of typed-array constructors, used to build a view of the
 * same element type as a given {@link TypedArrayView} over a mapped buffer.
 */
export type TypedArrayConstructor =
  | Int8ArrayConstructor
  | Uint8ArrayConstructor
  | Int16ArrayConstructor
  | Uint16ArrayConstructor
  | Int32ArrayConstructor
  | Uint32ArrayConstructor
  | Float32ArrayConstructor
  | Float64ArrayConstructor;

// <pre> element used each frame to report which cubes passed their queries.
const info = document.querySelector('#info');

const adapter = await navigator.gpu.requestAdapter();
// requestAdapter() resolves to null when WebGPU is not available; fail fast
// with a clear message instead of a TypeError on the next line.
if (!adapter) {
  throw new Error('this browser does not support WebGPU');
}
const device = await adapter.requestDevice();

const canvas = document.querySelector('canvas') as HTMLCanvasElement;
const context = canvas.getContext('webgpu') as GPUCanvasContext;

// Size the drawing buffer to the canvas's CSS size in device pixels.
const devicePixelRatio = window.devicePixelRatio;
canvas.width = canvas.clientWidth * devicePixelRatio;
canvas.height = canvas.clientHeight * devicePixelRatio;

const presentationFormat = navigator.gpu.getPreferredCanvasFormat();
context.configure({
  device,
  format: presentationFormat,
  alphaMode: 'premultiplied',
});
const depthFormat = 'depth24plus';

// Shader module holding both the vertex and fragment entry points
// (see solidColorLit.wgsl).
const module = device.createShaderModule({
  code: solidColorLitWGSL,
});

// Single pipeline used to draw every cube: interleaved position+normal
// vertices, back-face culling, and standard less-than depth testing.
const pipeline = device.createRenderPipeline({
  layout: 'auto',
  vertex: {
    module,
    buffers: [
      {
        arrayStride: 6 * 4, // 3x2 floats, 4 bytes each
        attributes: [
          { shaderLocation: 0, offset: 0, format: 'float32x3' }, // position
          { shaderLocation: 1, offset: 12, format: 'float32x3' }, // normal
        ],
      },
    ],
  },
  fragment: {
    module,
    targets: [{ format: presentationFormat }],
  },
  primitive: {
    topology: 'triangle-list',
    cullMode: 'back', // cubes are closed meshes; back faces are never visible
  },
  depthStencil: {
    depthWriteEnabled: true,
    depthCompare: 'less',
    format: depthFormat,
  },
});

// prettier-ignore
const cubePositions = [
{ position: [-1, 0, 0], id: '🟥', color: [1, 0, 0, 1] },
{ position: [ 1, 0, 0], id: '🟨', color: [1, 1, 0, 1] },
{ position: [ 0, -1, 0], id: '🟩', color: [0, 0.5, 0, 1] },
{ position: [ 0, 1, 0], id: '🟧', color: [1, 0.6, 0, 1] },
{ position: [ 0, 0, -1], id: '🟦', color: [0, 0, 1, 1] },
{ position: [ 0, 0, 1], id: '🟪', color: [0.5, 0, 0.5, 1] },
];

// Per-cube state: a uniform buffer plus CPU-side staging views into it,
// the bind group exposing it to the shader, and the cube's world position.
const objectInfos = cubePositions.map(({ position, id, color }) => {
  // 2 mat4s (16 floats each) + color (4 floats) + (3 + 1) extra floats.
  // NOTE(review): only offsets 0..36 are written below; the extra 4 floats
  // presumably reserve space for further shader uniforms — confirm against
  // solidColorLit.wgsl.
  const uniformBufferSize = (2 * 16 + 3 + 1 + 4) * 4;
  const uniformBuffer = device.createBuffer({
    size: uniformBufferSize,
    usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST,
  });
  // Staging array; subarrays below alias regions of it, so writing the
  // matrices updates `uniformValues` in place before each upload.
  const uniformValues = new Float32Array(uniformBufferSize / 4);
  const worldViewProjection = uniformValues.subarray(0, 16);
  const worldInverseTranspose = uniformValues.subarray(16, 32);
  const colorValue = uniformValues.subarray(32, 36);

  // Color never changes, so it is written once here.
  colorValue.set(color);

  const bindGroup = device.createBindGroup({
    layout: pipeline.getBindGroupLayout(0),
    entries: [{ binding: 0, resource: { buffer: uniformBuffer } }],
  });

  return {
    id,
    // Spread the cubes out: unit axis offsets scaled to world space.
    position: position.map((v) => v * 10),
    bindGroup,
    uniformBuffer,
    uniformValues,
    worldInverseTranspose,
    worldViewProjection,
  };
});

// One occlusion query slot per cube.
const occlusionQuerySet = device.createQuerySet({
  type: 'occlusion',
  count: objectInfos.length,
});

// GPU-side buffer the query results are resolved into.
const resolveBuffer = device.createBuffer({
  label: 'resolveBuffer',
  // Query results are 64bit unsigned integers.
  size: objectInfos.length * BigUint64Array.BYTES_PER_ELEMENT,
  usage: GPUBufferUsage.QUERY_RESOLVE | GPUBufferUsage.COPY_SRC,
});

// CPU-mappable copy destination used to read the results back each frame.
const resultBuffer = device.createBuffer({
  label: 'resultBuffer',
  size: resolveBuffer.size,
  usage: GPUBufferUsage.COPY_DST | GPUBufferUsage.MAP_READ,
});

/**
 * Creates a GPUBuffer of the given usage, pre-filled with `data`.
 *
 * The buffer is created mapped, a typed-array view of the same element
 * type as `data` is laid over the mapped range, the data is copied in,
 * and the buffer is unmapped before being returned.
 */
function createBufferWithData(
  device: GPUDevice,
  data: TypedArrayView,
  usage: GPUBufferUsageFlags,
  label: string
) {
  const buffer = device.createBuffer({
    label,
    size: data.byteLength,
    usage,
    mappedAtCreation: true,
  });
  // Match the source's element type when viewing the mapped range.
  const Ctor = data.constructor as TypedArrayConstructor;
  new Ctor(buffer.getMappedRange()).set(data);
  buffer.unmap();
  return buffer;
}

// prettier-ignore
const vertexData = new Float32Array([
// position normal
1, 1, -1, 1, 0, 0,
1, 1, 1, 1, 0, 0,
1, -1, 1, 1, 0, 0,
1, -1, -1, 1, 0, 0,
-1, 1, 1, -1, 0, 0,
-1, 1, -1, -1, 0, 0,
-1, -1, -1, -1, 0, 0,
-1, -1, 1, -1, 0, 0,
-1, 1, 1, 0, 1, 0,
1, 1, 1, 0, 1, 0,
1, 1, -1, 0, 1, 0,
-1, 1, -1, 0, 1, 0,
-1, -1, -1, 0, -1, 0,
1, -1, -1, 0, -1, 0,
1, -1, 1, 0, -1, 0,
-1, -1, 1, 0, -1, 0,
1, 1, 1, 0, 0, 1,
-1, 1, 1, 0, 0, 1,
-1, -1, 1, 0, 0, 1,
1, -1, 1, 0, 0, 1,
-1, 1, -1, 0, 0, -1,
1, 1, -1, 0, 0, -1,
1, -1, -1, 0, 0, -1,
-1, -1, -1, 0, 0, -1,
]);
// prettier-ignore
const indices = new Uint16Array([
0, 1, 2, 0, 2, 3, // +x face
4, 5, 6, 4, 6, 7, // -x face
8, 9, 10, 8, 10, 11, // +y face
12, 13, 14, 12, 14, 15, // -y face
16, 17, 18, 16, 18, 19, // +z face
20, 21, 22, 20, 22, 23, // -z face
]);

// Upload the cube geometry to the GPU.
const vertexBuffer = createBufferWithData(
  device,
  vertexData,
  GPUBufferUsage.VERTEX,
  'vertexBuffer'
);
const indicesBuffer = createBufferWithData(
  device,
  indices,
  GPUBufferUsage.INDEX,
  'indexBuffer'
);

// Render pass descriptor, reused every frame; the color and depth views are
// filled in per frame because the swap-chain texture changes each frame.
// Attaching `occlusionQuerySet` is what permits begin/endOcclusionQuery()
// calls inside the pass.
const renderPassDescriptor: GPURenderPassDescriptor = {
  colorAttachments: [
    {
      view: undefined, // Assigned later
      clearValue: { r: 0.5, g: 0.5, b: 0.5, a: 1.0 },
      loadOp: 'clear',
      storeOp: 'store',
    },
  ],
  depthStencilAttachment: {
    view: undefined, // Assigned later
    depthClearValue: 1.0,
    depthLoadOp: 'clear',
    depthStoreOp: 'store',
  },
  occlusionQuerySet,
};

/** Linear interpolation from `start` to `end` by fraction `t`. */
function lerp(start: number, end: number, t: number): number {
  return start + (end - start) * t;
}

/** Component-wise {@link lerp} of two equal-length vectors. */
function lerpV(start: number[], end: number[], t: number): number[] {
  return start.map((value, i) => lerp(value, end[i], t));
}

/** Oscillates smoothly between 0 and 1 as `t` increases (period 1). */
function pingPongSine(t: number): number {
  return 0.5 + Math.sin(t * Math.PI * 2) * 0.5;
}

// Depth buffer, (re)created lazily whenever the canvas size changes.
let depthTexture: GPUTexture | undefined;

let time = 0; // animation clock in seconds; only advances while animating
let then = 0; // timestamp of the previous frame, in seconds

function render(now: number) {
  now *= 0.001; // convert to seconds
  const deltaTime = now - then;
  then = now;

  if (settings.animate) {
    time += deltaTime;
  }

  // 30° vertical field of view; near/far chosen to cover the camera's path.
  const projection = mat4.perspective(
    (30 * Math.PI) / 180,
    canvas.clientWidth / canvas.clientHeight,
    0.5,
    100
  );

  // Orbit the camera around the origin while sliding it between z = 5 and
  // z = 40, so cubes repeatedly move in and out of occlusion.
  const m = mat4.identity();
  mat4.rotateX(m, time, m);
  mat4.rotateY(m, time * 0.7, m);
  mat4.translate(m, lerpV([0, 0, 5], [0, 0, 40], pingPongSine(time * 0.2)), m);
  const view = mat4.inverse(m);
  const viewProjection = mat4.multiply(projection, view);

  // Recreate the depth texture if the canvas was resized.
  const canvasTexture = context.getCurrentTexture();
  if (
    !depthTexture ||
    depthTexture.width !== canvasTexture.width ||
    depthTexture.height !== canvasTexture.height
  ) {
    if (depthTexture) {
      depthTexture.destroy();
    }

    depthTexture = device.createTexture({
      size: canvasTexture, // canvasTexture has width, height, and depthOrArrayLayers properties
      format: depthFormat,
      usage: GPUTextureUsage.RENDER_ATTACHMENT,
    });
  }

  // Reuse `canvasTexture` from above: getCurrentTexture() returns the same
  // texture for the whole frame, so the original second call was redundant.
  renderPassDescriptor.colorAttachments[0].view = canvasTexture.createView();
  renderPassDescriptor.depthStencilAttachment.view = depthTexture.createView();

  const encoder = device.createCommandEncoder();
  const pass = encoder.beginRenderPass(renderPassDescriptor);
  pass.setPipeline(pipeline);

  objectInfos.forEach(
    (
      {
        bindGroup,
        uniformBuffer,
        uniformValues,
        worldViewProjection,
        worldInverseTranspose,
        position,
      },
      i
    ) => {
      // Update this cube's matrices in the staging array, then upload.
      const world = mat4.translation(position);
      mat4.transpose(mat4.inverse(world), worldInverseTranspose);
      mat4.multiply(viewProjection, world, worldViewProjection);

      device.queue.writeBuffer(uniformBuffer, 0, uniformValues);

      pass.setBindGroup(0, bindGroup);
      pass.setVertexBuffer(0, vertexBuffer);
      pass.setIndexBuffer(indicesBuffer, 'uint16');
      // Bracket the draw with occlusion query i: the result is non-zero if
      // any samples of cube i passed the depth/stencil tests.
      pass.beginOcclusionQuery(i);
      pass.drawIndexed(indices.length);
      pass.endOcclusionQuery();
    }
  );

  pass.end();

  // Resolve all query results into resolveBuffer (one 64-bit uint each)…
  encoder.resolveQuerySet(
    occlusionQuerySet,
    0,
    objectInfos.length,
    resolveBuffer,
    0
  );
  // …and copy them to the mappable resultBuffer, but only when the previous
  // readback has finished (a buffer must be unmapped to be a copy target).
  if (resultBuffer.mapState === 'unmapped') {
    encoder.copyBufferToBuffer(
      resolveBuffer,
      0,
      resultBuffer,
      0,
      resultBuffer.size
    );
  }

  device.queue.submit([encoder.finish()]);

  // Read the results asynchronously; skip frames where the buffer is still
  // mapped from an in-flight readback.
  if (resultBuffer.mapState === 'unmapped') {
    resultBuffer.mapAsync(GPUMapMode.READ).then(() => {
      // slice() copies the data out so we can unmap immediately.
      const results = new BigUint64Array(resultBuffer.getMappedRange()).slice();
      resultBuffer.unmap();

      const visible = objectInfos
        .filter((_, i) => results[i])
        .map(({ id }) => id)
        .join('');
      info.textContent = `visible: ${visible}`;
    });
  }

  requestAnimationFrame(render);
}
requestAnimationFrame(render);
8 changes: 8 additions & 0 deletions sample/occlusionQuery/meta.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
// Sample metadata consumed by the webgpu-samples site.
export default {
  name: 'Occlusion Query',
  description: `
This example demonstrates using Occlusion Queries.
`,
  // NOTE(review): __DIRNAME__ is presumably substituted with the sample's
  // directory at build time — confirm against the samples build setup.
  filename: __DIRNAME__,
  sources: [{ path: 'main.ts' }, { path: 'solidColorLit.wgsl' }],
};
Loading

0 comments on commit 5a73b14

Please sign in to comment.