Add timestamp query example #472

Merged (5 commits) on Nov 14, 2024
Changes from 1 commit
30 changes: 30 additions & 0 deletions sample/timestampQuery/index.html
@@ -0,0 +1,30 @@
<!DOCTYPE html>
<html>
  <head>
    <meta charset="utf-8">
    <meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
    <title>webgpu-samples: timestampQuery</title>
    <style>
      :root {
        color-scheme: light dark;
      }
      html, body {
        margin: 0;      /* remove default margin */
        height: 100%;   /* make body fill the browser window */
        display: flex;
        place-content: center center;
      }
      canvas {
        width: 600px;
        height: 600px;
        max-width: 100%;
        display: block;
      }
    </style>
    <script defer src="main.js" type="module"></script>
    <script defer type="module" src="../../js/iframe-helper.js"></script>
  </head>
  <body>
    <canvas></canvas>
  </body>
</html>
307 changes: 307 additions & 0 deletions sample/timestampQuery/main.ts
@@ -0,0 +1,307 @@
import { GUI } from 'dat.gui';
import { mat4, vec3 } from 'wgpu-matrix';

import {
cubeVertexArray,
cubeVertexSize,
cubeUVOffset,
cubePositionOffset,
cubeVertexCount,
} from '../../meshes/cube';

import basicVertWGSL from '../../shaders/basic.vert.wgsl';
import sampleTextureMixColorWGSL from '../../shaders/red.frag.wgsl';
import { quitIfWebGPUNotAvailable, fail } from '../util';

const canvas = document.querySelector('canvas') as HTMLCanvasElement;
const adapter = await navigator.gpu?.requestAdapter();
if (adapter && !adapter.features.has('timestamp-query')) {
  fail('WebGPU timestamp queries are not supported on this system');
}
const device = await adapter?.requestDevice({
  // We request a device that has support for timestamp queries
  requiredFeatures: ['timestamp-query'],
});
quitIfWebGPUNotAvailable(adapter, device);

const perfDisplayContainer = document.createElement('div');
perfDisplayContainer.style.color = 'white';
perfDisplayContainer.style.background = 'black';
perfDisplayContainer.style.position = 'absolute';
perfDisplayContainer.style.top = '10px';
perfDisplayContainer.style.left = '10px';

const perfDisplay = document.createElement('pre');
perfDisplayContainer.appendChild(perfDisplay);
if (canvas.parentNode) {
  canvas.parentNode.appendChild(perfDisplayContainer);
} else {
  console.error('canvas.parentNode is null');
}

const context = canvas.getContext('webgpu') as GPUCanvasContext;

const devicePixelRatio = window.devicePixelRatio;
canvas.width = canvas.clientWidth * devicePixelRatio;
canvas.height = canvas.clientHeight * devicePixelRatio;
const presentationFormat = navigator.gpu.getPreferredCanvasFormat();

context.configure({
  device,
  format: presentationFormat,
});

// Create timestamp queries
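// Timestamps are expressed in nanoseconds; note that implementations may
// reduce timer precision to mitigate timing attacks.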
const timestampQuerySet = device.createQuerySet({
  type: 'timestamp',
  count: 2, // begin and end
});

// Create a buffer in which to store the results of the GPU queries
const timestampBufferSize = 2 * 8; // timestamps are uint64
const timestampBuffer = device.createBuffer({
  size: timestampBufferSize,
  usage: GPUBufferUsage.COPY_SRC | GPUBufferUsage.QUERY_RESOLVE,
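  // (QUERY_RESOLVE is required for the destination of resolveQuerySet();
  // COPY_SRC lets us later copy the resolved values into a mappable buffer.)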
});

// Create a buffer to map the result back to the CPU
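// (MAP_READ may only be combined with COPY_DST, so the QUERY_RESOLVE buffer
// above cannot be mapped directly; its content is copied into this one first.)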
const timestampMapBuffer = device.createBuffer({
  size: timestampBufferSize,
  usage: GPUBufferUsage.COPY_DST | GPUBufferUsage.MAP_READ,
});

// Create a vertex buffer from the cube data.
const verticesBuffer = device.createBuffer({
  size: cubeVertexArray.byteLength,
  usage: GPUBufferUsage.VERTEX,
  mappedAtCreation: true,
});
new Float32Array(verticesBuffer.getMappedRange()).set(cubeVertexArray);
verticesBuffer.unmap();

const pipeline = device.createRenderPipeline({
  layout: 'auto',
  vertex: {
    module: device.createShaderModule({
      code: basicVertWGSL,
    }),
    buffers: [
      {
        arrayStride: cubeVertexSize,
        attributes: [
          {
            // position
            shaderLocation: 0,
            offset: cubePositionOffset,
            format: 'float32x4',
          },
          {
            // uv
            shaderLocation: 1,
            offset: cubeUVOffset,
            format: 'float32x2',
          },
        ],
      },
    ],
  },
  fragment: {
    module: device.createShaderModule({
      code: sampleTextureMixColorWGSL,
    }),
    targets: [
      {
        format: presentationFormat,
      },
    ],
  },
  primitive: {
    topology: 'triangle-list',

    // Backface culling since the cube is a solid piece of geometry.
    // Faces pointing away from the camera will be occluded by faces
    // pointing toward the camera.
    cullMode: 'back',
  },

  // Enable depth testing so that the fragment closest to the camera
  // is rendered in front.
  depthStencil: {
    depthWriteEnabled: true,
    depthCompare: 'less',
    format: 'depth24plus',
  },
});

const depthTexture = device.createTexture({
  size: [canvas.width, canvas.height],
  format: 'depth24plus',
  usage: GPUTextureUsage.RENDER_ATTACHMENT,
});

const uniformBufferSize = 4 * 16; // 4x4 matrix
const uniformBuffer = device.createBuffer({
  size: uniformBufferSize,
  usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST,
});

const uniformBindGroup = device.createBindGroup({
  layout: pipeline.getBindGroupLayout(0),
  entries: [
    {
      binding: 0,
      resource: {
        buffer: uniformBuffer,
      },
    },
  ],
});

const renderPassDescriptor: GPURenderPassDescriptor = {
  colorAttachments: [
    {
      view: undefined, // Assigned later

      clearValue: [0.5, 0.5, 0.5, 1.0],
      loadOp: 'clear',
      storeOp: 'store',
    },
  ],
  depthStencilAttachment: {
    view: depthTexture.createView(),

    depthClearValue: 1.0,
    depthLoadOp: 'clear',
    depthStoreOp: 'store',
  },
  // We instruct the render pass to write to the timestamp query set at the
  // beginning and at the end of the pass
  timestampWrites: {
    querySet: timestampQuerySet,
    beginningOfPassWriteIndex: 0,
    endOfPassWriteIndex: 1,
  },
};

const aspect = canvas.width / canvas.height;
const projectionMatrix = mat4.perspective((2 * Math.PI) / 5, aspect, 1, 100.0);
const modelViewProjectionMatrix = mat4.create();

function getTransformationMatrix() {
  const viewMatrix = mat4.identity();
  mat4.translate(viewMatrix, vec3.fromValues(0, 0, -4), viewMatrix);
  const now = Date.now() / 1000;
  mat4.rotate(
    viewMatrix,
    vec3.fromValues(Math.sin(now), Math.cos(now), 0),
    1,
    viewMatrix
  );

  mat4.multiply(projectionMatrix, viewMatrix, modelViewProjectionMatrix);

  return modelViewProjectionMatrix;
}

// State used to avoid firing concurrent readback of timestamp values
let hasOngoingTimestampReadback = false;

// A minimalistic perf timer class that computes mean + stddev online
class PerfCounter {
  sampleCount: number;
  accumulated: number;
  accumulatedSq: number;

  constructor() {
    this.sampleCount = 0;
    this.accumulated = 0;
    this.accumulatedSq = 0;
  }

  addSample(value: number) {
    this.sampleCount += 1;
    this.accumulated += value;
    this.accumulatedSq += value * value;
  }

  getAverage(): number {
    return this.sampleCount === 0 ? 0 : this.accumulated / this.sampleCount;
  }

  getStddev(): number {
    if (this.sampleCount === 0) return 0;
    const avg = this.getAverage();
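    // Variance via E[x^2] - E[x]^2; clamped at zero below because
    // floating-point rounding can make this difference slightly negative.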
    const variance = this.accumulatedSq / this.sampleCount - avg * avg;
    return Math.sqrt(Math.max(0.0, variance));
  }
}

const renderPassDurationCounter = new PerfCounter();

function frame() {
  const transformationMatrix = getTransformationMatrix();
  device.queue.writeBuffer(
    uniformBuffer,
    0,
    transformationMatrix.buffer,
    transformationMatrix.byteOffset,
    transformationMatrix.byteLength
  );
  renderPassDescriptor.colorAttachments[0].view = context
    .getCurrentTexture()
    .createView();

  const commandEncoder = device.createCommandEncoder();
  const passEncoder = commandEncoder.beginRenderPass(renderPassDescriptor);
  passEncoder.setPipeline(pipeline);
  passEncoder.setBindGroup(0, uniformBindGroup);
  passEncoder.setVertexBuffer(0, verticesBuffer);
  passEncoder.draw(cubeVertexCount);
  passEncoder.end();

  // After the end of the measured render pass, we resolve queries into a
  // dedicated buffer.
  commandEncoder.resolveQuerySet(
    timestampQuerySet,
    0 /* firstQuery */,
    2 /* queryCount */,
    timestampBuffer,
    0 /* destinationOffset */
  );
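  // Note: a buffer that is mapped, or that has a pending mapAsync(), must not
  // be used in a submitted command buffer, so the copy below is only encoded
  // when no timestamp readback is currently in flight.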

  if (!hasOngoingTimestampReadback) {
    // Copy values to the mappable buffer
    commandEncoder.copyBufferToBuffer(
      timestampBuffer, 0,
      timestampMapBuffer, 0,
      timestampBufferSize,
    );
  }

  device.queue.submit([commandEncoder.finish()]);

  // Read timestamp values back from GPU buffers
  if (!hasOngoingTimestampReadback) {
    hasOngoingTimestampReadback = true;
    timestampMapBuffer
      .mapAsync(GPUMapMode.READ, 0, timestampBufferSize)
      .then(() => {
        const buffer = timestampMapBuffer.getMappedRange(0, timestampBufferSize);
        const timestamps = new BigUint64Array(buffer);

        // Measure difference (in bigints)
        const elapsedNs = timestamps[1] - timestamps[0];
        // Cast into regular int (ok because value is small after difference)
        // and convert from nanoseconds to milliseconds:
        const elapsedMs = Number(elapsedNs) * 1e-6;
        renderPassDurationCounter.addSample(elapsedMs);
        console.log('timestamps (ms): elapsed', elapsedMs, 'avg', renderPassDurationCounter.getAverage());
        perfDisplay.innerHTML = `Render Pass duration: ${renderPassDurationCounter.getAverage().toFixed(3)} ms ± ${renderPassDurationCounter.getStddev().toFixed(3)} ms`;

        timestampMapBuffer.unmap();
        hasOngoingTimestampReadback = false;
      });
  }

  requestAnimationFrame(frame);
}
requestAnimationFrame(frame);
11 changes: 11 additions & 0 deletions sample/timestampQuery/meta.ts
@@ -0,0 +1,11 @@
export default {
  name: 'Timestamp Query',
  description: 'This example shows how to use timestamp queries to measure render pass duration.',
  filename: __DIRNAME__,
  sources: [
    { path: 'main.ts' },
    { path: '../../shaders/basic.vert.wgsl' },
    { path: '../../shaders/red.frag.wgsl' },
    { path: '../../meshes/cube.ts' },
  ],
};
10 changes: 10 additions & 0 deletions sample/timestampQuery/sampleTextureMixColor.frag.wgsl
@@ -0,0 +1,10 @@
@group(0) @binding(1) var mySampler: sampler;
@group(0) @binding(2) var myTexture: texture_2d<f32>;

@fragment
fn main(
  @location(0) fragUV: vec2f,
  @location(1) fragPosition: vec4f
) -> @location(0) vec4f {
  return textureSample(myTexture, mySampler, fragUV) * fragPosition;
}
2 changes: 1 addition & 1 deletion sample/util.ts
@@ -34,7 +34,7 @@ export function quitIfWebGPUNotAvailable(
}

/** Fail by showing a console error, and dialog box if possible. */
const fail = (() => {
export const fail = (() => {
  type ErrorOutput = { show(msg: string): void };

  function createErrorOutput() {
2 changes: 2 additions & 0 deletions src/samples.ts
@@ -34,6 +34,7 @@ import skinnedMesh from '../sample/skinnedMesh/meta';
import spookyball from '../sample/spookyball/meta';
import textRenderingMsdf from '../sample/textRenderingMsdf/meta';
import texturedCube from '../sample/texturedCube/meta';
import timestampQuery from '../sample/timestampQuery/meta';
import transparentCanvas from '../sample/transparentCanvas/meta';
import twoCubes from '../sample/twoCubes/meta';
import videoUploading from '../sample/videoUploading/meta';
@@ -171,6 +172,7 @@ export const pageCategories: PageCategory[] = [
    samples: {
      animometer,
      workloadSimulator,
      timestampQuery,
    },
  },
];