diff --git a/sample/timestampQuery/PerfCounter.ts b/sample/timestampQuery/PerfCounter.ts
new file mode 100644
index 00000000..ec2f05de
--- /dev/null
+++ b/sample/timestampQuery/PerfCounter.ts
@@ -0,0 +1,35 @@
+// A minimalistic perf timer class that computes mean + stddev online.
+// It keeps only running sums (Σx and Σx²), so memory stays O(1).
+export default class PerfCounter {
+  // Number of samples recorded so far.
+  sampleCount = 0;
+  // Running sum of all sample values (Σx).
+  accumulated = 0;
+  // Running sum of squared sample values (Σx²).
+  accumulatedSq = 0;
+
+  // Fold one measurement into the running sums.
+  addSample(value: number) {
+    this.sampleCount += 1;
+    this.accumulated += value;
+    this.accumulatedSq += value * value;
+  }
+
+  // Mean of all samples; 0 when nothing has been recorded yet.
+  getAverage(): number {
+    if (this.sampleCount === 0) {
+      return 0;
+    }
+    return this.accumulated / this.sampleCount;
+  }
+
+  // Population standard deviation via E[x²] − E[x]².
+  // The max(0, …) clamp guards against tiny negative variance
+  // caused by floating-point rounding.
+  getStddev(): number {
+    if (this.sampleCount === 0) return 0;
+    const mean = this.getAverage();
+    const variance = this.accumulatedSq / this.sampleCount - mean * mean;
+    return Math.sqrt(Math.max(0, variance));
+  }
+}
diff --git a/sample/timestampQuery/TimestampQueryManager.ts b/sample/timestampQuery/TimestampQueryManager.ts
new file mode 100644
index 00000000..46696418
--- /dev/null
+++ b/sample/timestampQuery/TimestampQueryManager.ts
@@ -0,0 +1,109 @@
+// Regroups all timestamp-related operations and resources.
+export default class TimestampQueryManager {
+  // The device may not support timestamp queries, in which case this whole
+  // class does nothing.
+  timestampSupported: boolean;
+
+  // Number of timestamp counters
+  timestampCount: number;
+
+  // The query objects. This is meant to be used in a ComputePassDescriptor's
+  // or RenderPassDescriptor's 'timestampWrites' field.
+  timestampQuerySet: GPUQuerySet;
+
+  // A buffer where to store query results
+  timestampBuffer: GPUBuffer;
+
+  // A buffer to map this result back to CPU
+  timestampMapBuffer: GPUBuffer;
+
+  // State used to avoid firing concurrent readback of timestamp values
+  hasOngoingTimestampReadback: boolean;
+
+  // Device must have the "timestamp-query" feature
+  constructor(device: GPUDevice, timestampCount: number) {
+    this.timestampSupported = device.features.has('timestamp-query');
+    if (!this.timestampSupported) return;
+
+    this.timestampCount = timestampCount;
+
+    // Create timestamp queries
+    this.timestampQuerySet = device.createQuerySet({
+      type: 'timestamp',
+      count: timestampCount,
+    });
+
+    // Create a buffer where to store the result of GPU queries
+    const timestampByteSize = 8; // timestamps are uint64
+    const timestampBufferSize = timestampCount * timestampByteSize;
+    this.timestampBuffer = device.createBuffer({
+      size: timestampBufferSize,
+      usage: GPUBufferUsage.COPY_SRC | GPUBufferUsage.QUERY_RESOLVE,
+    });
+
+    // Create a buffer to map the result back to the CPU
+    this.timestampMapBuffer = device.createBuffer({
+      size: timestampBufferSize,
+      usage: GPUBufferUsage.COPY_DST | GPUBufferUsage.MAP_READ,
+    });
+
+    this.hasOngoingTimestampReadback = false;
+  }
+
+  // Resolve all timestamp queries and copy the result into the map buffer
+  resolveAll(commandEncoder: GPUCommandEncoder) {
+    if (!this.timestampSupported) return;
+
+    // After the end of the measured render pass, we resolve queries into a
+    // dedicated buffer.
+    commandEncoder.resolveQuerySet(
+      this.timestampQuerySet,
+      0 /* firstQuery */,
+      this.timestampCount /* queryCount */,
+      this.timestampBuffer,
+      0 /* destinationOffset */
+    );
+
+    if (!this.hasOngoingTimestampReadback) {
+      // Copy values to the mapped buffer
+      commandEncoder.copyBufferToBuffer(
+        this.timestampBuffer,
+        0,
+        this.timestampMapBuffer,
+        0,
+        this.timestampBuffer.size
+      );
+    }
+  }
+
+  // Once resolved, we can read back the value of timestamps
+  readAsync(onTimestampReadBack: (timestamps: BigUint64Array) => void): void {
+    if (!this.timestampSupported) return;
+    if (this.hasOngoingTimestampReadback) return;
+
+    this.hasOngoingTimestampReadback = true;
+
+    const buffer = this.timestampMapBuffer;
+    buffer
+      .mapAsync(GPUMapMode.READ)
+      .then(() => {
+        const rawData = buffer.getMappedRange();
+        try {
+          // The mapped range becomes invalid after unmap(), so the
+          // callback must consume the timestamps synchronously here.
+          onTimestampReadBack(new BigUint64Array(rawData));
+        } finally {
+          buffer.unmap();
+        }
+      })
+      .catch((err) => {
+        // Report mapping failures instead of silently dropping them.
+        console.error(err);
+      })
+      .finally(() => {
+        // Always release the latch, even when mapping or the callback
+        // throws; otherwise readbacks would stop forever after one error.
+        this.hasOngoingTimestampReadback = false;
+      });
+  }
+}
diff --git a/sample/timestampQuery/index.html b/sample/timestampQuery/index.html
new file mode 100644
index 00000000..5094605f
--- /dev/null
+++ b/sample/timestampQuery/index.html
@@ -0,0 +1,30 @@
+
+
+
+
+
+ webgpu-samples: timestampQuery
+
+
+
+
+
+
+
+
diff --git a/sample/timestampQuery/main.ts b/sample/timestampQuery/main.ts
new file mode 100644
index 00000000..66f11ad0
--- /dev/null
+++ b/sample/timestampQuery/main.ts
@@ -0,0 +1,254 @@
+import { mat4, vec3 } from 'wgpu-matrix';
+
+import {
+  cubeVertexArray,
+  cubeVertexSize,
+  cubeUVOffset,
+  cubePositionOffset,
+  cubeVertexCount,
+} from '../../meshes/cube';
+
+import basicVertWGSL from '../../shaders/basic.vert.wgsl';
+import fragmentWGSL from '../../shaders/black.frag.wgsl';
+import { quitIfWebGPUNotAvailable } from '../util';
+
+import PerfCounter from './PerfCounter';
+import TimestampQueryManager from './TimestampQueryManager';
+
+const canvas = document.querySelector('canvas') as HTMLCanvasElement;
+const adapter = await navigator.gpu?.requestAdapter();
+
+// The use of timestamps requires a dedicated adapter feature:
+// The adapter may or may not support timestamp queries. If not, we simply
+// don't measure timestamps and deactivate the timer display.
+const supportsTimestampQueries = adapter?.features.has('timestamp-query');
+
+const device = await adapter?.requestDevice({
+  // We request a device that has support for timestamp queries
+  requiredFeatures: supportsTimestampQueries ? ['timestamp-query'] : [],
+});
+quitIfWebGPUNotAvailable(adapter, device);
+
+// GPU-side timer and the CPU-side counter where we accumulate statistics:
+// NB: Look for 'timestampQueryManager' in this file to locate parts of this
+// snippet that are related to timestamps. Most of the logic is in
+// TimestampQueryManager.ts.
+const timestampQueryManager = new TimestampQueryManager(device, 2);
+const renderPassDurationCounter = new PerfCounter();
+
+const context = canvas.getContext('webgpu') as GPUCanvasContext;
+
+const devicePixelRatio = window.devicePixelRatio;
+canvas.width = canvas.clientWidth * devicePixelRatio;
+canvas.height = canvas.clientHeight * devicePixelRatio;
+const presentationFormat = navigator.gpu.getPreferredCanvasFormat();
+
+context.configure({
+  device,
+  format: presentationFormat,
+});
+
+// UI for perf counter
+const perfDisplayContainer = document.createElement('div');
+perfDisplayContainer.style.color = 'white';
+perfDisplayContainer.style.background = 'black';
+perfDisplayContainer.style.position = 'absolute';
+perfDisplayContainer.style.top = '10px';
+perfDisplayContainer.style.left = '10px';
+
+const perfDisplay = document.createElement('pre');
+perfDisplayContainer.appendChild(perfDisplay);
+if (canvas.parentNode) {
+  canvas.parentNode.appendChild(perfDisplayContainer);
+} else {
+  console.error('canvas.parentNode is null');
+}
+
+if (!supportsTimestampQueries) {
+  perfDisplay.innerHTML = 'Timestamp queries are not supported';
+}
+
+// Create a vertex buffer from the cube data.
+const verticesBuffer = device.createBuffer({
+  size: cubeVertexArray.byteLength,
+  usage: GPUBufferUsage.VERTEX,
+  mappedAtCreation: true,
+});
+new Float32Array(verticesBuffer.getMappedRange()).set(cubeVertexArray);
+verticesBuffer.unmap();
+
+const pipeline = device.createRenderPipeline({
+  layout: 'auto',
+  vertex: {
+    module: device.createShaderModule({
+      code: basicVertWGSL,
+    }),
+    buffers: [
+      {
+        arrayStride: cubeVertexSize,
+        attributes: [
+          {
+            // position
+            shaderLocation: 0,
+            offset: cubePositionOffset,
+            format: 'float32x4',
+          },
+          {
+            // uv
+            shaderLocation: 1,
+            offset: cubeUVOffset,
+            format: 'float32x2',
+          },
+        ],
+      },
+    ],
+  },
+  fragment: {
+    module: device.createShaderModule({
+      code: fragmentWGSL,
+    }),
+    targets: [
+      {
+        format: presentationFormat,
+      },
+    ],
+  },
+  primitive: {
+    topology: 'triangle-list',
+
+    // Backface culling since the cube is a solid piece of geometry.
+    // Faces pointing away from the camera will be occluded by faces
+    // pointing toward the camera.
+    cullMode: 'back',
+  },
+
+  // Enable depth testing so that the fragment closest to the camera
+  // is rendered in front.
+  depthStencil: {
+    depthWriteEnabled: true,
+    depthCompare: 'less',
+    format: 'depth24plus',
+  },
+});
+
+const depthTexture = device.createTexture({
+  size: [canvas.width, canvas.height],
+  format: 'depth24plus',
+  usage: GPUTextureUsage.RENDER_ATTACHMENT,
+});
+
+const uniformBufferSize = 4 * 16; // 4x4 matrix of f32 (4 bytes each)
+const uniformBuffer = device.createBuffer({
+  size: uniformBufferSize,
+  usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST,
+});
+
+const uniformBindGroup = device.createBindGroup({
+  layout: pipeline.getBindGroupLayout(0),
+  entries: [
+    {
+      binding: 0,
+      resource: {
+        buffer: uniformBuffer,
+      },
+    },
+  ],
+});
+
+const renderPassDescriptor: GPURenderPassDescriptor = {
+  colorAttachments: [
+    {
+      view: undefined, // Assigned each frame, from the current canvas texture
+
+      clearValue: [0.95, 0.95, 0.95, 1.0],
+      loadOp: 'clear',
+      storeOp: 'store',
+    },
+  ],
+  depthStencilAttachment: {
+    view: depthTexture.createView(),
+
+    depthClearValue: 1.0,
+    depthLoadOp: 'clear',
+    depthStoreOp: 'store',
+  },
+  // We instruct the render pass to write timestamps before/after the pass
+  timestampWrites: {
+    querySet: timestampQueryManager.timestampQuerySet,
+    beginningOfPassWriteIndex: 0,
+    endOfPassWriteIndex: 1,
+  },
+};
+
+const aspect = canvas.width / canvas.height;
+const projectionMatrix = mat4.perspective((2 * Math.PI) / 5, aspect, 1, 100.0);
+const modelViewProjectionMatrix = mat4.create();
+
+function getTransformationMatrix() {
+  const viewMatrix = mat4.identity();
+  mat4.translate(viewMatrix, vec3.fromValues(0, 0, -4), viewMatrix);
+  const now = Date.now() / 1000; // seconds; drives the rotation animation
+  mat4.rotate(
+    viewMatrix,
+    vec3.fromValues(Math.sin(now), Math.cos(now), 0),
+    1,
+    viewMatrix
+  );
+
+  mat4.multiply(projectionMatrix, viewMatrix, modelViewProjectionMatrix);
+
+  return modelViewProjectionMatrix;
+}
+
+function frame() {
+  const transformationMatrix = getTransformationMatrix();
+  device.queue.writeBuffer(
+    uniformBuffer,
+    0,
+    transformationMatrix.buffer,
+    transformationMatrix.byteOffset,
+    transformationMatrix.byteLength
+  );
+  renderPassDescriptor.colorAttachments[0].view = context
+    .getCurrentTexture()
+    .createView();
+
+  const commandEncoder = device.createCommandEncoder();
+  const passEncoder = commandEncoder.beginRenderPass(renderPassDescriptor);
+  passEncoder.setPipeline(pipeline);
+  passEncoder.setBindGroup(0, uniformBindGroup);
+  passEncoder.setVertexBuffer(0, verticesBuffer);
+  passEncoder.draw(cubeVertexCount);
+  passEncoder.end();
+
+  // Resolve timestamp queries, so that their result is available in
+  // a GPU-side buffer.
+  timestampQueryManager.resolveAll(commandEncoder);
+
+  device.queue.submit([commandEncoder.finish()]);
+
+  // Read timestamp value back from GPU buffers
+  timestampQueryManager.readAsync((timestamps) => {
+    // This may happen (see spec https://gpuweb.github.io/gpuweb/#timestamp)
+    if (timestamps[1] < timestamps[0]) return;
+
+    // Measure difference (in bigints)
+    const elapsedNs = timestamps[1] - timestamps[0];
+    // Cast into regular int (ok because value is small after difference)
+    // and convert from nanoseconds to milliseconds:
+    const elapsedMs = Number(elapsedNs) * 1e-6;
+    renderPassDurationCounter.addSample(elapsedMs);
+    console.log(
+      'timestamps (ms): elapsed',
+      elapsedMs,
+      'avg',
+      renderPassDurationCounter.getAverage()
+    );
+    perfDisplay.innerHTML = `Render Pass duration: ${renderPassDurationCounter
+      .getAverage()
+      .toFixed(3)} ms ± ${renderPassDurationCounter.getStddev().toFixed(3)} ms`;
+  });
+
+  requestAnimationFrame(frame);
+}
+requestAnimationFrame(frame);
diff --git a/sample/timestampQuery/meta.ts b/sample/timestampQuery/meta.ts
new file mode 100644
index 00000000..5fae2dc3
--- /dev/null
+++ b/sample/timestampQuery/meta.ts
@@ -0,0 +1,14 @@
+export default {
+  name: 'Timestamp Query',
+  description:
+    'This example shows how to use timestamp queries to measure render pass duration.',
+  filename: __DIRNAME__, // NOTE(review): presumably substituted at build time — confirm
+  sources: [
+    { path: 'TimestampQueryManager.ts' },
+    { path: 'PerfCounter.ts' },
+    { path: 'main.ts' },
+    { path: '../../shaders/basic.vert.wgsl' },
+    { path: '../../shaders/black.frag.wgsl' },
+    { path: '../../meshes/cube.ts' },
+  ],
+};
diff --git a/shaders/black.frag.wgsl b/shaders/black.frag.wgsl
new file mode 100644
index 00000000..cb832ded
--- /dev/null
+++ b/shaders/black.frag.wgsl
@@ -0,0 +1,4 @@
+@fragment
+fn main() -> @location(0) vec4f {
+  return vec4(0.0, 0.0, 0.0, 1.0); // constant opaque black
+}
\ No newline at end of file
diff --git a/src/samples.ts b/src/samples.ts
index 3c5d390c..626db011 100644
--- a/src/samples.ts
+++ b/src/samples.ts
@@ -34,6 +34,7 @@ import skinnedMesh from '../sample/skinnedMesh/meta';
import spookyball from '../sample/spookyball/meta';
import textRenderingMsdf from '../sample/textRenderingMsdf/meta';
import texturedCube from '../sample/texturedCube/meta';
+import timestampQuery from '../sample/timestampQuery/meta';
import transparentCanvas from '../sample/transparentCanvas/meta';
import twoCubes from '../sample/twoCubes/meta';
import videoUploading from '../sample/videoUploading/meta';
@@ -94,6 +95,7 @@ export const pageCategories: PageCategory[] = [
occlusionQuery,
samplerParameters,
alphaToCoverage,
+ timestampQuery,
},
},