|
45 | 45 | <script type="module">
|
46 | 46 | import GUI from '/3rdparty/muigui-0.x.module.js';
|
47 | 47 | import {mat4, mat3, vec3} from '/3rdparty/wgpu-matrix.module.js';
|
| 48 | +import {RollingAverage, TimingHelper} from './timing-helper.js'; |
| 49 | + |
| 50 | +const fpsAverage = new RollingAverage(); |
| 51 | +const jsAverage = new RollingAverage(); |
| 52 | +const gpuAverage = new RollingAverage(); |
| 53 | +const mathAverage = new RollingAverage(); |
48 | 54 |
|
49 | 55 | const cssColorToRGBA8 = (() => {
|
50 | 56 | const canvas = new OffscreenCanvas(1, 1);
|
|
77 | 83 |
|
78 | 84 | async function main() {
|
79 | 85 | const adapter = await navigator.gpu?.requestAdapter();
|
80 |
| - const device = await adapter?.requestDevice(); |
| 86 | + const canTimestamp = adapter?.features.has('timestamp-query'); |
| 87 | + const device = await adapter?.requestDevice({ |
| 88 | + requiredFeatures: [ |
| 89 | + ...(canTimestamp ? ['timestamp-query'] : []), |
| 90 | + ], |
| 91 | + }); |
81 | 92 | if (!device) {
|
82 |
| - fail('need a browser that supports WebGPU'); |
83 |
| - return; |
| 93 | + fail('could not init WebGPU'); |
84 | 94 | }
|
85 | 95 |
|
| 96 | + const timingHelper = new TimingHelper(device); |
86 | 97 | const infoElem = document.querySelector('#info');
|
87 | 98 |
|
88 | 99 | // Get a WebGPU context from the canvas and configure it
|
|
266 | 277 | minFilter: 'nearest',
|
267 | 278 | });
|
268 | 279 |
|
269 |
| - const maxObjects = 100; |
| 280 | + const maxObjects = 10000; |
270 | 281 | const objectInfos = [];
|
271 | 282 |
|
272 | 283 | for (let i = 0; i < maxObjects; ++i) {
|
|
349 | 360 | colorAttachments: [
|
350 | 361 | {
|
351 | 362 | // view: <- to be filled out when we render
|
| 363 | + clearValue: [0.3, 0.3, 0.3, 1], |
352 | 364 | loadOp: 'clear',
|
353 | 365 | storeOp: 'store',
|
354 | 366 | },
|
|
361 | 373 | },
|
362 | 374 | };
|
363 | 375 |
|
| 376 | + const canvasToSizeMap = new WeakMap(); |
364 | 377 | const degToRad = d => d * Math.PI / 180;
|
365 | 378 |
|
366 | 379 | const settings = {
|
367 |
| - numObjects: maxObjects, |
| 380 | + numObjects: 1000, |
368 | 381 | render: true,
|
369 | 382 | };
|
370 | 383 |
|
|
384 | 397 |
|
385 | 398 | const startTimeMs = performance.now();
|
386 | 399 |
|
| 400 | + let width = 1; |
| 401 | + let height = 1; |
| 402 | + if (settings.render) { |
| 403 | + const entry = canvasToSizeMap.get(canvas); |
| 404 | + if (entry) { |
| 405 | + width = Math.max(1, Math.min(entry.contentBoxSize[0].inlineSize, device.limits.maxTextureDimension2D)); |
| 406 | + height = Math.max(1, Math.min(entry.contentBoxSize[0].blockSize, device.limits.maxTextureDimension2D)); |
| 407 | + } |
| 408 | + } |
| 409 | + if (canvas.width !== width || canvas.height !== height) { |
| 410 | + canvas.width = width; |
| 411 | + canvas.height = height; |
| 412 | + } |
| 413 | + |
387 | 414 | // Get the current texture from the canvas context and
|
388 | 415 | // set it as the texture to render to.
|
389 | 416 | const canvasTexture = context.getCurrentTexture();
|
|
406 | 433 | renderPassDescriptor.depthStencilAttachment.view = depthTexture.createView();
|
407 | 434 |
|
408 | 435 | const encoder = device.createCommandEncoder();
|
409 |
| - const pass = encoder.beginRenderPass(renderPassDescriptor); |
| 436 | + const pass = timingHelper.beginRenderPass(encoder, renderPassDescriptor); |
410 | 437 | pass.setPipeline(pipeline);
|
411 | 438 | pass.setVertexBuffer(0, positionBuffer);
|
412 | 439 | pass.setVertexBuffer(1, normalBuffer);
|
|
431 | 458 | // Combine the view and projection matrixes
|
432 | 459 | const viewProjectionMatrix = mat4.multiply(projection, viewMatrix);
|
433 | 460 |
|
| 461 | + let mathElapsedTimeMs = 0; |
| 462 | + |
434 | 463 | for (let i = 0; i < settings.numObjects; ++i) {
|
435 | 464 | const {
|
436 | 465 | bindGroup,
|
|
452 | 481 | scale,
|
453 | 482 | shininess,
|
454 | 483 | } = objectInfos[i];
|
| 484 | + const mathTimeStartMs = performance.now(); |
| 485 | + |
455 | 486 | // Copy the viewProjectionMatrix into the uniform values for this object
|
456 | 487 | viewProjectionValue.set(viewProjectionMatrix);
|
457 | 488 |
|
458 | 489 | // Compute a world matrix
|
| 490 | +// mat4.identity(worldValue); |
| 491 | +// mat4.axisRotate(worldValue, axis, time * speed, worldValue); |
| 492 | +// mat4.translate(worldValue, [radius, 0, 0], worldValue); |
| 493 | +// mat4.rotateY(worldValue, rotationSpeed * time, worldValue); |
| 494 | +// mat4.scale(worldValue, [scale, scale, scale], worldValue); |
| 495 | + |
459 | 496 | mat4.identity(worldValue);
|
460 |
| - mat4.axisRotate(worldValue, axis, time * speed, worldValue); |
461 |
| - mat4.translate(worldValue, [radius, 0, 0], worldValue); |
462 |
| - mat4.rotateY(worldValue, rotationSpeed * time, worldValue); |
| 497 | + mat4.axisRotate(worldValue, axis, i + time * speed, worldValue); |
| 498 | + mat4.translate(worldValue, [0, 0, Math.sin(i * 3.721 + time * speed) * radius], worldValue); |
| 499 | + mat4.translate(worldValue, [0, 0, Math.sin(i * 9.721 + time * 0.1) * radius], worldValue); |
| 500 | + mat4.rotateX(worldValue, time * rotationSpeed + i, worldValue); |
463 | 501 | mat4.scale(worldValue, [scale, scale, scale], worldValue);
|
464 | 502 |
|
465 | 503 | // Inverse and transpose it into the worldInverseTranspose value
|
|
470 | 508 | viewWorldPositionValue.set(eye);
|
471 | 509 | shininessValue[0] = shininess;
|
472 | 510 |
|
| 511 | + mathElapsedTimeMs += performance.now() - mathTimeStartMs; |
| 512 | + |
473 | 513 | // upload the uniform values to the uniform buffer
|
474 | 514 | device.queue.writeBuffer(uniformBuffer, 0, uniformValues);
|
475 | 515 |
|
|
482 | 522 | const commandBuffer = encoder.finish();
|
483 | 523 | device.queue.submit([commandBuffer]);
|
484 | 524 |
|
| 525 | + timingHelper.getResult().then(gpuTime => { |
| 526 | + gpuAverage.addSample(gpuTime / 1000); |
| 527 | + }); |
| 528 | + |
485 | 529 | const elapsedTimeMs = performance.now() - startTimeMs;
|
| 530 | + fpsAverage.addSample(1 / deltaTime); |
| 531 | + jsAverage.addSample(elapsedTimeMs); |
| 532 | + mathAverage.addSample(mathElapsedTimeMs); |
| 533 | + |
| 534 | + |
486 | 535 | infoElem.textContent = `\
|
487 |
| -js : ${elapsedTimeMs.toFixed(0)}ms |
488 |
| -fps: ${(1 / deltaTime).toFixed(1)} |
| 536 | +js : ${jsAverage.get().toFixed(1)}ms |
| 537 | +math: ${mathAverage.get().toFixed(1)}ms |
| 538 | +fps : ${fpsAverage.get().toFixed(0)} |
| 539 | +gpu : ${canTimestamp ? `${(gpuAverage.get() / 1000).toFixed(1)}ms` : 'N/A'} |
489 | 540 | `;
|
490 | 541 |
|
491 | 542 | requestAnimationFrame(render);
|
492 | 543 | }
|
493 | 544 | requestAnimationFrame(render);
|
494 | 545 |
|
495 | 546 | const observer = new ResizeObserver(entries => {
|
496 |
| - for (const entry of entries) { |
497 |
| - const canvas = entry.target; |
498 |
| - const width = entry.contentBoxSize[0].inlineSize; |
499 |
| - const height = entry.contentBoxSize[0].blockSize; |
500 |
| - canvas.width = Math.max(1, Math.min(width, device.limits.maxTextureDimension2D)); |
501 |
| - canvas.height = Math.max(1, Math.min(height, device.limits.maxTextureDimension2D)); |
502 |
| - } |
| 547 | + entries.forEach(e => canvasToSizeMap.set(e.target, e)); |
503 | 548 | });
|
504 | 549 | observer.observe(canvas);
|
505 | 550 | }
|
|
0 commit comments