diff --git a/sample/helloTriangle/main.ts b/sample/helloTriangle/main.ts index 55997d30..209f675b 100644 --- a/sample/helloTriangle/main.ts +++ b/sample/helloTriangle/main.ts @@ -17,7 +17,6 @@ const presentationFormat = navigator.gpu.getPreferredCanvasFormat(); context.configure({ device, format: presentationFormat, - alphaMode: 'premultiplied', }); const pipeline = device.createRenderPipeline({ diff --git a/sample/helloTriangleMSAA/main.ts b/sample/helloTriangleMSAA/main.ts index 465d8867..b200a69b 100644 --- a/sample/helloTriangleMSAA/main.ts +++ b/sample/helloTriangleMSAA/main.ts @@ -17,7 +17,6 @@ const presentationFormat = navigator.gpu.getPreferredCanvasFormat(); context.configure({ device, format: presentationFormat, - alphaMode: 'premultiplied', }); const sampleCount = 4; diff --git a/sample/transparentCanvas/index.html b/sample/transparentCanvas/index.html new file mode 100644 index 00000000..321b928c --- /dev/null +++ b/sample/transparentCanvas/index.html @@ -0,0 +1,65 @@ + + + + + + webgpu-samples: transparentCanvas + + + + + + +

WebGPU

+

WebGPU exposes an API for performing operations, such as rendering + and computation, on a Graphics Processing Unit. + +

Graphics Processing Units, or GPUs for short, have been essential + in enabling rich rendering and computational applications in personal + computing. WebGPU is an API that exposes the capabilities of GPU + hardware for the Web. The API is designed from the ground up to + efficiently map to (post-2014) native GPU APIs. WebGPU is not related + to WebGL and does not explicitly target OpenGL ES. + +

WebGPU sees physical GPU hardware as GPUAdapters. It provides a + connection to an adapter via GPUDevice, which manages resources, and + the device's GPUQueues, which execute commands. GPUDevice may have + its own memory with high-speed access to the processing units. + GPUBuffer and GPUTexture are the physical resources backed by GPU + memory. GPUCommandBuffer and GPURenderBundle are containers for + user-recorded commands. GPUShaderModule contains shader code. The + other resources, such as GPUSampler or GPUBindGroup, configure the + way physical resources are used by the GPU. + +

GPUs execute commands encoded in GPUCommandBuffers by feeding data + through a pipeline, which is a mix of fixed-function and programmable + stages. Programmable stages execute shaders, which are special + programs designed to run on GPU hardware. Most of the state of a + pipeline is defined by a GPURenderPipeline or a GPUComputePipeline + object. The state not included in these pipeline objects is set + during encoding with commands, such as beginRenderPass() or + setBlendConstant().` + + diff --git a/sample/transparentCanvas/main.ts b/sample/transparentCanvas/main.ts new file mode 100644 index 00000000..f13415a8 --- /dev/null +++ b/sample/transparentCanvas/main.ts @@ -0,0 +1,178 @@ +import { mat4, vec3 } from 'wgpu-matrix'; + +import { + cubeVertexArray, + cubeVertexSize, + cubeUVOffset, + cubePositionOffset, + cubeVertexCount, +} from '../../meshes/cube'; + +import basicVertWGSL from '../../shaders/basic.vert.wgsl'; +import vertexPositionColorWGSL from '../../shaders/vertexPositionColor.frag.wgsl'; +import { quitIfWebGPUNotAvailable } from '../util'; + +const canvas = document.querySelector('canvas') as HTMLCanvasElement; +const adapter = await navigator.gpu?.requestAdapter(); +const device = await adapter?.requestDevice(); +quitIfWebGPUNotAvailable(adapter, device); + +const context = canvas.getContext('webgpu') as GPUCanvasContext; + +const devicePixelRatio = window.devicePixelRatio; +canvas.width = canvas.clientWidth * devicePixelRatio; +canvas.height = canvas.clientHeight * devicePixelRatio; +const presentationFormat = navigator.gpu.getPreferredCanvasFormat(); + +context.configure({ + device, + format: presentationFormat, + // The canvas alphaMode defaults to 'opaque', use 'premultiplied' for transparency. + alphaMode: 'premultiplied', +}); + +// Create a vertex buffer from the cube data. 
+const verticesBuffer = device.createBuffer({ + size: cubeVertexArray.byteLength, + usage: GPUBufferUsage.VERTEX, + mappedAtCreation: true, +}); +new Float32Array(verticesBuffer.getMappedRange()).set(cubeVertexArray); +verticesBuffer.unmap(); + +const pipeline = device.createRenderPipeline({ + layout: 'auto', + vertex: { + module: device.createShaderModule({ + code: basicVertWGSL, + }), + buffers: [ + { + arrayStride: cubeVertexSize, + attributes: [ + { + // position + shaderLocation: 0, + offset: cubePositionOffset, + format: 'float32x4', + }, + { + // uv + shaderLocation: 1, + offset: cubeUVOffset, + format: 'float32x2', + }, + ], + }, + ], + }, + fragment: { + module: device.createShaderModule({ + code: vertexPositionColorWGSL, + }), + targets: [ + { + format: presentationFormat, + }, + ], + }, + primitive: { + topology: 'triangle-list', + cullMode: 'back', + }, + + depthStencil: { + depthWriteEnabled: true, + depthCompare: 'less', + format: 'depth24plus', + }, +}); + +const depthTexture = device.createTexture({ + size: [canvas.width, canvas.height], + format: 'depth24plus', + usage: GPUTextureUsage.RENDER_ATTACHMENT, +}); + +const uniformBufferSize = 4 * 16; // 4x4 matrix +const uniformBuffer = device.createBuffer({ + size: uniformBufferSize, + usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST, +}); + +const uniformBindGroup = device.createBindGroup({ + layout: pipeline.getBindGroupLayout(0), + entries: [ + { + binding: 0, + resource: { + buffer: uniformBuffer, + }, + }, + ], +}); + +const renderPassDescriptor: GPURenderPassDescriptor = { + colorAttachments: [ + { + view: undefined, // Assigned later + + clearValue: [0.5, 0.5, 0.5, 0.0], // Clear alpha to 0 + loadOp: 'clear', + storeOp: 'store', + }, + ], + depthStencilAttachment: { + view: depthTexture.createView(), + + depthClearValue: 1.0, + depthLoadOp: 'clear', + depthStoreOp: 'store', + }, +}; + +const aspect = canvas.width / canvas.height; +const projectionMatrix = mat4.perspective((2 * Math.PI) / 
5, aspect, 1, 100.0); +const modelViewProjectionMatrix = mat4.create(); + +function getTransformationMatrix() { + const viewMatrix = mat4.identity(); + mat4.translate(viewMatrix, vec3.fromValues(0, 0, -4), viewMatrix); + const now = Date.now() / 1000; + mat4.rotate( + viewMatrix, + vec3.fromValues(Math.sin(now), Math.cos(now), 0), + 1, + viewMatrix + ); + + mat4.multiply(projectionMatrix, viewMatrix, modelViewProjectionMatrix); + + return modelViewProjectionMatrix; +} + +function frame() { + const transformationMatrix = getTransformationMatrix(); + device.queue.writeBuffer( + uniformBuffer, + 0, + transformationMatrix.buffer, + transformationMatrix.byteOffset, + transformationMatrix.byteLength + ); + renderPassDescriptor.colorAttachments[0].view = context + .getCurrentTexture() + .createView(); + + const commandEncoder = device.createCommandEncoder(); + const passEncoder = commandEncoder.beginRenderPass(renderPassDescriptor); + passEncoder.setPipeline(pipeline); + passEncoder.setBindGroup(0, uniformBindGroup); + passEncoder.setVertexBuffer(0, verticesBuffer); + passEncoder.draw(cubeVertexCount); + passEncoder.end(); + device.queue.submit([commandEncoder.finish()]); + + requestAnimationFrame(frame); +} +requestAnimationFrame(frame); diff --git a/sample/transparentCanvas/meta.ts b/sample/transparentCanvas/meta.ts new file mode 100644 index 00000000..1b557ae0 --- /dev/null +++ b/sample/transparentCanvas/meta.ts @@ -0,0 +1,11 @@ +export default { + name: 'Transparent Canvas', + description: 'This example shows use of a transparent WebGPU canvas.', + filename: __DIRNAME__, + sources: [ + { path: 'main.ts' }, + { path: '../../shaders/basic.vert.wgsl' }, + { path: '../../shaders/vertexPositionColor.frag.wgsl' }, + { path: '../../meshes/cube.ts' }, + ], +}; diff --git a/src/samples.ts b/src/samples.ts index f8aa64d8..d3727dc4 100644 --- a/src/samples.ts +++ b/src/samples.ts @@ -33,6 +33,7 @@ import skinnedMesh from '../sample/skinnedMesh/meta'; import spookyball from 
'../sample/spookyball/meta'; import textRenderingMsdf from '../sample/textRenderingMsdf/meta'; import texturedCube from '../sample/texturedCube/meta'; +import transparentCanvas from '../sample/transparentCanvas/meta'; import twoCubes from '../sample/twoCubes/meta'; import videoUploading from '../sample/videoUploading/meta'; import volumeRenderingTexture3D from '../sample/volumeRenderingTexture3D/meta'; @@ -141,6 +142,7 @@ export const pageCategories: PageCategory[] = [ samples: { resizeCanvas, resizeObserverHDDPI, + transparentCanvas, multipleCanvases, videoUploading, worker,