diff --git a/public/assets/img/brickwall_diffuse.png b/public/assets/img/brickwall_albedo.png similarity index 100% rename from public/assets/img/brickwall_diffuse.png rename to public/assets/img/brickwall_albedo.png diff --git a/public/assets/img/toybox_height.png b/public/assets/img/toybox_height.png index 9977210b..35510d73 100644 Binary files a/public/assets/img/toybox_height.png and b/public/assets/img/toybox_height.png differ diff --git a/public/assets/img/toybox_normal.png b/public/assets/img/toybox_normal.png index 91bcb56a..634728fb 100644 Binary files a/public/assets/img/toybox_normal.png and b/public/assets/img/toybox_normal.png differ diff --git a/public/assets/img/wood_diffuse.png b/public/assets/img/wood_albedo.png similarity index 100% rename from public/assets/img/wood_diffuse.png rename to public/assets/img/wood_albedo.png diff --git a/public/img/brickwall_diffuse.png b/public/img/brickwall_albedo.png similarity index 100% rename from public/img/brickwall_diffuse.png rename to public/img/brickwall_albedo.png diff --git a/public/img/toybox_height.png b/public/img/toybox_height.png index 9977210b..77e5d98b 100644 Binary files a/public/img/toybox_height.png and b/public/img/toybox_height.png differ diff --git a/public/img/toybox_normal.png b/public/img/toybox_normal.png index 91bcb56a..e7356512 100644 Binary files a/public/img/toybox_normal.png and b/public/img/toybox_normal.png differ diff --git a/public/img/wood_diffuse.png b/public/img/wood_albedo.png similarity index 100% rename from public/img/wood_diffuse.png rename to public/img/wood_albedo.png diff --git a/src/meshes/box.ts b/src/meshes/box.ts index b15ea483..2a462b5f 100644 --- a/src/meshes/box.ts +++ b/src/meshes/box.ts @@ -1,347 +1,105 @@ -import { vec3 } from 'wgpu-matrix'; -import { getMeshPosAtIndex, getMeshUVAtIndex, Mesh } from './mesh'; - -export interface BoxMesh extends Mesh { - vertices: Float32Array; - indices: Uint16Array | Uint32Array; - vertexStride: number; -} - -//// Borrowed 
and simplified from https://github.com/mrdoob/three.js/blob/master/src/geometries/BoxGeometry.js -//// Presumes vertex buffer alignment of verts, normals, and uvs -const createBoxGeometry = ( - width = 1.0, - height = 1.0, - depth = 1.0, - widthSegments = 1.0, - heightSegments = 1.0, - depthSegments = 1.0 -) => { - widthSegments = Math.floor(widthSegments); - heightSegments = Math.floor(heightSegments); - depthSegments = Math.floor(depthSegments); - - const indices = []; - const vertNormalUVBuffer = []; - - let numVertices = 0; - - const buildPlane = ( - u: 0 | 1 | 2, - v: 0 | 1 | 2, - w: 0 | 1 | 2, - udir: -1 | 1, - vdir: -1 | 1, - planeWidth: number, - planeHeight: number, - planeDepth: number, - xSections: number, - ySections: number - ) => { - const segmentWidth = planeWidth / xSections; - const segmentHeight = planeHeight / ySections; - - const widthHalf = planeWidth / 2; - const heightHalf = planeHeight / 2; - const depthHalf = planeDepth / 2; - - const gridX1 = xSections + 1; - const gridY1 = ySections + 1; - - let vertexCounter = 0; - - const vertex = vec3.create(); - const normal = vec3.create(); - for (let iy = 0; iy < gridY1; iy++) { - const y = iy * segmentHeight - heightHalf; - - for (let ix = 0; ix < gridX1; ix++) { - const x = ix * segmentWidth - widthHalf; - - //Calculate plane vertices - vertex[u] = x * udir; - vertex[v] = y * vdir; - vertex[w] = depthHalf; - vertNormalUVBuffer.push(...vertex); - - //Caclulate normal - normal[u] = 0; - normal[v] = 0; - normal[w] = planeDepth > 0 ? 
1.0 : -1.0; - vertNormalUVBuffer.push(...normal); - - //Calculate uvs - vertNormalUVBuffer.push(ix / xSections); - vertNormalUVBuffer.push(1 - iy / ySections); - - vertexCounter += 1; - } - } - - for (let iy = 0; iy < ySections; iy++) { - for (let ix = 0; ix < xSections; ix++) { - const a = numVertices + ix + gridX1 * iy; - const b = numVertices + ix + gridX1 * (iy + 1); - const c = numVertices + (ix + 1) + gridX1 * (iy + 1); - const d = numVertices + (ix + 1) + gridX1 * iy; - - //Push vertex indices - //6 indices for each face - indices.push(a, b, d); - indices.push(b, c, d); - - numVertices += vertexCounter; +import { Mesh } from './mesh'; + +/** + * Constructs a box mesh with the given dimensions. + * The vertex buffer will have the following vertex fields (in the given order): + * position : float32x3 + * normal : float32x3 + * uv : float32x2 + * tangent : float32x3 + * bitangent : float32x3 + * @param width the width of the box + * @param height the height of the box + * @param depth the depth of the box + * @returns the box mesh with tangent and bitangents. 
+ */ +export function createBoxMeshWithTangents( + width: number, + height: number, + depth: number +): Mesh { + // __________ + // / /| y + // / +y / | ^ + // /_________/ | | + // | |+x| +---> x + // | +z | | / + // | | / z + // |_________|/ + // + const pX = 0; // +x + const nX = 1; // -x + const pY = 2; // +y + const nY = 3; // -y + const pZ = 4; // +z + const nZ = 5; // -z + const faces = [ + { tangent: nZ, bitangent: pY, normal: pX }, + { tangent: pZ, bitangent: pY, normal: nX }, + { tangent: pX, bitangent: nZ, normal: pY }, + { tangent: pX, bitangent: pZ, normal: nY }, + { tangent: pX, bitangent: pY, normal: pZ }, + { tangent: nX, bitangent: pY, normal: nZ }, + ]; + const verticesPerSide = 4; + const indicesPerSize = 6; + const f32sPerVertex = 14; // position : vec3f, normal : vec3f, uv : vec2f, tangent : vec3f, bitangent : vec3f (matches write order below) + const vertexStride = f32sPerVertex * 4; + const vertices = new Float32Array( + faces.length * verticesPerSide * f32sPerVertex + ); + const indices = new Uint16Array(faces.length * indicesPerSize); + const halfVecs = [ + [+width / 2, 0, 0], // +x + [-width / 2, 0, 0], // -x + [0, +height / 2, 0], // +y + [0, -height / 2, 0], // -y + [0, 0, +depth / 2], // +z + [0, 0, -depth / 2], // -z + ]; + + let vertexOffset = 0; + let indexOffset = 0; + for (let faceIndex = 0; faceIndex < faces.length; faceIndex++) { + const face = faces[faceIndex]; + const tangent = halfVecs[face.tangent]; + const bitangent = halfVecs[face.bitangent]; + const normal = halfVecs[face.normal]; + + for (let u = 0; u < 2; u++) { + for (let v = 0; v < 2; v++) { + for (let i = 0; i < 3; i++) { + vertices[vertexOffset++] = + normal[i] + + (u == 0 ? -1 : 1) * tangent[i] + + (v == 0 ? 
-1 : 1) * bitangent[i]; + } + for (let i = 0; i < 3; i++) { + vertices[vertexOffset++] = normal[i]; + } + vertices[vertexOffset++] = u; + vertices[vertexOffset++] = v; + for (let i = 0; i < 3; i++) { + vertices[vertexOffset++] = tangent[i]; + } + for (let i = 0; i < 3; i++) { + vertices[vertexOffset++] = bitangent[i]; + } } } - }; - - //Side face - buildPlane( - 2, //z - 1, //y - 0, //x - -1, - -1, - depth, - height, - width, - depthSegments, - heightSegments - ); - - //Side face - buildPlane( - 2, //z - 1, //y - 0, //x - 1, - -1, - depth, - height, - -width, - depthSegments, - heightSegments - ); - - //Bottom face - buildPlane( - 0, //x - 2, //z - 1, //y - 1, - 1, - width, - depth, - height, - widthSegments, - depthSegments - ); - - //Top face - buildPlane( - 0, //x - 2, //z - 1, //y - 1, - -1, - width, - depth, - -height, - widthSegments, - depthSegments - ); - - //Side faces - buildPlane( - 0, //x - 1, //y - 2, //z - 1, - -1, - width, - height, - depth, - widthSegments, - heightSegments - ); - //Side face - buildPlane( - 0, //x - 1, //y - 2, //z - -1, - -1, - width, - height, - -depth, - widthSegments, - heightSegments - ); + indices[indexOffset++] = faceIndex * verticesPerSide + 0; + indices[indexOffset++] = faceIndex * verticesPerSide + 2; + indices[indexOffset++] = faceIndex * verticesPerSide + 1; - return { - vertices: vertNormalUVBuffer, - indices: indices, - }; -}; - -type IndexFormat = 'uint16' | 'uint32'; - -// Box mesh code ported from threejs, with addition of indexFormat specifier for vertex pulling -export const createBoxMesh = ( - width = 1.0, - height = 1.0, - depth = 1.0, - widthSegments = 1.0, - heightSegments = 1.0, - depthSegments = 1.0, - indexFormat: IndexFormat = 'uint16' -): Mesh => { - const { vertices, indices } = createBoxGeometry( - width, - height, - depth, - widthSegments, - heightSegments, - depthSegments - ); - - const vertexStride = 8 * Float32Array.BYTES_PER_ELEMENT; //calculateVertexStride(vertexProperties); - - const 
indicesArray = - indexFormat === 'uint16' - ? new Uint16Array(indices) - : new Uint32Array(indices); - - return { - vertices: new Float32Array(vertices), - indices: indicesArray, - vertexStride: vertexStride, - }; -}; - -export const createBoxMeshWithTangents = ( - width = 1.0, - height = 1.0, - depth = 1.0, - widthSegments = 1.0, - heightSegments = 1.0, - depthSegments = 1.0 -): Mesh => { - const mesh = createBoxMesh( - width, - height, - depth, - widthSegments, - heightSegments, - depthSegments - ); - - const originalStrideElements = - mesh.vertexStride / Float32Array.BYTES_PER_ELEMENT; - - const vertexCount = mesh.vertices.length / originalStrideElements; - - const tangents = new Array(vertexCount); - const bitangents = new Array(vertexCount); - const counts = new Array(vertexCount); - for (let i = 0; i < vertexCount; i++) { - tangents[i] = [0, 0, 0]; - bitangents[i] = [0, 0, 0]; - counts[i] = 0; - } - - for (let i = 0; i < mesh.indices.length; i += 3) { - const [idx1, idx2, idx3] = [ - mesh.indices[i], - mesh.indices[i + 1], - mesh.indices[i + 2], - ]; - - const [pos1, pos2, pos3] = [ - getMeshPosAtIndex(mesh, idx1), - getMeshPosAtIndex(mesh, idx2), - getMeshPosAtIndex(mesh, idx3), - ]; - - const [uv1, uv2, uv3] = [ - getMeshUVAtIndex(mesh, idx1), - getMeshUVAtIndex(mesh, idx2), - getMeshUVAtIndex(mesh, idx3), - ]; - - const edge1 = vec3.sub(pos2, pos1); - const edge2 = vec3.sub(pos3, pos1); - const deltaUV1 = vec3.sub(uv2, uv1); - const deltaUV2 = vec3.sub(uv3, uv1); - - // Edge of a triangle moves in both u and v direction (2d) - // deltaU * tangent vector + deltav * bitangent - // Manipulating the data into matrices, we get an equation - - const constantVal = - 1.0 / (deltaUV1[0] * deltaUV2[1] - deltaUV1[1] * deltaUV2[0]); - - const tangent = [ - constantVal * (deltaUV2[1] * edge1[0] - deltaUV1[1] * edge2[0]), - constantVal * (deltaUV2[1] * edge1[1] - deltaUV1[1] * edge2[1]), - constantVal * (deltaUV2[1] * edge1[2] - deltaUV1[1] * edge2[2]), - ]; - - const 
bitangent = [ - constantVal * (-deltaUV2[0] * edge1[0] + deltaUV1[0] * edge2[0]), - constantVal * (-deltaUV2[0] * edge1[1] + deltaUV1[0] * edge2[1]), - constantVal * (-deltaUV2[0] * edge1[2] + deltaUV1[0] * edge2[2]), - ]; - - //Accumulate tangents and bitangents - tangents[idx1] = vec3.add(tangents[idx1], tangent); - bitangents[idx1] = vec3.add(bitangents[idx1], bitangent); - tangents[idx2] = vec3.add(tangents[idx2], tangent); - bitangents[idx2] = vec3.add(bitangents[idx2], bitangent); - tangents[idx3] = vec3.add(tangents[idx3], tangent); - bitangents[idx3] = vec3.add(bitangents[idx3], bitangent); - - //Increment index count - counts[idx1]++; - counts[idx2]++; - counts[idx3]++; - } - - for (let i = 0; i < tangents.length; i++) { - tangents[i] = vec3.divScalar(tangents[i], counts[i]); - bitangents[i] = vec3.divScalar(bitangents[i], counts[i]); - } - - const newStrideElements = 14; - const wTangentArray = new Float32Array(vertexCount * newStrideElements); - - for (let i = 0; i < vertexCount; i++) { - //Copy original vertex data (pos, normal uv) - wTangentArray.set( - //Get the original vertex [8 elements] (3 ele pos, 3 ele normal, 2 ele uv) - mesh.vertices.subarray( - i * originalStrideElements, - (i + 1) * originalStrideElements - ), - //And put it at the proper location in the new array [14 bytes = 8 og + 6 empty] - i * newStrideElements - ); - //For each vertex, place tangent after originalStride - wTangentArray.set( - tangents[i], - i * newStrideElements + originalStrideElements - ); - //Place bitangent after 3 elements of tangent - wTangentArray.set( - bitangents[i], - i * newStrideElements + originalStrideElements + 3 - ); + indices[indexOffset++] = faceIndex * verticesPerSide + 2; + indices[indexOffset++] = faceIndex * verticesPerSide + 3; + indices[indexOffset++] = faceIndex * verticesPerSide + 1; } return { - vertices: wTangentArray, - indices: mesh.indices, - vertexStride: mesh.vertexStride + Float32Array.BYTES_PER_ELEMENT * 3 * 2, + vertices, + indices, + 
vertexStride, }; -}; +} diff --git a/src/sample/normalMap/main.ts b/src/sample/normalMap/main.ts index cbc2c0a8..78b2e9c1 100644 --- a/src/sample/normalMap/main.ts +++ b/src/sample/normalMap/main.ts @@ -1,4 +1,4 @@ -import { mat4 } from 'wgpu-matrix'; +import { mat4, vec3 } from 'wgpu-matrix'; import { makeSample, SampleInit } from '../../components/SampleLayout'; import normalMapWGSL from './normalMap.wgsl'; import { createMeshRenderable } from '../../meshes/mesh'; @@ -33,7 +33,7 @@ const init: SampleInit = async ({ canvas, pageState, gui }) => { interface GUISettings { 'Bump Mode': - | 'Diffuse Texture' + | 'Albedo Texture' | 'Normal Texture' | 'Depth Texture' | 'Normal Map' @@ -60,7 +60,7 @@ const init: SampleInit = async ({ canvas, pageState, gui }) => { lightPosX: 1.7, lightPosY: 0.7, lightPosZ: -1.9, - lightIntensity: 0.02, + lightIntensity: 5.0, depthScale: 0.05, depthLayers: 16, Texture: 'Spiral', @@ -76,24 +76,26 @@ const init: SampleInit = async ({ canvas, pageState, gui }) => { usage: GPUTextureUsage.RENDER_ATTACHMENT, }); - const uniformBuffer = device.createBuffer({ + const spaceTransformsBuffer = device.createBuffer({ // Buffer holding projection, view, and model matrices plus padding bytes size: MAT4X4_BYTES * 4, usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST, }); - const mapMethodBuffer = device.createBuffer({ + const mapInfoBuffer = device.createBuffer({ // Buffer holding mapping type, light uniforms, and depth uniforms - size: Float32Array.BYTES_PER_ELEMENT * 7, + size: Float32Array.BYTES_PER_ELEMENT * 8, usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST, }); + const mapInfoArray = new ArrayBuffer(mapInfoBuffer.size); + const mapInfoView = new DataView(mapInfoArray, 0, mapInfoArray.byteLength); // Fetch the image and upload it into a GPUTexture. 
- let woodDiffuseTexture: GPUTexture; + let woodAlbedoTexture: GPUTexture; { - const response = await fetch('../assets/img/wood_diffuse.png'); + const response = await fetch('../assets/img/wood_albedo.png'); const imageBitmap = await createImageBitmap(await response.blob()); - woodDiffuseTexture = createTextureFromImage(device, imageBitmap); + woodAlbedoTexture = createTextureFromImage(device, imageBitmap); } let spiralNormalTexture: GPUTexture; @@ -124,11 +126,11 @@ const init: SampleInit = async ({ canvas, pageState, gui }) => { toyboxHeightTexture = createTextureFromImage(device, imageBitmap); } - let brickwallDiffuseTexture: GPUTexture; + let brickwallAlbedoTexture: GPUTexture; { - const response = await fetch('../assets/img/brickwall_diffuse.png'); + const response = await fetch('../assets/img/brickwall_albedo.png'); const imageBitmap = await createImageBitmap(await response.blob()); - brickwallDiffuseTexture = createTextureFromImage(device, imageBitmap); + brickwallAlbedoTexture = createTextureFromImage(device, imageBitmap); } let brickwallNormalTexture: GPUTexture; @@ -184,7 +186,7 @@ const init: SampleInit = async ({ canvas, pageState, gui }) => { ], ['buffer', 'buffer'], [{ type: 'uniform' }, { type: 'uniform' }], - [[{ buffer: uniformBuffer }, { buffer: mapMethodBuffer }]], + [[{ buffer: spaceTransformsBuffer }, { buffer: mapInfoBuffer }]], 'Frame', device ); @@ -204,19 +206,19 @@ const init: SampleInit = async ({ canvas, pageState, gui }) => { [ [ sampler, - woodDiffuseTexture.createView(), + woodAlbedoTexture.createView(), spiralNormalTexture.createView(), spiralHeightTexture.createView(), ], [ sampler, - woodDiffuseTexture.createView(), + woodAlbedoTexture.createView(), toyboxNormalTexture.createView(), toyboxHeightTexture.createView(), ], [ sampler, - brickwallDiffuseTexture.createView(), + brickwallAlbedoTexture.createView(), brickwallNormalTexture.createView(), brickwallHeightTexture.createView(), ], @@ -250,9 +252,9 @@ const init: SampleInit = 
async ({ canvas, pageState, gui }) => { } // Change the model mapping type - const getMappingType = (): number => { + const getMode = (): number => { switch (settings['Bump Mode']) { - case 'Diffuse Texture': + case 'Albedo Texture': return 0; case 'Normal Texture': return 1; @@ -285,7 +287,7 @@ const init: SampleInit = async ({ canvas, pageState, gui }) => { }; gui.add(settings, 'Bump Mode', [ - 'Diffuse Texture', + 'Albedo Texture', 'Normal Texture', 'Depth Texture', 'Normal Map', @@ -301,7 +303,7 @@ const init: SampleInit = async ({ canvas, pageState, gui }) => { lightPosXController.setValue(1.7); lightPosYController.setValue(0.7); lightPosZController.setValue(-1.9); - lightIntensityController.setValue(0.02); + lightIntensityController.setValue(5.0); }); const lightPosXController = lightFolder .add(settings, 'lightPosX', -5, 5) @@ -313,53 +315,54 @@ const init: SampleInit = async ({ canvas, pageState, gui }) => { .add(settings, 'lightPosZ', -5, 5) .step(0.1); const lightIntensityController = lightFolder - .add(settings, 'lightIntensity', 0.0, 0.1) - .step(0.002); + .add(settings, 'lightIntensity', 0.0, 10) + .step(0.1); depthFolder.add(settings, 'depthScale', 0.0, 0.1).step(0.01); depthFolder.add(settings, 'depthLayers', 1, 32).step(1); function frame() { if (!pageState.active) return; - // Write to normal map shader + // Update spaceTransformsBuffer const viewMatrix = getViewMatrix(); - - const modelMatrix = getModelMatrix(); - + const worldViewMatrix = mat4.mul(viewMatrix, getModelMatrix()); + const worldViewProjMatrix = mat4.mul(projectionMatrix, worldViewMatrix); const matrices = new Float32Array([ - ...projectionMatrix, - ...viewMatrix, - ...modelMatrix, + ...worldViewProjMatrix, + ...worldViewMatrix, ]); - const mappingType = getMappingType(); - + // Update mapInfoBuffer + const lightPosWS = vec3.create( + settings.lightPosX, + settings.lightPosY, + settings.lightPosZ + ); + const lightPosVS = vec3.transformMat4(lightPosWS, viewMatrix); + const mode = 
getMode(); device.queue.writeBuffer( - uniformBuffer, + spaceTransformsBuffer, 0, matrices.buffer, matrices.byteOffset, matrices.byteLength ); - device.queue.writeBuffer( - mapMethodBuffer, - 0, - new Uint32Array([mappingType]) - ); - - device.queue.writeBuffer( - mapMethodBuffer, - 4, - new Float32Array([ - settings.lightPosX, - settings.lightPosY, - settings.lightPosZ, - settings.lightIntensity, - settings.depthScale, - settings.depthLayers, - ]) - ); + // struct MapInfo { + // lightPosVS: vec3f, + // mode: u32, + // lightIntensity: f32, + // depthScale: f32, + // depthLayers: f32, + // } + mapInfoView.setFloat32(0, lightPosVS[0], true); + mapInfoView.setFloat32(4, lightPosVS[1], true); + mapInfoView.setFloat32(8, lightPosVS[2], true); + mapInfoView.setUint32(12, mode, true); + mapInfoView.setFloat32(16, settings.lightIntensity, true); + mapInfoView.setFloat32(20, settings.depthScale, true); + mapInfoView.setFloat32(24, settings.depthLayers, true); + device.queue.writeBuffer(mapInfoBuffer, 0, mapInfoArray); renderPassDescriptor.colorAttachments[0].view = context .getCurrentTexture() diff --git a/src/sample/normalMap/normalMap.wgsl b/src/sample/normalMap/normalMap.wgsl index 12696306..54b4684d 100644 --- a/src/sample/normalMap/normalMap.wgsl +++ b/src/sample/normalMap/normalMap.wgsl @@ -1,14 +1,19 @@ -struct SpaceTransformUniforms { - projMatrix: mat4x4f, - viewMatrix: mat4x4f, - modelMatrix: mat4x4f, + +const modeAlbedoTexture = 0; +const modeNormalTexture = 1; +const modeDepthTexture = 2; +const modeNormalMap = 3; +const modeParallaxScale = 4; +const modeSteepParallax = 5; + +struct SpaceTransforms { + worldViewProjMatrix: mat4x4f, + worldViewMatrix: mat4x4f, } -struct Uniforms_MapInfo { - mappingType: u32, - lightPosX: f32, - lightPosY: f32, - lightPosZ: f32, +struct MapInfo { + lightPosVS: vec3f, // Light position in view space + mode: u32, lightIntensity: f32, depthScale: f32, depthLayers: f32, @@ -24,176 +29,142 @@ struct VertexInput { } struct VertexOutput 
{ - @builtin(position) Position : vec4f, - @location(0) normal: vec3f, - @location(1) uv : vec2f, - // Vertex position in world space - @location(2) posWS: vec3f, - // Vertex position in tangent space - @location(3) posTS: vec3f, - // View position in tangent space - @location(4) viewTS: vec3f, - // Extracted components of our tbn matrix - @location(5) tbnTS0: vec3, - @location(6) tbnTS1: vec3, - @location(7) tbnTS2: vec3, + @builtin(position) posCS : vec4f, // vertex position in clip space + @location(0) posVS : vec3f, // vertex position in view space + @location(1) tangentVS: vec3f, // vertex tangent in view space + @location(2) bitangentVS: vec3f, // vertex bitangent in view space + @location(3) normalVS: vec3f, // vertex normal in view space + @location(5) uv : vec2f, // vertex texture coordinate } // Uniforms -@group(0) @binding(0) var spaceTransform : SpaceTransformUniforms; -@group(0) @binding(1) var mapInfo: Uniforms_MapInfo; +@group(0) @binding(0) var spaceTransform : SpaceTransforms; +@group(0) @binding(1) var mapInfo: MapInfo; // Texture info @group(1) @binding(0) var textureSampler: sampler; -@group(1) @binding(1) var diffuseTexture: texture_2d; +@group(1) @binding(1) var albedoTexture: texture_2d; @group(1) @binding(2) var normalTexture: texture_2d; @group(1) @binding(3) var depthTexture: texture_2d; -fn parallax_uv( - uv: vec2f, - viewDirTS: vec3f, - depthSample: f32, - depthScale: f32, -) -> vec2f { - if (mapInfo.mappingType == 4) { - // Perturb uv coordinates based on depth and camera direction - let p = viewDirTS.xy * (depthSample * depthScale) / viewDirTS.z; - return uv - p; - } - // Break up depth space into layers - let depthPerLayer = 1.0 / f32(mapInfo.depthLayers); - // Start at lowest depth - var currentDepth = 0.0; - let delta_uv = viewDirTS.xy * depthScale / (viewDirTS.z * mapInfo.depthLayers); - var prev_uv = uv; - var cur_uv = uv; - - var depthFromTexture = textureSample(depthTexture, textureSampler, cur_uv).r; - var prevDepthFromTexture = 
depthFromTexture; - var prevCurrentDepth = currentDepth; - for (var i: u32 = 0; i < 32; i++) { - currentDepth += depthPerLayer; - prev_uv = cur_uv; - cur_uv -= delta_uv; - depthFromTexture = textureSample(depthTexture, textureSampler, cur_uv).r; - // Determine whether current depth is greater than depth map - // Once we reach a certain threshold, we stop updating cur_uv - cur_uv = select(cur_uv, prev_uv, depthFromTexture < currentDepth); - prevDepthFromTexture = select(depthFromTexture, prevDepthFromTexture, prevDepthFromTexture < currentDepth); - prevCurrentDepth = select(currentDepth, prevCurrentDepth, prevDepthFromTexture < currentDepth); - } - return cur_uv; -} - -fn when_greater(v1: f32, v2: f32) -> f32 { - return max(sign(v1 - v2), 0.0); -} @vertex fn vertexMain(input: VertexInput) -> VertexOutput { var output : VertexOutput; - // Create the Model to View Matrix - let MV = spaceTransform.viewMatrix * spaceTransform.modelMatrix; - // Create the Model to View to Projection Matrix - let MVP = spaceTransform.projMatrix * MV; - - // Get Clip space transforms and pass through values out of the way - output.Position = MVP * input.position; - output.uv = input.uv; - output.normal = input.normal; - - // Multiply pos by modelMatrix to get the vertex/fragment's position in world space - output.posWS = vec3f((spaceTransform.modelMatrix * input.position).xyz); - - var MV3x3 = mat3x3f( - MV[0].xyz, - MV[1].xyz, - MV[2].xyz - ); - - // Get unit vectors of normal, tangent, and bitangents in model space - let vertexTangent = normalize(input.vert_tan); - let vertexBitangent = normalize(input.vert_bitan); - let vertexNormal = normalize(input.normal); - - // Convert tbn unit vectors to mv space for a model view tbn - var tbnTS = transpose( - MV3x3 * mat3x3f( - vertexTangent, - vertexBitangent, - vertexNormal - ) - ); - // Condense to vec3s so they can be passed to fragment shader - output.tbnTS0 = tbnTS[0]; - output.tbnTS1 = tbnTS[1]; - output.tbnTS2 = tbnTS[2]; - // Get the 
tangent space position of the vertex - output.posTS = tbnTS * (MV * input.position).xyz; - // Get the tangent space position of the camera view - output.viewTS = tbnTS * vec3f(0.0, 0.0, 0.0); + output.posCS = spaceTransform.worldViewProjMatrix * input.position; + output.posVS = (spaceTransform.worldViewMatrix * input.position).xyz; + output.tangentVS = (spaceTransform.worldViewMatrix * vec4(input.vert_tan, 0)).xyz; + output.bitangentVS = (spaceTransform.worldViewMatrix * vec4(input.vert_bitan, 0)).xyz; + output.normalVS = (spaceTransform.worldViewMatrix * vec4(input.normal, 0)).xyz; + output.uv = input.uv; return output; } @fragment fn fragmentMain(input: VertexOutput) -> @location(0) vec4f { - // Reconstruct tbnTS - let tbnTS = mat3x3f( - input.tbnTS0, - input.tbnTS1, - input.tbnTS2, + // Build the matrix to convert from tangent space to view space + let tangentToView = mat3x3f( + input.tangentVS, + input.bitangentVS, + input.normalVS, ); - // Get direction of view in tangent space - let viewDirTS = normalize(input.viewTS - input.posTS); + // The inverse of a non-scaling affine 3x3 matrix is its transpose + let viewToTangent = transpose(tangentToView); - // Get position, direction, and distance of light in tangent space (no need to multiply by model matrix as there is no model) - let lightPosVS = spaceTransform.viewMatrix * vec4f(mapInfo.lightPosX, mapInfo.lightPosY, mapInfo.lightPosZ, 1.0); - let lightPosTS = tbnTS * lightPosVS.xyz; - let lightDirTS = normalize(lightPosTS - input.posTS); - let lightDistanceTS = distance(input.posTS, lightPosTS); + // Calculate the normalized vector in tangent space from the camera to the fragment + let viewDirTS = normalize(viewToTangent * input.posVS); - let depthMap = textureSample(depthTexture, textureSampler, input.uv); + // Apply parallax to the texture coordinate, if parallax is enabled + var uv : vec2f; + switch (mapInfo.mode) { + case modeParallaxScale: { + uv = parallaxScale(input.uv, viewDirTS); + break; + } + case 
modeSteepParallax: { + uv = parallaxSteep(input.uv, viewDirTS); + break; + } + default: { + uv = input.uv; + break; + } + } - let uv = select( - parallax_uv(input.uv, viewDirTS, depthMap.r, mapInfo.depthScale), - input.uv, - mapInfo.mappingType < 4 - ); + // Sample the albedo texture + let albedoSample = textureSample(albedoTexture, textureSampler, uv); - // Get values from textures - let diffuseMap = textureSample(diffuseTexture, textureSampler, uv); - let normalMap = textureSample(normalTexture, textureSampler, uv); - - // Get normal in tangent space - let normalTS = normalize((normalMap.xyz * 2.0) - 1.0); - - // Calculate diffusion lighting - let lightColorIntensity = vec3f(255.0, 255.0, 255.0) * mapInfo.lightIntensity; - //How similar is the normal to the lightDirection - let diffuseStrength = clamp( - dot(normalTS, lightDirTS), 0.0, 1.0 - ); - // Strenght inversely proportional to square of distance from light - let diffuseLight = (lightColorIntensity * diffuseStrength) / (lightDistanceTS * lightDistanceTS); + // Sample the normal texture + let normalSample = textureSample(normalTexture, textureSampler, uv); - switch (mapInfo.mappingType) { - // Output the diffuse texture - case 0: { - return vec4f(diffuseMap.rgb, 1.0); + switch (mapInfo.mode) { + case modeAlbedoTexture: { // Output the albedo sample + return albedoSample; } - // Output the normal map - case 1: { - return vec4f(normalMap.rgb, 1.0); + case modeNormalTexture: { // Output the normal sample + return normalSample; } - // Output the height map - case 2: { - return vec4f(depthMap.rgb, 1.0); + case modeDepthTexture: { // Output the depth map + return textureSample(depthTexture, textureSampler, input.uv); } default: { - return vec4f(diffuseMap.rgb * diffuseLight, 1.0); + // Transform the normal sample to a tangent space normal + let normalTS = normalSample.xyz * 2 - 1; + + // Convert normal from tangent space to view space, and normalize + let normalVS = normalize(tangentToView * normalTS); + + // 
Calculate the vector in view space from the light position to the fragment + let fragToLightVS = mapInfo.lightPosVS - input.posVS; + + // Calculate the square distance from the light to the fragment + let lightSqrDist = dot(fragToLightVS, fragToLightVS); + + // Calculate the normalized vector in view space from the fragment to the light + let lightDirVS = fragToLightVS * inverseSqrt(lightSqrDist); + + // Light strength is inversely proportional to square of distance from light + let diffuseLight = mapInfo.lightIntensity * max(dot(lightDirVS, normalVS), 0) / lightSqrDist; + + // The diffuse is the albedo color multiplied by the diffuseLight + let diffuse = albedoSample.rgb * diffuseLight; + + return vec4f(diffuse, 1.0); } } -} \ No newline at end of file +} + + +// Returns the uv coordinate displaced in the view direction by a magnitude calculated by the depth +// sampled from the depthTexture and the angle between the surface normal and view direction. +fn parallaxScale(uv: vec2f, viewDirTS: vec3f) -> vec2f { + let depthSample = textureSample(depthTexture, textureSampler, uv).r; + return uv + viewDirTS.xy * (depthSample * mapInfo.depthScale) / -viewDirTS.z; +} + +// Returns the uv coordinates displaced in the view direction by ray-tracing the depth map. +fn parallaxSteep(startUV: vec2f, viewDirTS: vec3f) -> vec2f { + // Calculate derivatives of the texture coordinate, so we can sample the texture with non-uniform + // control flow. 
+ let ddx = dpdx(startUV); + let ddy = dpdy(startUV); + + // Calculate the delta step in UV and depth per iteration + let uvDelta = viewDirTS.xy * mapInfo.depthScale / (-viewDirTS.z * mapInfo.depthLayers); + let depthDelta = 1.0 / f32(mapInfo.depthLayers); + let posDelta = vec3(uvDelta, depthDelta); + + // Walk the depth texture, and stop when the ray intersects the depth map + var pos = vec3(startUV, 0); + for (var i = 0; i < 32; i++) { + if (pos.z >= textureSampleGrad(depthTexture, textureSampler, pos.xy, ddx, ddy).r) { + break; // Hit the surface + } + pos += posDelta; + } + + return pos.xy; +}