diff --git a/src/meshes/box.ts b/src/meshes/box.ts
index 3238f34d..c8968614 100644
--- a/src/meshes/box.ts
+++ b/src/meshes/box.ts
@@ -213,8 +213,6 @@ export const createBoxMesh = (
     ? new Uint16Array(indices)
     : new Uint32Array(indices);
 
-  console.log(indicesArray);
-
   return {
     vertices: new Float32Array(vertices),
     indices: indicesArray,
@@ -245,11 +243,13 @@ export const createBoxMeshWithTangents = (
   const vertexCount = mesh.vertices.length / originalStrideElements;
 
   const tangents = new Array(vertexCount);
-  tangents.fill(vec3.create(0.0, 0.0, 0.0));
   const bitangents = new Array(vertexCount);
-  bitangents.fill(vec3.create(0.0, 0.0, 0.0));
   const counts = new Array(vertexCount);
-  counts.fill(0);
+  for (let i = 0; i < vertexCount; i++) {
+    tangents[i] = [0, 0, 0];
+    bitangents[i] = [0, 0, 0];
+    counts[i] = 0;
+  }
 
   for (let i = 0; i < mesh.indices.length; i += 3) {
     const [idx1, idx2, idx3] = [
@@ -282,17 +282,17 @@ export const createBoxMeshWithTangents = (
     const constantVal =
      1.0 / (deltaUV1[0] * deltaUV2[1] - deltaUV1[1] * deltaUV2[0]);
 
-    const tangent = vec3.fromValues(
+    const tangent = [
       constantVal * (deltaUV2[1] * edge1[0] - deltaUV1[1] * edge2[0]),
       constantVal * (deltaUV2[1] * edge1[1] - deltaUV1[1] * edge2[1]),
       constantVal * (deltaUV2[1] * edge1[2] - deltaUV1[1] * edge2[2])
-    );
+    ];
 
-    const bitangent = vec3.fromValues(
+    const bitangent = [
       constantVal * (-deltaUV2[0] * edge1[0] + deltaUV1[0] * edge2[0]),
       constantVal * (-deltaUV2[0] * edge1[1] + deltaUV1[0] * edge2[1]),
       constantVal * (-deltaUV2[0] * edge1[2] + deltaUV1[0] * edge2[2])
-    );
+    ];
 
     //Accumulate tangents and bitangents
     tangents[idx1] = vec3.add(tangents[idx1], tangent);
diff --git a/src/sample/normalMap/main.ts b/src/sample/normalMap/main.ts
index 6927aa32..07833ba4 100644
--- a/src/sample/normalMap/main.ts
+++ b/src/sample/normalMap/main.ts
@@ -22,7 +22,7 @@ const init: SampleInit = async ({ canvas, pageState, gui }) => {
   const device = await adapter.requestDevice();
   if (!pageState.active) return;
   const context = canvas.getContext('webgpu') as GPUCanvasContext;
-  const devicePixelRatio = window.devicePixelRatio || 1;
+  const devicePixelRatio = window.devicePixelRatio;
   canvas.width = canvas.clientWidth * devicePixelRatio;
   canvas.height = canvas.clientHeight * devicePixelRatio;
   const presentationFormat = navigator.gpu.getPreferredCanvasFormat();
@@ -56,11 +56,11 @@ const init: SampleInit = async ({ canvas, pageState, gui }) => {
   const settings: GUISettings = {
     'Bump Mode': 'Normal Map',
     cameraPosX: 0.0,
-    cameraPosY: 0.0,
-    cameraPosZ: -2.4,
+    cameraPosY: 0.8,
+    cameraPosZ: -1.4,
     lightPosX: 1.7,
-    lightPosY: -0.7,
-    lightPosZ: 1.9,
+    lightPosY: 0.7,
+    lightPosZ: -1.9,
     lightIntensity: 0.02,
     depthScale: 0.05,
     depthLayers: 16,
@@ -204,22 +204,16 @@ const init: SampleInit = async ({ canvas, pageState, gui }) => {
   const projectionMatrix = mat4.perspective(
     (2 * Math.PI) / 5,
     aspect,
-    1,
-    100.0
+    0.1,
+    10.0
   ) as Float32Array;
 
   function getViewMatrix() {
-    const viewMatrix = mat4.identity();
-    mat4.translate(
-      viewMatrix,
-      vec3.fromValues(
-        settings.cameraPosX,
-        settings.cameraPosY,
-        settings.cameraPosZ
-      ),
-      viewMatrix
+    return mat4.lookAt(
+      [settings.cameraPosX, settings.cameraPosY, settings.cameraPosZ],
+      [0, 0, 0],
+      [0, 1, 0]
     );
-    return viewMatrix;
   }
 
   function getModelMatrix() {
@@ -308,11 +302,9 @@ const init: SampleInit = async ({ canvas, pageState, gui }) => {
     if (!pageState.active) return;
 
     // Write to normal map shader
-    const viewMatrixTemp = getViewMatrix();
-    const viewMatrix = viewMatrixTemp as Float32Array;
+    const viewMatrix = getViewMatrix();
 
-    const modelMatrixTemp = getModelMatrix();
-    const modelMatrix = modelMatrixTemp as Float32Array;
+    const modelMatrix = getModelMatrix();
 
     const matrices = new Float32Array([
       ...projectionMatrix,
@@ -321,6 +313,7 @@ const init: SampleInit = async ({ canvas, pageState, gui }) => {
     ]);
 
     const mappingType = getMappingType();
+    console.log(mappingType);
     device.queue.writeBuffer(
       uniformBuffer,
       0,
@@ -333,8 +326,13 @@ const init: SampleInit = async ({ canvas, pageState, gui }) => {
     device.queue.writeBuffer(
       mapMethodBuffer,
       0,
+      new Uint32Array([mappingType])
+    );
+
+    device.queue.writeBuffer(
+      mapMethodBuffer,
+      4,
       new Float32Array([
-        mappingType,
         settings.lightPosX,
         settings.lightPosY,
         settings.lightPosZ,
diff --git a/src/sample/normalMap/normalMap.wgsl b/src/sample/normalMap/normalMap.wgsl
index 5ed72a6a..02898daf 100644
--- a/src/sample/normalMap/normalMap.wgsl
+++ b/src/sample/normalMap/normalMap.wgsl
@@ -39,14 +39,6 @@ struct VertexOutput {
   @location(7) tbnTS2: vec3<f32>,
 }
 
-fn transpose3x3(mat: mat3x3f) -> mat3x3f {
-  return mat3x3f(
-    mat[0][0], mat[1][0], mat[2][0],
-    mat[0][1], mat[1][1], mat[2][1],
-    mat[0][2], mat[1][2], mat[2][2],
-  );
-}
-
 // Uniforms
 @group(0) @binding(0) var<uniform> spaceTransform : SpaceTransformUniforms;
 @group(0) @binding(1) var<uniform> mapInfo: Uniforms_MapInfo;
@@ -65,20 +57,20 @@ fn parallax_uv(
 ) -> vec2f {
   if (mapInfo.mappingType == 4) {
     // Perturb uv coordinates based on depth and camera direction
-    var p: vec2f = viewDirTS.xy * (depthSample * depthScale) / viewDirTS.z;
+    var p = viewDirTS.xy * (depthSample * depthScale) / viewDirTS.z;
     return uv - p;
   }
   // Break up depth space into layers
-  var depthPerLayer: f32 = 1.0 / f32(mapInfo.depthLayers);
+  var depthPerLayer = 1.0 / f32(mapInfo.depthLayers);
   // Start at lowest depth
-  var currentDepth: f32 = 0.0;
-  var delta_uv: vec2<f32> = viewDirTS.xy * depthScale / (viewDirTS.z * mapInfo.depthLayers);
+  var currentDepth = 0.0;
+  var delta_uv = viewDirTS.xy * depthScale / (viewDirTS.z * mapInfo.depthLayers);
   var prev_uv = uv;
   var cur_uv = uv;
 
-  var depthFromTexture: f32 = textureSample(depthTexture, textureSampler, cur_uv).r;
-  var prevDepthFromTexture: f32 = depthFromTexture;
-  var prevCurrentDepth: f32 = currentDepth;
+  var depthFromTexture = textureSample(depthTexture, textureSampler, cur_uv).r;
+  var prevDepthFromTexture = depthFromTexture;
+  var prevCurrentDepth = currentDepth;
   for (var i: u32 = 0; i < 32; i++) {
     currentDepth += depthPerLayer;
     prev_uv = cur_uv;
@@ -120,12 +112,12 @@ fn vertexMain(input: VertexInput) -> VertexOutput {
   );
 
   // Get unit vectors of normal, tangent, and bitangents in model space
-  var vertexTangent: vec3f = normalize(input.vert_tan);
-  var vertexBitangent: vec3f = normalize(input.vert_bitan);
-  var vertexNormal: vec3f = normalize(input.normal);
+  var vertexTangent = normalize(input.vert_tan);
+  var vertexBitangent = normalize(input.vert_bitan);
+  var vertexNormal = normalize(input.normal);
 
   // Convert tbn unit vectors to mv space for a model view tbn
-  var tbnTS = transpose3x3(
+  var tbnTS = transpose(
     MV3x3 * mat3x3f(
       vertexTangent,
       vertexBitangent,
@@ -158,9 +150,9 @@ fn fragmentMain(input: VertexOutput) -> @location(0) vec4f {
   var viewDirTS = normalize(input.viewTS - input.posTS);
 
   // Get position, direction, and distance of light in tangent space (no need to multiply by model matrix as there is no model)
-  var lightPosVS: vec4f = spaceTransform.viewMatrix * vec4f(mapInfo.lightPosX, mapInfo.lightPosY, mapInfo.lightPosZ, 1.0);
-  var lightPosTS: vec3f = tbnTS * lightPosVS.xyz;
-  var lightDirTS: vec3f = normalize(lightPosTS - input.posTS);
+  var lightPosVS = spaceTransform.viewMatrix * vec4f(mapInfo.lightPosX, mapInfo.lightPosY, mapInfo.lightPosZ, 1.0);
+  var lightPosTS = tbnTS * lightPosVS.xyz;
+  var lightDirTS = normalize(lightPosTS - input.posTS);
   var lightDistanceTS = distance(input.posTS, lightPosTS);
 
   let depthMap = textureSample(depthTexture, textureSampler, input.uv);
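
Note on the mapMethodBuffer change in main.ts: the mapping type is now written as a u32 at byte offset 0, with the f32 light parameters starting at byte offset 4, so the buffer layout can match a WGSL uniform whose first field is an unsigned integer. A minimal sketch of the same packing done in one write, using two typed-array views over a single staging ArrayBuffer; the helper name `packMapInfo`, the 32-byte size, and the reduced field list are illustrative assumptions, not part of this change:

```ts
// Sketch only: pack a u32 followed by f32 values into one ArrayBuffer,
// mirroring the 0/4 byte offsets used by the two writeBuffer calls above.
function packMapInfo(
  mappingType: number,
  lightPos: [number, number, number]
): ArrayBuffer {
  const buffer = new ArrayBuffer(32); // padded; actual size depends on the WGSL struct
  new Uint32Array(buffer, 0, 1)[0] = mappingType; // u32 at byte offset 0
  new Float32Array(buffer, 4).set(lightPos); // f32 values from byte offset 4
  return buffer;
}

// Hypothetical usage in the render loop, replacing the two queue writes:
// device.queue.writeBuffer(
//   mapMethodBuffer,
//   0,
//   packMapInfo(mappingType, [
//     settings.lightPosX,
//     settings.lightPosY,
//     settings.lightPosZ,
//   ])
// );
```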