[HDRP] Various backports
adrien-de-tocqueville authored and Evergreen committed Apr 15, 2024
1 parent 7730722 commit a297a97
Showing 26 changed files with 505 additions and 386 deletions.
@@ -1,6 +1,7 @@
using System;
using System.IO;
using Unity.Collections;
using Unity.Collections.LowLevel.Unsafe;
using UnityEditor;
#if UNITY_2020_2_OR_NEWER
using UnityEditor.AssetImporters;
@@ -118,7 +119,7 @@ public string GetPhotometricType()
int width = 2 * textureSize;
int height = 2 * textureSize;

NativeArray<Color32> colorBuffer;
NativeArray<Color> colorBuffer;

switch (m_iesReader.PhotometricType)
{
@@ -148,7 +149,7 @@ public string GetPhotometricType()
/// <returns>A Generated 2D texture doing the projection of the IES using the Gnomonic projection of the bottom half hemisphere with the given 'cone angle'</returns>
public (string, Texture) Generate2DCookie(TextureImporterCompression compression, float coneAngle, int textureSize, bool applyLightAttenuation)
{
NativeArray<Color32> colorBuffer;
NativeArray<Color> colorBuffer;

switch (m_iesReader.PhotometricType)
{
@@ -171,7 +172,7 @@ public string GetPhotometricType()
int width = 2 * textureSize;
int height = textureSize;

NativeArray<Color32> colorBuffer;
NativeArray<Color> colorBuffer;

switch (m_iesReader.PhotometricType)
{
@@ -189,7 +190,7 @@ public string GetPhotometricType()
return GenerateTexture(TextureImporterType.Default, TextureImporterShape.Texture2D, compression, width, height, colorBuffer);
}

(string, Texture) GenerateTexture(TextureImporterType type, TextureImporterShape shape, TextureImporterCompression compression, int width, int height, NativeArray<Color32> colorBuffer)
(string, Texture) GenerateTexture(TextureImporterType type, TextureImporterShape shape, TextureImporterCompression compression, int width, int height, NativeArray<Color> colorBuffer)
{
// Default values set by the TextureGenerationSettings constructor can be found in this file on GitHub:
// https://github.com/Unity-Technologies/UnityCsReference/blob/master/Editor/Mono/AssetPipeline/TextureGenerator.bindings.cs
@@ -218,6 +219,7 @@ public string GetPhotometricType()
platformSettings.maxTextureSize = 2048;
platformSettings.resizeAlgorithm = TextureResizeAlgorithm.Bilinear;
platformSettings.textureCompression = compression;
platformSettings.format = TextureImporterFormat.RGB9E5;

TextureGenerationOutput output = TextureGenerator.GenerateTexture(settings, colorBuffer);
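The hunk above also pins the generated cookie's import format to RGB9E5, a 32-bit shared-exponent HDR layout (a 9-bit mantissa per channel plus a common 5-bit exponent, no sign and no alpha), which pairs naturally with the switch from Color32 to float Color buffers. As a rough illustration of what that format stores (Unity performs this packing internally during texture generation, so the helper below is not importer code), the standard RGB9E5 encoding looks like this:

```csharp
using UnityEngine;

static class Rgb9e5Sketch
{
    const int MantissaBits = 9;          // per-channel mantissa
    const int ExponentBias = 15;
    // Largest representable value: (2^9 - 1) / 2^9 * 2^(31 - 15), roughly 65408.
    static readonly float MaxValue = (511f / 512f) * 65536f;

    public static uint Pack(float r, float g, float b)
    {
        r = Mathf.Clamp(r, 0f, MaxValue);
        g = Mathf.Clamp(g, 0f, MaxValue);
        b = Mathf.Clamp(b, 0f, MaxValue);

        float maxChannel = Mathf.Max(r, Mathf.Max(g, b));
        if (maxChannel <= 0f) return 0u;                     // all-black texel

        // Shared exponent chosen from the brightest channel.
        int exponent = Mathf.Max(-ExponentBias - 1, Mathf.FloorToInt(Mathf.Log(maxChannel, 2f))) + 1 + ExponentBias;
        float scale = Mathf.Pow(2f, exponent - ExponentBias - MantissaBits);

        // If the brightest mantissa rounds up to 512, bump the exponent instead.
        if (Mathf.FloorToInt(maxChannel / scale + 0.5f) == (1 << MantissaBits))
        {
            exponent++;
            scale *= 2f;
        }

        uint rm = (uint)Mathf.FloorToInt(r / scale + 0.5f);
        uint gm = (uint)Mathf.FloorToInt(g / scale + 0.5f);
        uint bm = (uint)Mathf.FloorToInt(b / scale + 0.5f);

        return rm | (gm << 9) | (bm << 18) | ((uint)exponent << 27);
    }
}
```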

@@ -229,21 +231,22 @@ public string GetPhotometricType()
return (output.importInspectorWarnings, output.output);
}

private static byte PackIESValue(float value)
Color ComputePixelColor(float horizontalAnglePosition, float verticalAnglePosition, float attenuation = 1.0f)
{
return (byte)Math.Clamp(value * 255, 0, 255);
float value = m_iesReader.InterpolateBilinear(horizontalAnglePosition, verticalAnglePosition) / (m_iesReader.MaxCandelas * attenuation);
return new Color(value, value, value, value);
}
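The new ComputePixelColor helper replaces the PackIESValue + Color32 pattern previously repeated in every builder below: rather than quantizing the normalized candela sample to 8 bits, it keeps the full float value and folds the optional attenuation divide into the same call. A small standalone comparison (illustrative, not part of the importer) shows what the old path discarded:

```csharp
using UnityEngine;

static class IesPixelPrecisionDemo
{
    // Old path: normalized intensity quantized to 256 levels before being stored in a Color32.
    static byte PackIESValue(float value) => (byte)Mathf.Clamp(value * 255f, 0f, 255f);

    // New path: the normalized intensity is kept as a float greyscale Color.
    static Color ComputePixelColor(float normalizedIntensity, float attenuation = 1.0f)
    {
        float value = normalizedIntensity / attenuation;
        return new Color(value, value, value, value);
    }

    public static void Compare()
    {
        float sample = 0.0123f;                       // hypothetical candelas / MaxCandelas
        float old = PackIESValue(sample) / 255f;      // 3 / 255, about 0.01176: roughly 4% relative error
        float kept = ComputePixelColor(sample).r;     // 0.0123, full float precision
        Debug.Log($"8-bit round trip: {old}, float: {kept}");
    }
}
```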

NativeArray<Color32> BuildTypeACylindricalTexture(int width, int height)
NativeArray<Color> BuildTypeACylindricalTexture(int width, int height)
{
float stepU = 360f / (width - 1);
float stepV = 180f / (height - 1);

var textureBuffer = new NativeArray<Color32>(width * height, Allocator.Temp, NativeArrayOptions.UninitializedMemory);
var textureBuffer = new NativeArray<Color>(width * height, Allocator.Temp, NativeArrayOptions.UninitializedMemory);

for (int y = 0; y < height; y++)
{
var slice = new NativeSlice<Color32>(textureBuffer, y * width, width);
var slice = new NativeSlice<Color>(textureBuffer, y * width, width);

float latitude = y * stepV - 90f; // in range [-90..+90] degrees

@@ -255,24 +258,23 @@ NativeArray<Color32> BuildTypeACylindricalTexture(int width, int height)

float horizontalAnglePosition = m_iesReader.ComputeTypeAorBHorizontalAnglePosition(longitude);

byte value = PackIESValue(m_iesReader.InterpolateBilinear(horizontalAnglePosition, verticalAnglePosition) / m_iesReader.MaxCandelas);
slice[x] = new Color32(value, value, value, value);
slice[x] = ComputePixelColor(horizontalAnglePosition, verticalAnglePosition);
}
}

return textureBuffer;
}
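All three cylindrical builders walk the same lat-long grid; only the mapping from longitude/latitude to the IES angle positions differs per photometric type. The sketch below restates that traversal for the Type A case; the longitude reconstruction is an assumption, since that line sits in the collapsed part of the diff, and the lookup delegate is a hypothetical stand-in for the m_iesReader calls:

```csharp
using System;
using UnityEngine;

static class LatLongCookieSketch
{
    // Illustrative lat-long traversal matching the Type A builder above.
    // 'lookupNormalizedIntensity(longitudeDeg, latitudeDeg)' stands in for
    // ComputeTypeAorBHorizontalAnglePosition + InterpolateBilinear / MaxCandelas.
    public static Color[] Build(int width, int height, Func<float, float, float> lookupNormalizedIntensity)
    {
        float stepU = 360f / (width - 1);   // longitude step, degrees
        float stepV = 180f / (height - 1);  // latitude step, degrees

        var buffer = new Color[width * height];
        for (int y = 0; y < height; y++)
        {
            float latitude = y * stepV - 90f;            // [-90 .. +90], as in the code above
            for (int x = 0; x < width; x++)
            {
                float longitude = x * stepU - 180f;      // assumed [-180 .. +180]; that line is collapsed above
                float value = lookupNormalizedIntensity(longitude, latitude);
                buffer[y * width + x] = new Color(value, value, value, value);
            }
        }
        return buffer;
    }
}
```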

NativeArray<Color32> BuildTypeBCylindricalTexture(int width, int height)
NativeArray<Color> BuildTypeBCylindricalTexture(int width, int height)
{
float stepU = k_TwoPi / (width - 1);
float stepV = Mathf.PI / (height - 1);

var textureBuffer = new NativeArray<Color32>(width * height, Allocator.Temp, NativeArrayOptions.UninitializedMemory);
var textureBuffer = new NativeArray<Color>(width * height, Allocator.Temp, NativeArrayOptions.UninitializedMemory);

for (int y = 0; y < height; y++)
{
var slice = new NativeSlice<Color32>(textureBuffer, y * width, width);
var slice = new NativeSlice<Color>(textureBuffer, y * width, width);

float v = y * stepV - k_HalfPi; // in range [-90..+90] degrees

@@ -293,24 +295,23 @@ NativeArray<Color32> BuildTypeBCylindricalTexture(int width, int height)
float horizontalAnglePosition = m_iesReader.ComputeTypeAorBHorizontalAnglePosition(longitude);
float verticalAnglePosition = m_iesReader.ComputeVerticalAnglePosition(latitude);

byte value = PackIESValue(m_iesReader.InterpolateBilinear(horizontalAnglePosition, verticalAnglePosition) / m_iesReader.MaxCandelas);
slice[x] = new Color32(value, value, value, value);
slice[x] = ComputePixelColor(horizontalAnglePosition, verticalAnglePosition);
}
}

return textureBuffer;
}

NativeArray<Color32> BuildTypeCCylindricalTexture(int width, int height)
NativeArray<Color> BuildTypeCCylindricalTexture(int width, int height)
{
float stepU = k_TwoPi / (width - 1);
float stepV = Mathf.PI / (height - 1);

var textureBuffer = new NativeArray<Color32>(width * height, Allocator.Temp, NativeArrayOptions.UninitializedMemory);
var textureBuffer = new NativeArray<Color>(width * height, Allocator.Temp, NativeArrayOptions.UninitializedMemory);

for (int y = 0; y < height; y++)
{
var slice = new NativeSlice<Color32>(textureBuffer, y * width, width);
var slice = new NativeSlice<Color>(textureBuffer, y * width, width);

float v = y * stepV - k_HalfPi; // in range [-90..+90] degrees

@@ -331,25 +332,24 @@ NativeArray<Color32> BuildTypeCCylindricalTexture(int width, int height)
float horizontalAnglePosition = m_iesReader.ComputeTypeCHorizontalAnglePosition(longitude);
float verticalAnglePosition = m_iesReader.ComputeVerticalAnglePosition(latitude);

byte value = PackIESValue(m_iesReader.InterpolateBilinear(horizontalAnglePosition, verticalAnglePosition) / m_iesReader.MaxCandelas);
slice[x] = new Color32(value, value, value, value);
slice[x] = ComputePixelColor(horizontalAnglePosition, verticalAnglePosition);
}
}

return textureBuffer;
}

NativeArray<Color32> BuildTypeAGnomonicTexture(float coneAngle, int size, bool applyLightAttenuation)
NativeArray<Color> BuildTypeAGnomonicTexture(float coneAngle, int size, bool applyLightAttenuation)
{
float limitUV = Mathf.Tan(0.5f * coneAngle * Mathf.Deg2Rad);
float stepUV = (2 * limitUV) / (size - 3);

var textureBuffer = new NativeArray<Color32>(size * size, Allocator.Temp, NativeArrayOptions.ClearMemory);
var textureBuffer = new NativeArray<Color>(size * size, Allocator.Temp, NativeArrayOptions.ClearMemory);

// Leave a one-pixel black border around the texture to avoid cookie spilling.
for (int y = 1; y < size - 1; y++)
{
var slice = new NativeSlice<Color32>(textureBuffer, y * size, size);
var slice = new NativeSlice<Color>(textureBuffer, y * size, size);

float v = (y - 1) * stepUV - limitUV;

@@ -368,25 +368,24 @@ NativeArray<Color32> BuildTypeAGnomonicTexture(float coneAngle, int size, bool a
// Factor in the light attenuation further from the texture center.
float lightAttenuation = applyLightAttenuation ? rayLengthSquared : 1f;

byte value = PackIESValue(m_iesReader.InterpolateBilinear(horizontalAnglePosition, verticalAnglePosition) / (m_iesReader.MaxCandelas * lightAttenuation));
slice[x] = new Color32(value, value, value, value);
slice[x] = ComputePixelColor(horizontalAnglePosition, verticalAnglePosition, lightAttenuation);
}
}

return textureBuffer;
}
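The gnomonic builders project the bottom hemisphere onto a plane at unit distance from the light: the half cone angle fixes the plane extent through its tangent, the interior (size - 2) x (size - 2) pixels cover that extent exactly, and the outer one-pixel ring stays black so a clamped, filtered cookie cannot spill light outside the cone. A minimal sketch of that grid setup (the IES angle lookups are elided; the attenuation term shown is the Type C form visible further down):

```csharp
using UnityEngine;

static class GnomonicGridSketch
{
    // Illustrative: lays out the gnomonic UV grid the builders above iterate over.
    public static void Describe(float coneAngle /* degrees */, int size)
    {
        float limitUV = Mathf.Tan(0.5f * coneAngle * Mathf.Deg2Rad); // e.g. 1.0 for a 90 degree cone
        float stepUV  = (2f * limitUV) / (size - 3);

        for (int y = 1; y < size - 1; y++)                  // row 0 and row size-1 stay black
        {
            float v = (y - 1) * stepUV - limitUV;           // spans exactly [-limitUV, +limitUV]
            for (int x = 1; x < size - 1; x++)
            {
                float u = (x - 1) * stepUV - limitUV;
                // Type C attenuation term visible above: squared distance from the light
                // to the point (u, v) on the projection plane sitting at unit distance.
                float lightAttenuation = u * u + v * v + 1f;
                // slice[x] = ComputePixelColor(horizontalAnglePos, verticalAnglePos, lightAttenuation);
            }
        }
    }
}
```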

NativeArray<Color32> BuildTypeBGnomonicTexture(float coneAngle, int size, bool applyLightAttenuation)
NativeArray<Color> BuildTypeBGnomonicTexture(float coneAngle, int size, bool applyLightAttenuation)
{
float limitUV = Mathf.Tan(0.5f * coneAngle * Mathf.Deg2Rad);
float stepUV = (2 * limitUV) / (size - 3);

var textureBuffer = new NativeArray<Color32>(size * size, Allocator.Temp, NativeArrayOptions.ClearMemory);
var textureBuffer = new NativeArray<Color>(size * size, Allocator.Temp, NativeArrayOptions.ClearMemory);

// Leave a one-pixel black border around the texture to avoid cookie spilling.
for (int y = 1; y < size - 1; y++)
{
var slice = new NativeSlice<Color32>(textureBuffer, y * size, size);
var slice = new NativeSlice<Color>(textureBuffer, y * size, size);

float v = (y - 1) * stepUV - limitUV;

@@ -406,25 +405,24 @@ NativeArray<Color32> BuildTypeBGnomonicTexture(float coneAngle, int size, bool a
// Factor in the light attenuation further from the texture center.
float lightAttenuation = applyLightAttenuation ? rayLengthSquared : 1f;

byte value = PackIESValue(m_iesReader.InterpolateBilinear(horizontalAnglePosition, verticalAnglePosition) / (m_iesReader.MaxCandelas * lightAttenuation));
slice[x] = new Color32(value, value, value, value);
slice[x] = ComputePixelColor(horizontalAnglePosition, verticalAnglePosition, lightAttenuation);
}
}

return textureBuffer;
}

NativeArray<Color32> BuildTypeCGnomonicTexture(float coneAngle, int size, bool applyLightAttenuation)
NativeArray<Color> BuildTypeCGnomonicTexture(float coneAngle, int size, bool applyLightAttenuation)
{
float limitUV = Mathf.Tan(0.5f * coneAngle * Mathf.Deg2Rad);
float stepUV = (2 * limitUV) / (size - 3);

var textureBuffer = new NativeArray<Color32>(size * size, Allocator.Temp, NativeArrayOptions.ClearMemory);
var textureBuffer = new NativeArray<Color>(size * size, Allocator.Temp, NativeArrayOptions.ClearMemory);

// Leave a one-pixel black border around the texture to avoid cookie spilling.
for (int y = 1; y < size - 1; y++)
{
var slice = new NativeSlice<Color32>(textureBuffer, y * size, size);
var slice = new NativeSlice<Color>(textureBuffer, y * size, size);

float v = (y - 1) * stepUV - limitUV;

@@ -443,8 +441,7 @@ NativeArray<Color32> BuildTypeCGnomonicTexture(float coneAngle, int size, bool a
// Factor in the light attenuation further from the texture center.
float lightAttenuation = applyLightAttenuation ? (uvLength * uvLength + 1) : 1f;

byte value = PackIESValue(m_iesReader.InterpolateBilinear(horizontalAnglePosition, verticalAnglePosition) / (m_iesReader.MaxCandelas * lightAttenuation));
slice[x] = new Color32(value, value, value, value);
slice[x] = ComputePixelColor(horizontalAnglePosition, verticalAnglePosition, lightAttenuation);
}
}

@@ -309,6 +309,7 @@ uint GetIndexData(APVResources apvRes, float3 posWS)
{
float3 residualPosWS = posWS - topLeftEntryWS;
int3 localBrickIndex = floor(residualPosWS / (_MinBrickSize * stepSize));
localBrickIndex = min(localBrickIndex, (int3)(3 * 3 * 3 - 1)); // due to floating point issue, we may query an invalid brick

// Out of bounds.
isValidBrick = all(localBrickIndex >= minRelativeIdx) && all(localBrickIndex < maxRelativeIdxPlusOne);
@@ -226,7 +226,13 @@ void SampleBakedGI(
#elif (defined(PROBE_VOLUMES_L1) || defined(PROBE_VOLUMES_L2))
if (needToIncludeAPV)
{
EvaluateAdaptiveProbeVolume(GetAbsolutePositionWS(posInputs.positionWS), normalWS, backNormalWS, GetWorldSpaceNormalizeViewDir(posInputs.positionWS), 0.0, bakeDiffuseLighting, backBakeDiffuseLighting);
EvaluateAdaptiveProbeVolume(GetAbsolutePositionWS(posInputs.positionWS),
normalWS,
backNormalWS,
GetWorldSpaceNormalizeViewDir(posInputs.positionWS),
posInputs.positionSS,
bakeDiffuseLighting,
backBakeDiffuseLighting);
}
#elif !(defined(PROBE_VOLUMES_L1) || defined(PROBE_VOLUMES_L2)) // With APV if we aren't a lightmap we do nothing. We will default to Ambient Probe in lightloop code if APV is disabled
EvaluateLightProbeBuiltin(positionRWS, normalWS, backNormalWS, bakeDiffuseLighting, backBakeDiffuseLighting);
@@ -254,14 +260,15 @@ void SampleBakedGI(
SampleBakedGI(posInputs, normalWS, backNormalWS, renderingLayers, uvStaticLightmap, uvDynamicLightmap, needToIncludeAPV, bakeDiffuseLighting, backBakeDiffuseLighting);
}

float3 SampleBakedGI(float3 positionRWS, float3 normalWS, float2 uvStaticLightmap, float2 uvDynamicLightmap, bool needToIncludeAPV = false)
float3 SampleBakedGI(float3 positionRWS, float3 normalWS, uint2 positionSS, float2 uvStaticLightmap, float2 uvDynamicLightmap, bool needToIncludeAPV = false)
{
// Need PositionInputs for indexing probe volume clusters, but they are not availbile from the current SampleBakedGI() function signature.
// Need PositionInputs for indexing probe volume clusters, but they are not available from the current SampleBakedGI() function signature.
// Reconstruct.
uint renderingLayers = 0;
PositionInputs posInputs;
ZERO_INITIALIZE(PositionInputs, posInputs);
posInputs.positionWS = positionRWS;
posInputs.positionSS = positionSS;

const float3 backNormalWSUnused = 0.0;
float3 bakeDiffuseLighting;
@@ -467,19 +467,32 @@ float ComputeCaustic(float3 V, float3 positionOS, float3 lightDirOS, BSDFData bs

// This is a test coming from the Heretic demo. It is way more expensive,
// is sometimes better and sometimes not than the "else" code, and doesn't support caustics.
void LightEyeTransform(PositionInputs posInput, BSDFData bsdfData, inout float3 positionRWS, inout float3 forward, inout float3 right, inout float3 up)
void LightEyeTransform(PositionInputs posInput, BSDFData bsdfData, inout LightData lightData)
{
float3 L = normalize(positionRWS - posInput.positionWS);
float3 L = normalize(lightData.positionRWS - posInput.positionWS);
float3 refractL = -refract(-L, bsdfData.geomNormalWS, 1.0 / bsdfData.IOR);

float3 axis = normalize(cross(L, refractL));

float angle = acos(dot(L, refractL));

positionRWS = Rotate(posInput.positionWS, positionRWS, axis, angle);
forward = Rotate(float3(0, 0, 0), forward, axis, angle);
right = Rotate(float3(0, 0, 0), right, axis, angle);
up = Rotate(float3(0, 0, 0), up, axis, angle);
lightData.positionRWS = Rotate(posInput.positionWS, lightData.positionRWS, axis, angle);
lightData.forward = Rotate(float3(0, 0, 0), lightData.forward, axis, angle);
lightData.right = Rotate(float3(0, 0, 0), lightData.right, axis, angle);
lightData.up = Rotate(float3(0, 0, 0), lightData.up, axis, angle);
}
void LightEyeTransform(PositionInputs posInput, BSDFData bsdfData, inout DirectionalLightData lightData)
{
float3 L = -lightData.forward;
float3 refractL = -refract(-L, bsdfData.geomNormalWS, 1.0 / bsdfData.IOR);

float3 axis = normalize(cross(L, refractL));

float angle = acos(dot(L, refractL));

lightData.forward = Rotate(float3(0, 0, 0), lightData.forward, axis, angle);
lightData.right = Rotate(float3(0, 0, 0), lightData.right, axis, angle);
lightData.up = Rotate(float3(0, 0, 0), lightData.up, axis, angle);
}
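Both LightEyeTransform overloads implement the same idea: refract the incoming light direction through the eye surface (geometric normal, 1/IOR), then rotate the light's frame about the shading point by the angle between the original and refracted directions, so the second, diffuse-only evaluation lights the iris through the cornea. A C# sketch of the punctual/area form, assuming Rotate(pivot, p, axis, angle) rotates p around the axis passing through pivot (handedness conventions aside):

```csharp
using UnityEngine;

static class LightEyeTransformSketch
{
    // GLSL-style refract; 'incident' points towards the surface.
    static Vector3 Refract(Vector3 incident, Vector3 normal, float eta)
    {
        float nDotI = Vector3.Dot(normal, incident);
        float k = 1f - eta * eta * (1f - nDotI * nDotI);
        if (k < 0f) return Vector3.zero;                     // total internal reflection, ignored as in the shader
        return eta * incident - (eta * nDotI + Mathf.Sqrt(k)) * normal;
    }

    static Vector3 Rotate(Vector3 pivot, Vector3 point, Vector3 axis, float angleRadians)
    {
        return Quaternion.AngleAxis(angleRadians * Mathf.Rad2Deg, axis) * (point - pivot) + pivot;
    }

    // Rotates the light position and frame so the diffuse iris pass sees the refracted light.
    public static void Apply(Vector3 shadingPositionWS, Vector3 geomNormalWS, float ior,
        ref Vector3 lightPositionWS, ref Vector3 forward, ref Vector3 right, ref Vector3 up)
    {
        Vector3 L = (lightPositionWS - shadingPositionWS).normalized;
        Vector3 refractL = -Refract(-L, geomNormalWS, 1f / ior);

        Vector3 axis = Vector3.Cross(L, refractL).normalized;
        float angle = Mathf.Acos(Mathf.Clamp(Vector3.Dot(L, refractL), -1f, 1f));

        lightPositionWS = Rotate(shadingPositionWS, lightPositionWS, axis, angle);
        forward = Rotate(Vector3.zero, forward, axis, angle);
        right   = Rotate(Vector3.zero, right, axis, angle);
        up      = Rotate(Vector3.zero, up, axis, angle);
    }
}
```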
//-----------------------------------------------------------------------------
// EvaluateBSDF_Directional
@@ -502,7 +515,7 @@ DirectLighting EvaluateBSDF_Directional(LightLoopContext lightLoopContext,
c = ComputeCausticFromLUT(preLightData.irisPlanePosition, bsdfData.irisPlaneOffset, lightPosOS, bsdfData.causticIntensity);
}
// Evaluate a second time the light but for a different position and for diffuse only.
LightEyeTransform(posInput, bsdfData, lightData.positionRWS, lightData.forward, lightData.right, lightData.up);
LightEyeTransform(posInput, bsdfData, lightData);

DirectLighting dlIris = ShadeSurface_Directional( lightLoopContext, posInput, builtinData,
preLightData, lightData, bsdfData, V);
@@ -541,7 +554,7 @@ DirectLighting EvaluateBSDF_Punctual(LightLoopContext lightLoopContext,
c = ComputeCausticFromLUT(preLightData.irisPlanePosition, bsdfData.irisPlaneOffset, TransformWorldToObject(lightData.positionRWS), bsdfData.causticIntensity);
}
// Evaluate a second time the light but for a different position and for diffuse only.
LightEyeTransform(posInput, bsdfData, lightData.positionRWS, lightData.forward, lightData.right, lightData.up);
LightEyeTransform(posInput, bsdfData, lightData);

DirectLighting dlIris = ShadeSurface_Punctual( lightLoopContext, posInput, builtinData,
preLightData, lightData, bsdfData, V);
@@ -728,7 +741,7 @@ DirectLighting EvaluateBSDF_Area(LightLoopContext lightLoopContext,
c = ComputeCausticFromLUT(preLightData.irisPlanePosition, bsdfData.irisPlaneOffset, TransformWorldToObject(lightData.positionRWS), bsdfData.causticIntensity);
}

LightEyeTransform(posInput, bsdfData, lightData.positionRWS, lightData.forward, lightData.right, lightData.up);
LightEyeTransform(posInput, bsdfData, lightData);

DirectLighting dl2;
if (lightData.lightType == GPULIGHTTYPE_TUBE)
@@ -70,6 +70,8 @@ float3 GetSharpenedResult(float3 original, float2 UV)
[numthreads(8, 8, 1)]
void SharpenCS(uint3 dispatchThreadId : SV_DispatchThreadID)
{
UNITY_XR_ASSIGN_VIEW_INDEX(dispatchThreadId.z);

float2 UV = (dispatchThreadId.xy + 0.5) * _PostProcessScreenSize.zw;

float4 original = _InputTexture[COORD_TEXTURE2D_X(dispatchThreadId.xy)];