2525// Epsilon value used for the computation
2626#define GI_TRACE_EPS 0.00024414
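// Note: 0.00024414 is approximately 2^-12 (1/4096); presumably chosen as a small fixed tolerance for the depth comparisons during the trace.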
2727
28+ #define PERCEPTUAL_SPACE
29+
2830// Input depth pyramid texture
2931TEXTURE2D_X(_DepthTexture);
3032// Input texture that holds the offset for every level of the depth pyramid
@@ -42,6 +44,7 @@ CBUFFER_START(UnityScreenSpaceGlobalIllumination)
4244 int _IndirectDiffuseProbeFallbackBias;
4345 float4 _ColorPyramidUvScaleAndLimitPrevFrame;
4446 int _SsrStencilBit;
47+ int _IndirectDiffuseFrameIndex;
4548CBUFFER_END
4649
4750// Output texture that holds the hit point NDC coordinates
@@ -62,7 +65,7 @@ bool RayMarch(float3 positionWS, float3 sampleDir, float3 normalWS, float2 posit
6265 // We start tracing from the center of the current pixel, and do so up to the far plane.
6366 float3 rayOrigin = float3(positionSS + 0.5, deviceDepth);
6467
65- float3 sampledPosWS = positionWS + sampleDir * 0.01 ;
68+ float3 sampledPosWS = positionWS + sampleDir * 0.001 ;
6669 float3 sampledPosNDC = ComputeNormalizedDeviceCoordinatesWithZ(sampledPosWS, UNITY_MATRIX_VP); // Jittered
6770 float3 sampledPosSS = float3(sampledPosNDC.xy * _ScreenSize.xy, sampledPosNDC.z);
6871
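The offset point above is presumably used to derive the per-pixel marching direction in screen space; a minimal sketch of that standard setup (not the shader's actual continuation, variable names are illustrative):

    // Sketch (assumption): the screen-space ray direction is the difference between the
    // projected offset point and the ray origin, rescaled so that one step advances
    // exactly one pixel on the dominant screen axis.
    float3 rayDirSS = sampledPosSS - rayOrigin;
    rayDirSS /= max(abs(rayDirSS.x), abs(rayDirSS.y));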
@@ -206,27 +209,20 @@ void TRACE_GLOBAL_ILLUMINATION(uint3 dispatchThreadId : SV_DispatchThreadID, uin
206209 uint2 currentCoord = dispatchThreadId.xy;
207210
208211#if HALF_RES
209- // Fetch the mipoffset for the second mip (half res)
210- int2 mipOffset = _DepthPyramidMipLevelOffsets[1];
211- // Grab the depth of the half res pixel (better than grabbing a random one across the 4 candidates or averaging)
212- float deviceDepth = LOAD_TEXTURE2D_X(_DepthTexture, mipOffset + currentCoord).x;
213212 // Compute the full resolution pixel coordinate used to read the full resolution inputs (depth, normals, etc.)
214- uint2 fullResCoord = currentCoord * 2;
215- #else
213+ currentCoord = currentCoord * 2;
214+ #endif
215+
216216 // Read the depth value as early as possible
217217 float deviceDepth = LOAD_TEXTURE2D_X(_DepthTexture, currentCoord).x;
218- #endif
218+
219219 // Initialize the hitpoint texture to a miss
220- _IndirectDiffuseHitPointTextureRW[COORD_TEXTURE2D_X(currentCoord )] = float2(99.0, 0.0);
220+ _IndirectDiffuseHitPointTextureRW[COORD_TEXTURE2D_X(dispatchThreadId.xy )] = float2(99.0, 0.0);
221221
222222 // Read the pixel normal
223223 NormalData normalData;
224- #if HALF_RES
225- // For half resolution, we take the top left corner (0, 0) in the upscaled 2x2 pixel neighborhood
226- DecodeFromNormalBuffer(fullResCoord.xy, normalData);
227- #else
228224 DecodeFromNormalBuffer(currentCoord.xy, normalData);
229- #endif
225+
230226 // Generate a new direction to follow
231227 float2 newSample;
232228 newSample.x = GetBNDSequenceSample(currentCoord.xy, _RaytracingFrameIndex, 0);
@@ -241,11 +237,7 @@ void TRACE_GLOBAL_ILLUMINATION(uint3 dispatchThreadId : SV_DispatchThreadID, uin
241237 // If this is a background pixel, we flag the ray as dead (we also try to defer the depth buffer read as late as possible)
242238 bool killRay = deviceDepth == UNITY_RAW_FAR_CLIP_VALUE;
243239 // Convert this to a world space position (camera relative)
244- #if HALF_RES
245- PositionInputs posInput = GetPositionInput(fullResCoord, _ScreenSize.zw, deviceDepth, UNITY_MATRIX_I_VP, GetWorldToViewMatrix(), 0);
246- #else
247240 PositionInputs posInput = GetPositionInput(currentCoord, _ScreenSize.zw, deviceDepth, UNITY_MATRIX_I_VP, GetWorldToViewMatrix(), 0);
248- #endif
249241
250242 // Compute the view direction (world space)
251243 float3 viewWS = GetWorldSpaceNormalizeViewDir(posInput.positionWS);
@@ -266,7 +258,7 @@ void TRACE_GLOBAL_ILLUMINATION(uint3 dispatchThreadId : SV_DispatchThreadID, uin
266258 // recompute it using the last value of 't', which would result in an overshoot.
267259 // It also needs to be precisely at the center of the pixel to avoid artifacts.
268260 float2 hitPositionNDC = floor(rayPos.xy) * _ScreenSize.zw + (0.5 * _ScreenSize.zw); // Should we precompute the half-texel bias? We seem to use it a lot.
269- _IndirectDiffuseHitPointTextureRW[COORD_TEXTURE2D_X(currentCoord )] = hitPositionNDC;
261+ _IndirectDiffuseHitPointTextureRW[COORD_TEXTURE2D_X(dispatchThreadId.xy )] = hitPositionNDC;
270262 }
271263}
272264
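With this change the half-res path uses a single convention: results are written at the half-res dispatch coordinate, while depth, normals and positions are fetched at the top-left full-res pixel of the matching 2x2 neighborhood (instead of reading mip 1 of the depth pyramid). A minimal sketch of that mapping, with an illustrative helper name:

    // Sketch: map a half-res output pixel to the full-res pixel used for all the reads.
    // HalfResToFullRes is illustrative and not part of the shader.
    uint2 HalfResToFullRes(uint2 halfResCoord)
    {
        // Top-left pixel of the 2x2 full-res neighborhood covered by this half-res pixel.
        return halfResCoord * 2;
    }

    // Usage pattern mirrored from the kernel above:
    //   uint2 outputCoord = dispatchThreadId.xy;           // half res: where results are stored
    //   uint2 readCoord   = HalfResToFullRes(outputCoord); // full res: where depth/normals are fetched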
@@ -310,19 +302,14 @@ void REPROJECT_GLOBAL_ILLUMINATION(uint3 dispatchThreadId : SV_DispatchThreadID,
310302 uint2 currentCoord = groupId * INDIRECT_DIFFUSE_TILE_SIZE + groupThreadId;
311303
312304#if HALF_RES
313- // Fetch the mipoffset for the second mip (given that we are in half res)
314- int2 mipOffset = _DepthPyramidMipLevelOffsets[1];
315- float deviceDepth = LOAD_TEXTURE2D_X(_DepthTexture, mipOffset + currentCoord).x;
316-
317305 // Compute the full resolution pixel coordinate used to read the full resolution inputs (depth, normals, etc.)
318- uint2 fullResCoord = currentCoord * 2;
319- #else
320- // Fetch the depth of the current pixel
321- float deviceDepth = LOAD_TEXTURE2D_X(_DepthTexture, currentCoord).x;
306+ currentCoord = currentCoord * 2;
322307#endif
323308
309+ float deviceDepth = LOAD_TEXTURE2D_X(_DepthTexture, currentCoord).x;
310+
324311 // Read the NDC position of the hit point that we need to fetch from
325- float2 hitPositionNDC = LOAD_TEXTURE2D_X(_IndirectDiffuseHitPointTexture, currentCoord ).xy;
312+ float2 hitPositionNDC = LOAD_TEXTURE2D_X(_IndirectDiffuseHitPointTexture, dispatchThreadId.xy ).xy;
326313
327314 // Grab the depth of the hit point
328315 float hitPointDepth = LOAD_TEXTURE2D_X(_DepthTexture, hitPositionNDC * _ScreenSize.xy).x;
@@ -336,7 +323,7 @@ void REPROJECT_GLOBAL_ILLUMINATION(uint3 dispatchThreadId : SV_DispatchThreadID,
336323
337324 // Fetch the motion vector of the current target pixel
338325 float2 motionVectorNDC;
339- DecodeMotionVector(SAMPLE_TEXTURE2D_X_LOD(_CameraMotionVectorsTexture, s_linear_clamp_sampler, min( hitPositionNDC, 1.0f - 0.5f * _ScreenSize.zw) * _RTHandleScale.xy , 0), motionVectorNDC);
326+ DecodeMotionVector(SAMPLE_TEXTURE2D_X_LOD(_CameraMotionVectorsTexture, s_linear_clamp_sampler, hitPositionNDC, 0), motionVectorNDC);
340327
341328 float2 prevFrameNDC = hitPositionNDC - motionVectorNDC;
342329 float2 prevFrameUV = prevFrameNDC * _ColorPyramidUvScaleAndLimitPrevFrame.xy;
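For context, a hedged sketch of how such a reprojected UV is typically consumed in HDRP-style code; that this pass samples _ColorPyramidTexture exactly like this, and that the .zw components hold the UV clamp limit, are assumptions:

    // Sketch (assumption): fetch last frame's color at the reprojected hit point,
    // clamped to the valid region of the previous frame's color pyramid.
    float3 previousColor = SAMPLE_TEXTURE2D_X_LOD(_ColorPyramidTexture, s_linear_clamp_sampler,
                               min(prevFrameUV, _ColorPyramidUvScaleAndLimitPrevFrame.zw), 0).rgb;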
@@ -366,20 +353,17 @@ void REPROJECT_GLOBAL_ILLUMINATION(uint3 dispatchThreadId : SV_DispatchThreadID,
366353
367354 // We need to recreate the direction that was generated
368355 float2 newSample;
369- newSample.x = GetBNDSequenceSample(currentCoord.xy, _RaytracingFrameIndex , 0);
370- newSample.y = GetBNDSequenceSample(currentCoord.xy, _RaytracingFrameIndex , 1);
356+ newSample.x = GetBNDSequenceSample(currentCoord.xy, _IndirectDiffuseFrameIndex , 0);
357+ newSample.y = GetBNDSequenceSample(currentCoord.xy, _IndirectDiffuseFrameIndex , 1);
371358
372359 // Read the pixel normal
373360 NormalData normalData;
374- #if HALF_RES
375- // For half resolution, we take the top left corner (0, 0) in the upscaled 2x2 pixel neighborhood
376- DecodeFromNormalBuffer(fullResCoord.xy, normalData);
377- #else
378361 DecodeFromNormalBuffer(currentCoord.xy, normalData);
379- #endif
380362
363+ #ifdef PERCEPTUAL_SPACE
381364 // We tone map the signal. Due to the very small budget for denoising, we need to compress the range of the signal
382365 color = color / (1.0 + color);
366+ #endif
383367
384368 // Re-compute the direction that was generated in the trace pass
385369 float3 sampleDir = SampleHemisphereCosine(newSample.x, newSample.y, normalData.normalWS);
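SampleHemisphereCosine appears to be the SRP core sampling helper; for reference, a minimal sketch of the conventional cosine-weighted construction it corresponds to (the real helper may build its frame differently):

    // Sketch: cosine-weighted direction around normalWS from two uniform samples u1, u2 in [0,1).
    // GetLocalFrame is assumed to return an orthonormal tangent-to-world basis as rows.
    float3 SampleHemisphereCosineSketch(float u1, float u2, float3 normalWS)
    {
        float r   = sqrt(u1);
        float phi = TWO_PI * u2;
        // Local direction: z aligned with the normal, pdf = cos(theta) / PI.
        float3 localDir = float3(r * cos(phi), r * sin(phi), sqrt(max(0.0, 1.0 - u1)));
        return mul(localDir, GetLocalFrame(normalWS));
    }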
@@ -392,8 +376,8 @@ void REPROJECT_GLOBAL_ILLUMINATION(uint3 dispatchThreadId : SV_DispatchThreadID,
392376 // We are simply interested in knowing whether the intersected pixel was moving, so we multiply it by a big number
393377 // TODO: make this process not binary
394378 // Write the output to the target pixel
395- _IndirectDiffuseTexture0RW[COORD_TEXTURE2D_X(currentCoord )] = float4(outYSH);
396- _IndirectDiffuseTexture1RW[COORD_TEXTURE2D_X(currentCoord )] = float4(outCoCg, invalid ? 0.0 : 1.0, length(motionVectorNDC * 10000.0f));
379+ _IndirectDiffuseTexture0RW[COORD_TEXTURE2D_X(dispatchThreadId.xy )] = float4(outYSH);
380+ _IndirectDiffuseTexture1RW[COORD_TEXTURE2D_X(dispatchThreadId.xy )] = float4(outCoCg, invalid ? 0.0 : 1.0, length(motionVectorNDC * 10000.0f));
397381}
398382
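The PERCEPTUAL_SPACE blocks above and in the resolve kernel below form a reversible Reinhard-style range compression: the signal is compressed before blending/denoising (to fit the very small denoising budget) and expanded back afterwards. A minimal sketch of the pair, assuming a non-negative input; the helper names are illustrative:

    // Forward map: compresses [0, +inf) into [0, 1) so the denoiser works on a bounded signal.
    float3 CompressRange(float3 c) { return c / (1.0 + c); }

    // Exact inverse of the forward map (valid for c in [0, 1)), applied after denoising.
    float3 ExpandRange(float3 c)   { return c / (1.0 - c); }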
399383void ConvertYCoCgToRGBUtil(float4 inYSH, float2 inCoCg, float3 inNormal, out float3 outColor)
@@ -423,54 +407,47 @@ void CONVERT_YCOCG_TO_RGB(uint3 dispatchThreadId : SV_DispatchThreadID, uint2 gr
423407 UNITY_XR_ASSIGN_VIEW_INDEX(dispatchThreadId.z);
424408
425409 // Fetch the current pixel coordinate
426- uint2 currentCoord = groupId * INDIRECT_DIFFUSE_TILE_SIZE + groupThreadId ;
410+ uint2 currentCoord = dispatchThreadId.xy ;
427411
428412 // If the depth of this pixel is the depth of the background, we can end the process right away
429413#if HALF_RES
430- // Fetch the mipoffset for the second mip (given that we are in half res)
431- int2 mipOffset = _DepthPyramidMipLevelOffsets[1];
432- float deviceDepth = LOAD_TEXTURE2D_X(_DepthTexture, mipOffset + currentCoord).x;
414+ currentCoord = currentCoord * 2;
415+ #endif
433416
434- // Compute the full resolution pixel for the inputs that do not have a pyramid
435- uint2 fullResCoord = currentCoord * 2;
436- #else
437417 // Fetch the depth of the current pixel
438418 float deviceDepth = LOAD_TEXTURE2D_X(_DepthTexture, currentCoord).x;
439- #endif
419+
440420 if (deviceDepth == UNITY_RAW_FAR_CLIP_VALUE)
441421 {
442- _IndirectDiffuseTexture0RW[COORD_TEXTURE2D_X(currentCoord )] = float4(0.0, 0.0, 0.0, 0.0);
422+ _IndirectDiffuseTexture0RW[COORD_TEXTURE2D_X(dispatchThreadId.xy )] = float4(0.0, 0.0, 0.0, 0.0);
443423 return;
444424 }
445425
446426 // Fetch the normal
447427 NormalData normalData;
448- #if HALF_RES
449- // For half resolution, we take the top left corner (0, 0) in the upscaled 2x2 pixel neighborhood
450- DecodeFromNormalBuffer(fullResCoord.xy, normalData);
451- #else
452428 DecodeFromNormalBuffer(currentCoord.xy, normalData);
453- #endif
454429
455430 // Convert the signal back to a color
456431 float3 color;
457- float4 ySH = _IndirectDiffuseTexture0RW[COORD_TEXTURE2D_X(currentCoord )];
458- float3 cocgB = LOAD_TEXTURE2D_X(_IndirectDiffuseTexture1, currentCoord ).xyz;
432+ float4 ySH = _IndirectDiffuseTexture0RW[COORD_TEXTURE2D_X(dispatchThreadId.xy )];
433+ float3 cocgB = LOAD_TEXTURE2D_X(_IndirectDiffuseTexture1, dispatchThreadId.xy ).xyz;
459434 ConvertYCoCgToRGBUtil(ySH, cocgB.xy, normalData.normalWS, color);
460435
436+ #ifdef PERCEPTUAL_SPACE
461437 // We invert the tonemap
462438 color = color / (1.0 - color);
463439
464440 // The multiplication is wrong, but with all the approximations we need to compensate a bit for
465441 // the fact that the signal was significantly attenuated (due to blurring in tonemapped space to reduce the blobbiness).
466442 // This has been experimentally tested. However, it needs more testing and should potentially be reverted if found more harmful than useful.
467- color *= (lerp(2.5, 1.0, cocgB.z));
443+ color *= (lerp(5.0, 1.0, cocgB.z));
444+ #endif
468445
469446 // Does this pixel receive SSGI?
470447 uint stencilValue = GetStencilValue(LOAD_TEXTURE2D_X(_StencilTexture, currentCoord));
471448 if ((stencilValue & _SsrStencilBit) == 0)
472449 cocgB.z = 0.0;
473450
474451 // Output the color as well as the blend factor
475- _IndirectDiffuseTexture0RW[COORD_TEXTURE2D_X(currentCoord )] = float4(color, cocgB.z);
452+ _IndirectDiffuseTexture0RW[COORD_TEXTURE2D_X(dispatchThreadId.xy )] = float4(color, cocgB.z);
476453}
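ConvertYCoCgToRGBUtil takes the Y channel as SH coefficients plus the surface normal (its body is not shown in this diff), presumably evaluating the luma directionally before the chroma reconstruction; for reference, a sketch of the plain YCoCg to RGB step the chroma part corresponds to, assuming the usual YCoCg definition:

    // Sketch: standard YCoCg -> RGB (Y = luma, Co/Cg = chroma offsets).
    float3 YCoCgToRGBSketch(float y, float2 coCg)
    {
        float co = coCg.x;
        float cg = coCg.y;
        return float3(y + co - cg,  // R
                      y + cg,       // G
                      y - co - cg); // B
    }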