Files
UnrealEngineUWP/Engine/Shaders/Private/PlanarReflectionShared.ush
jon cain c35250b1f4 Changes to various volumetric areas to allow them to work with Orthographic rendering. This includes:
Sky, Cloud and Fog
- Created 2 pathways for resolving these volumetrics, a typical Ortho view which is tightened to the camera width and height (ignoring depth) and a pseudo FOV option which resolves a 90 degree viewing angle of the sky for a wider scene view.
- Enabled AO resolving for cloud etc definition.
- Planar reflections fixed to allow this on various surfaces.
- Motion Blur improvements due to large depth diffs in ortho cameras between near (character) and far (cloud) objects
- Disabled VRT option as it doesn't resolve well without Ortho having access to depth velocities for compensating velocity diffs, and ultimately isn't entirely necessary at this stage.
- Clamped sky materials to the far plane as they will otherwise be clipped. Materials not marked sky but being used for the sky dome can't be recognised by the engine.

#jira UE-184625, UE-196665
#rb Sebastien.Hillaire, tim.doerries

[CL 33069489 by jon cain in ue5-main branch]
2024-04-18 08:55:27 -04:00

84 lines
4.8 KiB
Plaintext

// Copyright Epic Games, Inc. All Rights Reserved.
/*=============================================================================
PlanarReflectionShared.ush
=============================================================================*/
// Computes the planar reflection contribution for a shading point.
// TranslatedWorldPosition: shading point position in translated world space.
// WorldNormal: surface normal used for the angle fade and for bending the mirrored reflection vector.
// Roughness: material roughness; the contribution fades to zero as roughness approaches ~0.3.
// Returns: rgb = reflection color (already attenuated by the roughness fade),
//          a   = composite weight used to blend against other reflection methods.
half4 ComputePlanarReflections(float3 TranslatedWorldPosition, half3 WorldNormal, half Roughness)
{
half4 OutPlanarReflection = 0;
// Signed distance from the shading point to the reflection plane (plane stored as xyz = normal, w = offset).
float PlaneDistance = dot(PlanarReflectionStruct.ReflectionPlane, float4(TranslatedWorldPosition, -1));
// Linear fade with distance from the plane; PlanarReflectionParameters.xy encode the fade scale/bias.
half DistanceFade = 1 - saturate(abs(PlaneDistance) * PlanarReflectionStruct.PlanarReflectionParameters.x + PlanarReflectionStruct.PlanarReflectionParameters.y);
float3 PlaneOriginToWorldPosition = TranslatedWorldPosition - PlanarReflectionStruct.PlanarReflectionOrigin.xyz;
// Fade out beyond the reflection plane's extents along its local X and Y axes
// (axis vectors in xyz, half-extent in w), reusing the same fade scale as the plane-distance fade.
float XAxisDistance = dot(PlaneOriginToWorldPosition, PlanarReflectionStruct.PlanarReflectionXAxis.xyz);
half XAxisFade = saturate((PlanarReflectionStruct.PlanarReflectionXAxis.w - abs(XAxisDistance)) * PlanarReflectionStruct.PlanarReflectionParameters.x);
float YAxisDistance = dot(PlaneOriginToWorldPosition, PlanarReflectionStruct.PlanarReflectionYAxis.xyz);
half YAxisFade = saturate((PlanarReflectionStruct.PlanarReflectionYAxis.w - abs(YAxisDistance)) * PlanarReflectionStruct.PlanarReflectionParameters.x);
DistanceFade *= XAxisFade * YAxisFade;
// Fade by how well the surface normal aligns with the reflection plane normal;
// PlanarReflectionParameters2.xy are the angle fade scale/bias.
half AngleFade = saturate(dot(PlanarReflectionStruct.ReflectionPlane.xyz, WorldNormal) * PlanarReflectionStruct.PlanarReflectionParameters2.x + PlanarReflectionStruct.PlanarReflectionParameters2.y);
// Planar reflections are only valid for smooth surfaces: full strength up to roughness .2, gone by .3.
half RoughnessFade = 1 - saturate((Roughness - .2f) * 10.0f);
half FinalFade = DistanceFade * AngleFade * RoughnessFade;
BRANCH
if (FinalFade > 0)
{
// CameraToPixel in the main view is what we used as ReflectionVector when rendering the reflection pass to PlanarReflectionTexture
float3 CameraToPixel = GetCameraVectorFromTranslatedWorldPosition(ResolvedView, TranslatedWorldPosition);
// Reflect the effective ReflectionVector in mirrored space to get the original camera vector
float3 MirroredCameraVector = reflect(CameraToPixel, -PlanarReflectionStruct.ReflectionPlane.xyz);
// Transform the GBuffer normal into mirrored space
half3 MirroredNormal = mul(WorldNormal, PlanarReflectionStruct.InverseTransposeMirrorMatrix).xyz;
// Reflect the original camera vector across the GBuffer normal in mirrored space
half3 MirroredReflectionVectorOffNormal = reflect(MirroredCameraVector, MirroredNormal);
// At this point we have a new reflection vector off of the GBuffer normal, and we need to approximate its intersection with the scene
// An accurate intersection would ray trace the planar reflection depth buffer
// As an approximation we are just intersecting with a user defined sphere
// PlanarReflectionParameters.z is the user-defined sphere radius (distance from receiver).
float3 VirtualReflectionSpherePosition = TranslatedWorldPosition + MirroredReflectionVectorOffNormal * PlanarReflectionStruct.PlanarReflectionParameters.z;
// Transform the intersection position into view space
float3 ViewVirtualReflectionSpherePosition = mul(float4(VirtualReflectionSpherePosition, 1), ResolvedView.TranslatedWorldToView).xyz;
// Transform the intersection position into clip space using the same projection matrix used to render PlanarReflectionTexture
float4 ClipVirtualReflectionSpherePosition = mul(float4(ViewVirtualReflectionSpherePosition, 1), PlanarReflectionStruct.ProjectionWithExtraFOV[ResolvedView.StereoPassIndex]);
// NOTE(review): ProjectionWithExtraFOV above is indexed by StereoPassIndex unconditionally,
// while the screen scale/bias below only uses StereoPassIndex when bIsStereo is set —
// presumably both arrays hold identical mono data at index 0; confirm against the C++ setup code.
uint EyeIndex = 0;
if (PlanarReflectionStruct.bIsStereo)
{
EyeIndex = ResolvedView.StereoPassIndex;
}
// Clamp NDC to the valid screen bound of the reflection render to avoid sampling off-screen garbage.
half2 NDC = clamp(ClipVirtualReflectionSpherePosition.xy / ClipVirtualReflectionSpherePosition.w, -PlanarReflectionStruct.PlanarReflectionScreenBound, PlanarReflectionStruct.PlanarReflectionScreenBound);
// Map clamped NDC into the per-eye viewport UV of PlanarReflectionTexture.
half2 ViewportUV = NDC * PlanarReflectionStruct.PlanarReflectionScreenScaleBias[EyeIndex].xy + PlanarReflectionStruct.PlanarReflectionScreenScaleBias[EyeIndex].zw;
// Explicit mip 0 sample: no screen-space derivatives are available for the redirected UV.
half4 PlanarReflectionTextureValue = Texture2DSampleLevel(
PlanarReflectionStruct.PlanarReflectionTexture,
#if SUPPORTS_INDEPENDENT_SAMPLERS
View.SharedTrilinearClampedSampler,
#else
PlanarReflectionStruct.PlanarReflectionSampler,
#endif
ViewportUV,
0);
// Fade out in regions of the planar reflection that weren't written to, so we can composite with other reflection methods
FinalFade *= PlanarReflectionTextureValue.a;
OutPlanarReflection.rgb = PlanarReflectionTextureValue.rgb * RoughnessFade; // Add roughness fade to color to provide smooth color transition.
OutPlanarReflection.a = FinalFade;
}
return OutPlanarReflection;
}
#if (FEATURE_LEVEL <= FEATURE_LEVEL_ES3_1)
// Mobile (ES3.1-and-below) entry point for planar reflections.
// Forwards to ComputePlanarReflections and, when the pass outputs in gamma
// space, brings the captured color back to linear before returning it.
half4 GetPlanarReflection(float3 TranslatedWorldPosition, half3 WorldNormal, half Roughness)
{
half4 Reflection = ComputePlanarReflections(TranslatedWorldPosition, WorldNormal, Roughness);
#if OUTPUT_GAMMA_SPACE
// The reflection capture is also stored in gamma space; squaring the color
// is the cheap gamma-to-linear approximation used here.
Reflection.rgb = Reflection.rgb * Reflection.rgb;
#endif
return Reflection;
}
#endif