Pixel shader HeightFog for mobile

I am trying to add a pixel shader version of height fog for mobile. This is needed for certain floor meshes because of their low vertex count. I used exactly the same approach that BasePassPixelShader.usf uses, just in MobileBasePassPixelShader.usf.
The code itself compiles fine, but for some reason all of the fog uniform values are garbage. Just outputting ExponentialFogColorParameter.rgb as the output color produces NaN/Inf artifacts and everything is extremely bright. So the question is: what am I missing? I have included the right file and I am simply calling CalculateHeightFog(MaterialParameters.WorldPosition_CamRelative); in the pixel shader, so the change is small.
The bug happens in Mobile Preview; on an actual iOS device it simply crashes.
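
To make the change easy to spot (the full modified file is pasted below), here is a minimal sketch of the only two edits; the note about what the stock shader normally does is from memory of the 4.18-era file, so treat it as an assumption:

// MobileBasePassPixelShader.usf -- the two edits described above
#include "HeightFogCommon.ush"   // added next to the existing includes

// Fog is now evaluated per pixel instead of being read from the
// vertex-interpolated fog the stock shader normally uses:
half4 VertexFog = CalculateHeightFog(MaterialParameters.WorldPosition_CamRelative);

// Debug test mentioned above -- outputting the fog color directly
// already shows the garbage/NaN values:
// OutColor = half4(ExponentialFogColorParameter.rgb, 1.0);

The full file as it currently stands: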
// Copyright 1998-2018 Epic Games, Inc. All Rights Reserved.

/*=============================================================================
	MobileBasePassPixelShader.usf: Base pass pixel shader used with forward shading
=============================================================================*/

#include "Common.ush"
#include "SHCommon.ush"
#include "MobileBasePassCommon.ush"
#include "/Engine/Generated/Material.ush"
#include "/Engine/Generated/VertexFactory.ush"
#include "ReflectionEnvironmentShared.ush"
#include "LightmapCommon.ush"  
#include "BRDF.ush"
#include "SHCommon.ush"
#include "ShadowFilteringCommon.ush"
#include "DynamicLightingCommon.ush"
#include "PlanarReflectionShared.ush"
#include "HeightFogCommon.ush"

// Add fading CSM plane:
#define FADE_CSM 1

#if MAX_DYNAMIC_POINT_LIGHTS > 0
#if VARIABLE_NUM_DYNAMIC_POINT_LIGHTS
int NumDynamicPointLights;
#endif
float4 LightPositionAndInvRadius[MAX_DYNAMIC_POINT_LIGHTS];
float4 LightColorAndFalloffExponent[MAX_DYNAMIC_POINT_LIGHTS];
#endif

#ifndef MOBILE_QL_FORCE_FULLY_ROUGH
#define MOBILE_QL_FORCE_FULLY_ROUGH 0
#endif
#ifndef MOBILE_QL_FORCE_NONMETAL
#define MOBILE_QL_FORCE_NONMETAL 0
#endif
#ifndef MOBILE_QL_FORCE_LQ_REFLECTIONS
#define MOBILE_QL_FORCE_LQ_REFLECTIONS 0
#endif

#ifndef MOBILE_CSM_QUALITY
#define MOBILE_CSM_QUALITY 2
#endif

#define FULLY_ROUGH (MATERIAL_FULLY_ROUGH || MOBILE_QL_FORCE_FULLY_ROUGH)
#define NONMETAL (MATERIAL_NONMETAL || MOBILE_QL_FORCE_NONMETAL)
#define HQ_REFLECTIONS (MATERIAL_HQ_FORWARD_REFLECTIONS && !MOBILE_QL_FORCE_LQ_REFLECTIONS)

#define ALLOW_CUBE_REFLECTIONS 0

/** Prenormalized capture of the scene that's closest to the object being rendered. */
#if !FULLY_ROUGH
TextureCube ReflectionCubemap;
SamplerState ReflectionCubemapSampler;
float3 InvReflectionCubemapAverageBrightness;
	#if HQ_REFLECTIONS
	#define MAX_HQ_REFLECTIONS 3

	TextureCube ReflectionCubemap1;
	SamplerState ReflectionCubemapSampler1;
	TextureCube ReflectionCubemap2;
	SamplerState ReflectionCubemapSampler2;
	float4 ReflectionPositionsAndRadii[MAX_HQ_REFLECTIONS];
		#if ALLOW_CUBE_REFLECTIONS
		float4x4 CaptureBoxTransformArray[MAX_HQ_REFLECTIONS];
		float4 CaptureBoxScalesArray[MAX_HQ_REFLECTIONS];
		#endif
	#endif
#endif

half PhongApprox( half Roughness, half RoL )
{
	half a = Roughness * Roughness;			// 1 mul
	//!! Ronin Hack?
	a = max(a, 0.008);						// avoid underflow in FP16, next sqr should be bigger than 6.1e-5
	half a2 = a * a;						// 1 mul
	half rcp_a2 = rcp(a2);					// 1 rcp
	//half rcp_a2 = exp2( -6.88886882 * Roughness + 6.88886882 );

	// Spherical Gaussian approximation: pow( x, n ) ~= exp( (n + 0.775) * (x - 1) )
	// Phong: n = 0.5 / a2 - 0.5
	// 0.5 / ln(2), 0.275 / ln(2)
	half c = 0.72134752 * rcp_a2 + 0.39674113;	// 1 mad
	half p = rcp_a2 * exp2(c * RoL - c);		// 2 mad, 1 exp2, 1 mul
	// Total 7 instr
	return min(p, rcp_a2);						// Avoid overflow/underflow on Mali GPUs
}

#if !FULLY_ROUGH
#if HQ_REFLECTIONS

float4 GetSphereCaptureVector(float3 ReflectionVector, float3 WorldPosition, float4 CapturePositionAndRadius)
{
	float4 ProjectedCaptureVector;
	ProjectedCaptureVector.w = 0;
	ProjectedCaptureVector.xyz = ReflectionVector;

	float3 RayDirection = ReflectionVector;
	float ProjectionSphereRadius = CapturePositionAndRadius.w * 1.2f;
	float SphereRadiusSquared = ProjectionSphereRadius * ProjectionSphereRadius;

	float3 ReceiverToSphereCenter = WorldPosition - CapturePositionAndRadius.xyz;
	float ReceiverToSphereCenterSq = dot(ReceiverToSphereCenter, ReceiverToSphereCenter);

	float3 CaptureVector = WorldPosition - CapturePositionAndRadius.xyz;
	float CaptureVectorLength = sqrt(dot(CaptureVector, CaptureVector));
	float NormalizedDistanceToCapture = saturate(CaptureVectorLength / CapturePositionAndRadius.w);

	// Find the intersection between the ray along the reflection vector and the capture's sphere
	float3 QuadraticCoef;
	QuadraticCoef.x = 1;
	QuadraticCoef.y = 2 * dot(RayDirection, ReceiverToSphereCenter);
	QuadraticCoef.z = ReceiverToSphereCenterSq - SphereRadiusSquared;

	float Determinant = QuadraticCoef.y * QuadraticCoef.y - 4 * QuadraticCoef.z;

	BRANCH
	// Only continue if the ray intersects the sphere
	if (Determinant >= 0)
	{
		float FarIntersection = (sqrt(Determinant) - QuadraticCoef.y) * 0.5;

		float3 IntersectPosition = WorldPosition + FarIntersection * RayDirection;
		ProjectedCaptureVector.xyz = IntersectPosition - CapturePositionAndRadius.xyz;
		// Fade out based on distance to capture
		ProjectedCaptureVector.w = 1.0 - smoothstep(.6, 1, NormalizedDistanceToCapture);
	}
	return ProjectedCaptureVector;
}

#if ALLOW_CUBE_REFLECTIONS
float4 GetBoxCaptureVector(float3 ReflectionVector, float3 WorldPosition, float4 CapturePositionAndRadius, float4x4 CaptureBoxTransform, float4 CaptureBoxScales)
{
	float4 ProjectedCaptureVector;
	ProjectedCaptureVector.w = 0;
	ProjectedCaptureVector.xyz = ReflectionVector;

	float3 RayDirection = ReflectionVector * CapturePositionAndRadius.w * 2;

	// Transform the ray into the local space of the box, where it is an AABB with mins at -1 and maxs at 1
	float3 LocalRayStart = mul(float4(WorldPosition, 1), CaptureBoxTransform).xyz;
	float3 LocalRayDirection = mul(RayDirection, (float3x3)CaptureBoxTransform);

	// Intersections.y is the intersection with the far side of the box
	float2 Intersections = LineBoxIntersect(LocalRayStart, LocalRayStart + LocalRayDirection, -1, 1);

	{
		// Compute the reprojected vector
		float3 IntersectPosition = WorldPosition + Intersections.y * RayDirection;
		ProjectedCaptureVector.xyz = IntersectPosition - CapturePositionAndRadius.xyz;

		// Compute the distance from the receiving pixel to the box for masking
		// Apply local to world scale to take scale into account without transforming back to world space
		// Shrink the box by the transition distance (BoxScales.w) so that the fade happens inside the box influence area
		float4 BoxScales = CaptureBoxScales;
		float BoxDistance = ComputeDistanceFromBoxToPoint(-(BoxScales.xyz - .5f * BoxScales.w), BoxScales.xyz - .5f * BoxScales.w, LocalRayStart * BoxScales.xyz);
		// Setup a fade based on receiver distance to the box, hides the box influence shape
		float BoxDistanceAlpha = 1.0 - smoothstep(0, .7f * BoxScales.w, BoxDistance);
		// Setup a fade based on reflection ray intersection distance, hides the discontinuity between rays that just barely 
		float RayDistanceAlpha = smoothstep(0, BoxScales.w, Intersections.y * CapturePositionAndRadius.w * 2);
		ProjectedCaptureVector.w = BoxDistanceAlpha * RayDistanceAlpha;
	}
	return ProjectedCaptureVector;
}
#endif

half3 BlendReflectionCapture(FMaterialPixelParameters MaterialParameters, half Roughness, TextureCube ReflectionCube, SamplerState ReflectionSampler, half MipLevel, int ReflectionIndex, half InvReflectionCubemapAverageBrightness, inout half BlendFactor)
{
	half4 ProjectedCaptureVector;
#if ALLOW_CUBE_REFLECTIONS
	if (CaptureBoxScalesArray[ReflectionIndex].w > 0)
	{
		ProjectedCaptureVector = GetBoxCaptureVector(MaterialParameters.ReflectionVector, MaterialParameters.AbsoluteWorldPosition, ReflectionPositionsAndRadii[ReflectionIndex], CaptureBoxTransformArray[ReflectionIndex], CaptureBoxScalesArray[ReflectionIndex]);
	}
	else
#endif
	{
		ProjectedCaptureVector = GetSphereCaptureVector(MaterialParameters.ReflectionVector, MaterialParameters.AbsoluteWorldPosition, ReflectionPositionsAndRadii[ReflectionIndex]);
	}

	// Fetch from cubemap and convert to linear HDR
	float4 Reflection = ReflectionCube.SampleLevel(ReflectionSampler, ProjectedCaptureVector.xyz, MipLevel);
	half3 SpecularIBL = RGBMDecode(Reflection, 16.0f);
	SpecularIBL = SpecularIBL * SpecularIBL * (InvReflectionCubemapAverageBrightness * ProjectedCaptureVector.w * BlendFactor);

	BlendFactor = BlendFactor * (1.0 - ProjectedCaptureVector.w);
	return SpecularIBL;
}

half3 BlendReflectionCaptures(FMaterialPixelParameters MaterialParameters, half Roughness)
{
	// Compute fractional mip from roughness
	half AbsoluteSpecularMip = ComputeReflectionCaptureMipFromRoughness(Roughness, ResolvedView.ReflectionCubemapMaxMip);

	half3 SpecularIBL = half3(0, 0, 0);
	half BlendFactor = 1;

	SpecularIBL += BlendReflectionCapture(MaterialParameters, Roughness, ReflectionCubemap, ReflectionCubemapSampler, AbsoluteSpecularMip, 0, InvReflectionCubemapAverageBrightness.x, BlendFactor);
	SpecularIBL += BlendReflectionCapture(MaterialParameters, Roughness, ReflectionCubemap1, ReflectionCubemapSampler1, AbsoluteSpecularMip, 1, InvReflectionCubemapAverageBrightness.y, BlendFactor);
	SpecularIBL += BlendReflectionCapture(MaterialParameters, Roughness, ReflectionCubemap2, ReflectionCubemapSampler2, AbsoluteSpecularMip, 2, InvReflectionCubemapAverageBrightness.z, BlendFactor);

	return SpecularIBL;
}
#endif // HQ_REFLECTIONS

float4 GetPlanarReflection(float3 WorldPosition, float3 WorldNormal, float Roughness)
{
	float4 PlanarReflection = ComputePlanarReflections(WorldPosition, WorldNormal, Roughness, PlanarReflectionSampler);
#if OUTPUT_GAMMA_SPACE
	// the capture will also be in gamma space, convert to linear:
	PlanarReflection.rgb *= PlanarReflection.rgb;
	// the capture has applied exposure scale so need to cancel that.
#if !MATERIALBLENDING_MODULATE
	PlanarReflection.rgb /= ResolvedView.PreExposure;
#endif
#endif
	return PlanarReflection;
}

half3 GetImageBasedReflectionLighting(FMaterialPixelParameters MaterialParameters, half Roughness)
{
#if HQ_REFLECTIONS
	half3 SpecularIBL = BlendReflectionCaptures(MaterialParameters, Roughness);
#else

	half3 ProjectedCaptureVector = MaterialParameters.ReflectionVector;	

	// Compute fractional mip from roughness
	half AbsoluteSpecularMip = ComputeReflectionCaptureMipFromRoughness(Roughness, ResolvedView.ReflectionCubemapMaxMip);
	// Fetch from cubemap and convert to linear HDR
	half3 SpecularIBL = RGBMDecode(ReflectionCubemap.SampleLevel(ReflectionCubemapSampler, ProjectedCaptureVector, AbsoluteSpecularMip), 16.0f);
	SpecularIBL = SpecularIBL * SpecularIBL * InvReflectionCubemapAverageBrightness.x;
#endif
	
#if WEBGL
	// need a rgb swizzle instead of the existing rgba swizzle, we should add it if another use case comes up. 
	return SpecularIBL.bgr;
#else
	return SpecularIBL;
#endif
}
#endif //!FULLY_ROUGH

half3 FrameBufferBlendOp(half4 Source)
{
#if (COMPILER_GLSL_ES2)
	half4 Dest = FramebufferFetchES2();
	Dest = half4(RGBTDecode8BPC(Dest, DEFAULT_32BPPHDR_ENCODED_RANGE), 0.0f);
#else
	half4 Dest = half4 (0,0,0,0);
#endif

#if MATERIALBLENDING_SOLID
	return Source.rgb;
#elif MATERIALBLENDING_MASKED
	return Source.rgb;
// AlphaComposite will set both MATERIALBLENDING_TRANSLUCENT and MATERIALBLENDING_ALPHACOMPOSITE defines
// so make sure MATERIALBLENDING_ALPHACOMPOSITE is checked first
#elif MATERIALBLENDING_ALPHACOMPOSITE
	return Source.rgb + (Dest.rgb*(1.0 - Source.a));
#elif MATERIALBLENDING_TRANSLUCENT
	return (Source.rgb*Source.a) + (Dest.rgb*(1.0 - Source.a));
#elif MATERIALBLENDING_ADDITIVE
	return Source.rgb + Dest.rgb;
#elif MATERIALBLENDING_MODULATE
	return Source.rgb * Dest.rgb;
#endif
}

void Main( 
	FVertexFactoryInterpolantsVSToPS Interpolants
	, FMobileBasePassInterpolantsVSToPS BasePassInterpolants
	, in float4 SvPosition : SV_Position
	OPTIONAL_IsFrontFace
	, out half4 OutColor	: SV_Target0
	)
{
#if MOBILE_MULTI_VIEW
	ResolvedView = ResolveView(uint(BasePassInterpolants.MultiViewId));
#else
	ResolvedView = ResolveView();
#endif

#if USE_PS_CLIP_PLANE
	clip(BasePassInterpolants.OutClipDistance);
#endif

#if PACK_INTERPOLANTS
	float4 PackedInterpolants[NUM_VF_PACKED_INTERPOLANTS];
	VertexFactoryUnpackInterpolants(Interpolants, PackedInterpolants);
#endif

#if (COMPILER_GLSL_ES2 || COMPILER_GLSL_ES3_1 || COMPILER_GLSL_ES3_1_EXT) && OUTPUT_GAMMA_SPACE && !MOBILE_EMULATION
	// LDR ES2 needs screen vertical flipped
	SvPosition.y = ResolvedView.BufferSizeAndInvSize.y - SvPosition.y - 1;
#endif

	FMaterialPixelParameters MaterialParameters = GetMaterialPixelParameters(Interpolants, SvPosition);
	FPixelMaterialInputs PixelMaterialInputs;
	{
		float4 ScreenPosition = SvPositionToResolvedScreenPosition(SvPosition);
#if WEBGL
		float3 WorldPosition = BasePassInterpolants.PixelPosition.xyz;
		float3 WorldPositionExcludingWPO = BasePassInterpolants.PixelPosition.xyz;
#else
		float3 WorldPosition = BasePassInterpolants.PixelPosition;
		float3 WorldPositionExcludingWPO = BasePassInterpolants.PixelPosition;
#endif
		#if USE_WORLD_POSITION_EXCLUDING_SHADER_OFFSETS
			WorldPositionExcludingWPO = BasePassInterpolants.PixelPositionExcludingWPO;
		#endif
		CalcMaterialParametersEx(MaterialParameters, PixelMaterialInputs, SvPosition, ScreenPosition, bIsFrontFace, WorldPosition, WorldPositionExcludingWPO);
	}
	  
	//Clip if the blend mode requires it.
	GetMaterialCoverageAndClipping(MaterialParameters, PixelMaterialInputs);

	half Opacity = GetMaterialOpacity(PixelMaterialInputs);

	// Store the results in local variables and reuse instead of calling the functions multiple times.
	half3 BaseColor = GetMaterialBaseColor(PixelMaterialInputs);
	half  Metallic = GetMaterialMetallic(PixelMaterialInputs);
	half  Specular = GetMaterialSpecular(PixelMaterialInputs);

#if NONMETAL
	half3 DiffuseColor = BaseColor;
	half SpecularColor = 0.04;
#else
	half DielectricSpecular = 0.08 * Specular;
	half3 DiffuseColor = BaseColor - BaseColor * Metallic;	// 1 mad
	half3 SpecularColor = (DielectricSpecular - DielectricSpecular * Metallic) + BaseColor * Metallic;	// 2 mad
#endif

	half Roughness = GetMaterialRoughness(PixelMaterialInputs);

#if FULLY_ROUGH
	// Factors derived from EnvBRDFApprox( SpecularColor, 1, 1 ) == SpecularColor * 0.4524 - 0.0024
	DiffuseColor += SpecularColor * 0.45;
	SpecularColor = 0;
#else
	half NoV = max( dot( MaterialParameters.WorldNormal, MaterialParameters.CameraVector ), 0 );

#if NONMETAL
	// If nothing is hooked up to Metallic and Specular,
	// then defaults are the same as a non-metal,
	// so this define is safe.
	SpecularColor = EnvBRDFApproxNonmetal( Roughness, NoV );
#else
	SpecularColor = EnvBRDFApprox( SpecularColor, Roughness, NoV );
#endif
#endif

#if MOBILE_EMULATION
	{
		// this feature is only needed for development/editor - we can compile it out for a shipping build (see r.CompileShadersForDevelopment cvar help)
		DiffuseColor = DiffuseColor * ResolvedView.DiffuseOverrideParameter.w + ResolvedView.DiffuseOverrideParameter.xyz;
		SpecularColor = SpecularColor * ResolvedView.SpecularOverrideParameter.w + ResolvedView.SpecularOverrideParameter.xyz;
	}
#endif
	
	half3 Color = 0;
	half IndirectIrradiance = 0;

	#if LQ_TEXTURE_LIGHTMAP
		float2 LightmapUV0, LightmapUV1;
		GetLightMapCoordinates(Interpolants, LightmapUV0, LightmapUV1);

		half4 LightmapColor = GetLightMapColorLQ( LightmapUV0, LightmapUV1, MaterialParameters.WorldNormal );
		Color += LightmapColor.rgb * DiffuseColor;
		IndirectIrradiance = LightmapColor.a;
	#elif CACHED_POINT_INDIRECT_LIGHTING
		#if MATERIALBLENDING_MASKED || MATERIALBLENDING_SOLID

			// Take the normal into account for opaque
			FThreeBandSHVectorRGB PointIndirectLighting;
			PointIndirectLighting.R.V0 = PrecomputedLightingBuffer.IndirectLightingSHCoefficients0[0];
			PointIndirectLighting.R.V1 = PrecomputedLightingBuffer.IndirectLightingSHCoefficients1[0];
			PointIndirectLighting.R.V2 = PrecomputedLightingBuffer.IndirectLightingSHCoefficients2[0];

			PointIndirectLighting.G.V0 = PrecomputedLightingBuffer.IndirectLightingSHCoefficients0[1];
			PointIndirectLighting.G.V1 = PrecomputedLightingBuffer.IndirectLightingSHCoefficients1[1];
			PointIndirectLighting.G.V2 = PrecomputedLightingBuffer.IndirectLightingSHCoefficients2[1];

			PointIndirectLighting.B.V0 = PrecomputedLightingBuffer.IndirectLightingSHCoefficients0[2];
			PointIndirectLighting.B.V1 = PrecomputedLightingBuffer.IndirectLightingSHCoefficients1[2];
			PointIndirectLighting.B.V2 = PrecomputedLightingBuffer.IndirectLightingSHCoefficients2[2];

			FThreeBandSHVector DiffuseTransferSH = CalcDiffuseTransferSH3(MaterialParameters.WorldNormal, 1);

			// Compute diffuse lighting which takes the normal into account
			half3 DiffuseGI = max(half3(0,0,0), DotSH3(PointIndirectLighting, DiffuseTransferSH));

			IndirectIrradiance = Luminance(DiffuseGI);
			Color += DiffuseColor * DiffuseGI;			


		#else 
			
			// Non-directional for translucency
			// Ambient terms packed in xyz
			// Already divided by PI and SH ambient on CPU
			half3 PointIndirectLighting = PrecomputedLightingBuffer.IndirectLightingSHSingleCoefficient;
			half3 DiffuseGI = PointIndirectLighting;

			IndirectIrradiance = Luminance(DiffuseGI);
			Color += DiffuseColor * DiffuseGI;

		#endif
	#endif

	#if !MATERIAL_SHADINGMODEL_UNLIT
		half MaterialAO = GetMaterialAmbientOcclusion(PixelMaterialInputs);
		Color *= MaterialAO;	
		IndirectIrradiance *= MaterialAO;
	#endif

	#if !MATERIAL_SHADINGMODEL_UNLIT
		half Shadow = GetPrimaryPrecomputedShadowMask(Interpolants).r;
		#if DIRECTIONAL_LIGHT_CSM && !MOBILE_MULTI_VIEW
		// Cascaded Shadow Map
		{
			FPCFSamplerSettings Settings;
			Settings.ShadowDepthTexture = MobileDirectionalLight.DirectionalLightShadowTexture;
			Settings.ShadowDepthTextureSampler = MobileDirectionalLight.DirectionalLightShadowSampler;
			Settings.TransitionScale = MobileDirectionalLight.DirectionalLightShadowTransition;
			Settings.ShadowBufferSize = MobileDirectionalLight.DirectionalLightShadowSize;
			Settings.bSubsurface = false;
			Settings.bTreatMaxDepthUnshadowed = false;
			Settings.DensityMulConstant = 0;
			Settings.ProjectionDepthBiasParameters = 0;

			float4 ShadowPosition = float4(0,0,0,0);
			for (int i = 0; i < MAX_MOBILE_SHADOWCASCADES; i++)
			{
				if (MaterialParameters.ScreenPosition.w < MobileDirectionalLight.DirectionalLightShadowDistances[i])
				{
					ShadowPosition = mul(float4(MaterialParameters.ScreenPosition.xyw, 1), MobileDirectionalLight.DirectionalLightScreenToShadow[i]);
					break; // position found.
				}
			}

			// Process CSM only when ShadowPosition is valid.
			if (ShadowPosition.z > 0)
			{
				// Clamp pixel depth in light space for shadowing opaque, because areas of the shadow depth buffer that weren't rendered to will have been cleared to 1
				// We want to force the shadow comparison to result in 'unshadowed' in that case, regardless of whether the pixel being shaded is in front or behind that plane
				float LightSpacePixelDepthForOpaque = min(ShadowPosition.z, 0.99999f);
				Settings.SceneDepth = LightSpacePixelDepthForOpaque;

				#if MOBILE_CSM_QUALITY == 0
					half ShadowMap = ManualNoFiltering(ShadowPosition.xy, Settings);
				#elif MOBILE_CSM_QUALITY == 1
					half ShadowMap = Manual1x1PCF(ShadowPosition.xy, Settings);
				#elif MOBILE_CSM_QUALITY == 2
					half ShadowMap = Manual2x2PCF(ShadowPosition.xy, Settings);
				#else
					#error Unsupported MOBILE_CSM_QUALITY value.
				#endif

				#if FADE_CSM
					float Fade = saturate(MaterialParameters.ScreenPosition.w * MobileDirectionalLight.DirectionalLightDistanceFadeMAD.x + MobileDirectionalLight.DirectionalLightDistanceFadeMAD.y);
					// lerp out shadow based on fade params.
					ShadowMap = lerp(ShadowMap, 1.0, Fade * Fade);
				#endif

				#if MOVABLE_DIRECTIONAL_LIGHT
					Shadow = ShadowMap;
				#else
					Shadow = min(ShadowMap, Shadow);
				#endif
			}
		}
		#endif /* DIRECTIONAL_LIGHT_CSM */

		float NoL = max(0, dot(float3(MaterialParameters.WorldNormal), float3(MobileDirectionalLight.DirectionalLightDirection)));
		float RoL = max(0, dot(float3(MaterialParameters.ReflectionVector), float3(MobileDirectionalLight.DirectionalLightDirection)));

		#if FULLY_ROUGH
			Color += (Shadow * NoL) * MobileDirectionalLight.DirectionalLightColor.rgb * DiffuseColor;
		#else
			Color += (Shadow * NoL) * MobileDirectionalLight.DirectionalLightColor.rgb * (DiffuseColor + SpecularColor * PhongApprox(Roughness, RoL));
			// Environment map has been prenormalized, scale by lightmap luminance
			half3 SpecularIBL = GetImageBasedReflectionLighting(MaterialParameters, Roughness) * IndirectIrradiance;
			#if MATERIAL_PLANAR_FORWARD_REFLECTIONS
				BRANCH
				if (abs(dot(ReflectionPlane.xyz, 1)) > .0001f)
				{
					half4 PlanarReflection = GetPlanarReflection(MaterialParameters.AbsoluteWorldPosition, MaterialParameters.WorldNormal, Roughness);
					// Planar reflections win over reflection environment
					SpecularIBL = lerp(SpecularIBL, PlanarReflection.rgb, PlanarReflection.a);
				}
			#endif
			Color += SpecularIBL * SpecularColor;
		#endif

		#if ENABLE_SKY_LIGHT
			//@mw todo
			// TODO: Also need to do specular.
			Color += GetSkySHDiffuseSimple(MaterialParameters.WorldNormal) * ResolvedView.SkyLightColor.rgb * DiffuseColor;    
		#endif

		#if MAX_DYNAMIC_POINT_LIGHTS > 0
			#if VARIABLE_NUM_DYNAMIC_POINT_LIGHTS
			#if WEBGL || ES2_PROFILE || VULKAN_PROFILE
			// webgl and ES2 needs a constant loop counter
			UNROLL
			for (int i = 0; i < MAX_DYNAMIC_POINT_LIGHTS; i++)
			{
				if (NumDynamicPointLights < MAX_DYNAMIC_POINT_LIGHTS)
				{
					if (i >= NumDynamicPointLights)
					{
						break;
					}
				}
			#else
			for (int i = 0; i < NumDynamicPointLights; i++)
			{
			#endif
			#else
			for (int i = 0; i < NUM_DYNAMIC_POINT_LIGHTS; i++)
			{
			#endif
				float3 ToLight = LightPositionAndInvRadius[i].xyz - MaterialParameters.AbsoluteWorldPosition;
				float DistanceSqr = dot(ToLight, ToLight);
				float3 L = ToLight * rsqrt(DistanceSqr);

				float PointNoL = max(0, dot(MaterialParameters.WorldNormal, L));
				float PointRoL = max(0, dot(MaterialParameters.ReflectionVector, L));

				float Attenuation;

				if (LightColorAndFalloffExponent[i].w == 0)
				{
					// Sphere falloff (technically just 1/d2 but this avoids inf)
					Attenuation = 1 / ( DistanceSqr + 1 );
	
					float LightRadiusMask = Square(saturate(1 - Square(DistanceSqr * (LightPositionAndInvRadius[i].w * LightPositionAndInvRadius[i].w))));
					Attenuation *= LightRadiusMask;
				}
				else
				{
					Attenuation = RadialAttenuation(ToLight * LightPositionAndInvRadius[i].w, LightColorAndFalloffExponent[i].w);		
				}

				#if !FULLY_ROUGH
					Color += min(65000.0, (Attenuation * PointNoL) * LightColorAndFalloffExponent[i].rgb * (DiffuseColor + SpecularColor * PhongApprox(Roughness, PointRoL)));
				#else
					Color += (Attenuation * PointNoL) * LightColorAndFalloffExponent[i].rgb * DiffuseColor;
				#endif
			}
		#endif

	#endif /* !MATERIAL_SHADINGMODEL_UNLIT */
		 
	half3 Emissive = GetMaterialEmissive(PixelMaterialInputs);

	Color += Emissive;

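	// MODIFIED: per-pixel height fog replacing the stock vertex-interpolated fog path (see question above)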
	half4 VertexFog = CalculateHeightFog(MaterialParameters.WorldPosition_CamRelative);

#if !MATERIAL_SHADINGMODEL_UNLIT && MOBILE_EMULATION
	Color = lerp(Color, DiffuseColor + SpecularColor, ResolvedView.UnlitViewmodeMask);
#endif

	#if MATERIALBLENDING_ALPHACOMPOSITE
		OutColor = half4(Color * VertexFog.a + VertexFog.rgb * Opacity, Opacity);
	#elif MATERIALBLENDING_TRANSLUCENT
		OutColor = half4(Color * VertexFog.a + VertexFog.rgb, Opacity);
	#elif MATERIALBLENDING_ADDITIVE
		OutColor = half4(Color * (VertexFog.a * Opacity.x), 0.0f);
	#elif MATERIALBLENDING_MODULATE
		half3 FoggedColor = lerp(half3(1, 1, 1), Color, VertexFog.aaa * VertexFog.aaa);
		OutColor = half4(FoggedColor, Opacity);
	#else
		OutColor.rgb = Color * VertexFog.a + VertexFog.rgb;

		// Scene color alpha is not used yet so we set it to 0
		OutColor.a = 0.0;

		#if !OUTPUT_GAMMA_SPACE 
			if (GetHDR32bppEncodeMode() == HDR_ENCODE_NONE)
			{
				// FP16 HDR stores Z in framebuffer alpha
				OutColor.a = BasePassInterpolants.PixelPosition.w; 
			}
		#endif
	#endif

	#if !MATERIALBLENDING_MODULATE && OUTPUT_GAMMA_SPACE
		// MobileHDR applies PreExposure in tonemapper
		OutColor.rgb *= ResolvedView.PreExposure;
	#endif

#if USE_EDITOR_COMPOSITING && (MOBILE_EMULATION)
	// Editor primitive depth testing
	OutColor.a = 1.0;
	#if MATERIALBLENDING_MASKED
		// some material might have an opacity value
		OutColor.a = GetMaterialMaskInputRaw(PixelMaterialInputs);
	#endif
	clip(OutColor.a - GetMaterialOpacityMaskClipValue());
#else
	#if ES2_PROFILE && !OUTPUT_GAMMA_SPACE && !MATERIALDOMAIN_UI
		half Mode = GetHDR32bppEncodeMode();
		if (Mode == HDR_ENCODE_RGBE)
		{
			OutColor.rgb = FrameBufferBlendOp(OutColor);
			OutColor = RGBTEncode8BPC(OutColor.rgb, DEFAULT_32BPPHDR_ENCODED_RANGE);
		}
		if (Mode == HDR_ENCODE_MOSAIC)
		{
			OutColor = half4(HdrMosaic(OutColor.rgb,  SvPosition.xy), OutColor.a);
		}
	#endif
	#if OUTPUT_GAMMA_SPACE
		OutColor.rgb = sqrt(OutColor.rgb);
	#endif

	// We write to alpha here for mono far field mask computation
	#if MONOSCOPIC_FAR_FIELD
		#if MATERIALBLENDING_ANY_TRANSLUCENT
			OutColor.a = Opacity;
		#else
			OutColor.a = 1.0;
		#endif
	#endif
#endif
}

Hello,

We’ve recently released a new method of bug reporting for Unreal Engine 4 issues. Please visit the following forum post for more information: Unreal Engine Bug Submission Form - Announcements - Unreal Engine Forums