I wrote a custom lighting function for a surface shader. There is a specific effect I want to achieve, and for that I think I need to play with the attenuation value, but when I try to calculate it in the vertex function it behaves like this:
![alt text][1]
and this is my shader code:
Shader "Shader Learning/ Basic Surface Custom Lighting"
{
Properties{
_Color("Tint", Color) = (0,0,0,1)
_MainTex("Texture", 2D) = "white" {}
_RampTex("Texture", 2D) = "white" {}
}
SubShader{
Tags{"RenderType" = "Opaque" "Queue" = "Geometry" }
CGPROGRAM
#include "AutoLight.cginc"
#pragma surface surf Custom fullforwardshadows vertex:vert
#pragma target 3.0
sampler2D _MainTex;
sampler2D _RampTex;
fixed4 _Color;
half3 _Emission;
struct CustomSurfaceOutput {
fixed3 Albedo;
fixed3 Normal;
fixed3 Emission;
float4 ObjPos;
float4 WorldPos;
fixed Specular;
fixed Alpha;
float atten;
};
struct Input
{
float2 uv_MainTex;
float atten;
};
float4 LightingCustom(CustomSurfaceOutput s, float3 lightDir, float atten)
{
float towardsLight = dot(s.Normal, lightDir);
towardsLight = towardsLight * 0.5 + 0.5;
float3 lightIntensity = tex2D(_RampTex, towardsLight).rgb;
float4 col;
col.rgb = lightIntensity * s.Albedo * s.atten* _LightColor0.rgb;
col.a = s.Alpha;
return col;
}
void vert(inout appdata_full v, out Input o) {
UNITY_INITIALIZE_OUTPUT(Input, o);
float4 posWorld = mul(unity_ObjectToWorld, v.vertex);
float distanceToLight = _WorldSpaceLightPos0.xyz - posWorld.xyz;
o.atten = 1 - (distanceToLight / (1 / _LightPositionRange.w)); // attenuation formula
}
void surf(Input i, inout CustomSurfaceOutput o)
{
fixed4 col = tex2D(_MainTex,i.uv_MainTex);
col *= _Color;
o.atten = i.atten;
o.Albedo = col.rgb;
}
ENDCG
}
Fallback "Standard"
}
Why is this happening, and how can I fix it?
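For reference, this is a minimal sketch of how I imagine the per-vertex attenuation could be computed with a scalar distance instead. It assumes a point light (so `_WorldSpaceLightPos0.xyz` is a world-space position rather than a direction) and it assumes `_LightPositionRange.w` still holds `1 / range` in this pass, which I am not sure about since that variable is normally tied to point-light shadow rendering:

    // Sketch only: collapse the float3 offset to a scalar distance with length()
    // before applying the same linear falloff as above.
    void vert(inout appdata_full v, out Input o)
    {
        UNITY_INITIALIZE_OUTPUT(Input, o);
        float4 posWorld = mul(unity_ObjectToWorld, v.vertex);

        // Assumes a point/spot light: _WorldSpaceLightPos0.xyz is a position (w == 1).
        // For a directional light it is a direction instead.
        float distanceToLight = length(_WorldSpaceLightPos0.xyz - posWorld.xyz);

        // _LightPositionRange.w = 1 / range, so range = 1 / _LightPositionRange.w.
        // This may only be populated during point-light shadow passes, so a custom
        // range property might be safer.
        float range = 1.0 / _LightPositionRange.w;

        // Linear falloff, clamped to [0,1] so it does not go negative past the range.
        o.atten = saturate(1.0 - distanceToLight / range);
    }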
[1]: /storage/temp/207319-image-2023-05-18-092341356.png