
Surface Shader Input structure

Discussion in 'Shaders' started by teammetablast, Jan 4, 2011.

  1. teammetablast

    Joined: Nov 22, 2010
    Posts: 13
    Is it possible to have the Input structure of a Surface Shader contain the surface normal vector, AND have the SurfaceOutput set its normal to a value from a normal map lookup?

    The reason I want to do this is that I am writing a shader that fades the material based on the dot product of the view direction and the surface normal. I don't want to use the normal from the normal map, because it can cause visible artifacts in the areas that should be transparent; the surface normal before normal mapping is far less prone to showing those artifacts.

    Here is the relevant surface shading portion of the code, along with the current Input structure:

    Code (csharp):
    struct Input {
        float2 uv_MainTex;
        float2 uv_GlossTex;
        float2 uv_BumpMap;
        float3 viewDir;
        float3 worldRefl;
        INTERNAL_DATA
    };

    void surf (Input IN, inout SurfaceOutputSpecColor o) {
        half4 tex = tex2D(_MainTex, IN.uv_MainTex);
        half4 gloss = tex2D(_GlossTex, IN.uv_GlossTex);
        o.Albedo = tex.rgb * _Color.rgb;
        // Gloss colour comes from RGB
        o.GlossColor = _Shininess * gloss.rgb;

        // Specular is mapped
        o.Specular = gloss.a;
        o.Emission = _ReflStrength * texCUBE(_Cube, WorldReflectionVector(IN, o.Normal)).rgb;
        o.Normal = UnpackNormal(tex2D(_BumpMap, IN.uv_BumpMap));

        // here is where I want to use the original surface normal, not the texture lookup normal
        half rim = 1.0 - saturate(dot(normalize(IN.viewDir), IN.o.Normal));
        rim = lerp(_CenterAlpha, _EdgeAlpha, rim);
        o.Alpha = pow(rim, _AlphaPower);
    }
     
  2. teammetablast

    Joined: Nov 22, 2010
    Posts: 13
    Ok, so I have been trying to figure this out all day. The problem isn't that I can't access the original normal value, but that things break when I try to use it and then overwrite the normal with a new normal from a texture lookup. I have narrowed the issue down, but I have no idea why it is happening or what I can/should do to correct it. Below are some screenshots and the surface shader code used to create each material. The object is just a sphere straddling a blue plane, against the default Unity background.

    First shot: This uses the default value of the normal (that is, the value initially contained in SurfaceOutput.Normal when the surf() function is called) to calculate the alpha, and never changes SurfaceOutput.Normal. Note that the alpha fade is exactly what I am trying to achieve; however, the normal map isn't being used at all.



    And the code for the surface shader:
    Code (csharp):
    1. Shader "Protein/Base" {
    2.     Properties {
    3.         _Color ("Main Color", Color) = (1,1,1,1)
    4.         _MainTex ("Base (RGB)", 2D) = "white" {}
    5.         _BumpMap ("Normalmap", 2D) = "bump" {}
    6.     }
    7.    
    8.     SubShader {
    9.         Tags{ "RenderType" = "Transparent" "Queue" = "Transparent" }
    10.            
    11.         CGPROGRAM
    12.        
    13.         #pragma target 3.0
    14.         #pragma surface surf Lambert alpha
    15.         #include "UnityCG.cginc"
    16.  
    17.         sampler2D _MainTex;
    18.         sampler2D _BumpMap;
    19.        
    20.         float4 _Color;
    21.  
    22.         struct Input {
    23.             float2 uv_MainTex;
    24.             float2 uv_BumpMap;
    25.             float3 viewDir;
    26.         };
    27.            
    28.         void surf (Input IN, inout SurfaceOutput o)
    29.         {
    30.             half4 tex = tex2D(_MainTex, IN.uv_MainTex);
    31.             o.Albedo = tex.rgb * _Color.rgb;
    32.             o.Alpha = 1.0 - saturate(dot (normalize(IN.viewDir),o.Normal));
    33.         }
    34.         ENDCG
    35.     }
    36.     Fallback "Diffuse"
    37. }

    Here is the second pic. This is using the normal from the normal map to calculate the alpha value, which results in non-transparent areas where I want it to be transparent:




    And here is the code used for it:
    Code (csharp):
    1. Shader "Protein/Base" {
    2.     Properties {
    3.         _Color ("Main Color", Color) = (1,1,1,1)
    4.         _MainTex ("Base (RGB)", 2D) = "white" {}
    5.         _BumpMap ("Normalmap", 2D) = "bump" {}
    6.     }
    7.    
    8.     SubShader {
    9.         Tags{ "RenderType" = "Transparent" "Queue" = "Transparent" }
    10.            
    11.         CGPROGRAM
    12.        
    13.         #pragma target 3.0
    14.         #pragma surface surf Lambert alpha
    15.         #include "UnityCG.cginc"
    16.  
    17.         sampler2D _MainTex;
    18.         sampler2D _BumpMap;
    19.        
    20.         float4 _Color;
    21.  
    22.         struct Input {
    23.             float2 uv_MainTex;
    24.             float2 uv_BumpMap;
    25.             float3 viewDir;
    26.         };
    27.            
    28.         void surf (Input IN, inout SurfaceOutput o)
    29.         {
    30.             half4 tex = tex2D(_MainTex, IN.uv_MainTex);
    31.             o.Albedo = tex.rgb * _Color.rgb;
    32.             o.Normal = UnpackNormal(tex2D(_BumpMap, IN.uv_BumpMap));
    33.             o.Alpha = 1.0 - saturate(dot (normalize(IN.viewDir),o.Normal));
    34.         }
    35.         ENDCG
    36.     }
    37.     Fallback "Diffuse"
    38. }
    And finally, here is what happens when I try to use the original normal to calculate the alpha and then write the normal from the normal map to SurfaceOutput.Normal. Note that the entire object is rendered opaque, and there seems to be some pixellation as well:



    And the code used. I should mention that the problem occurs no matter how the lines are rearranged, even if the original normal is saved to another variable and used after the normal map's normal is saved to o.Normal.

    Code (csharp):
    1. Shader "Protein/Base" {
    2.     Properties {
    3.         _Color ("Main Color", Color) = (1,1,1,1)
    4.         _MainTex ("Base (RGB)", 2D) = "white" {}
    5.         _BumpMap ("Normalmap", 2D) = "bump" {}
    6.     }
    7.    
    8.     SubShader {
    9.         Tags{ "RenderType" = "Transparent" "Queue" = "Transparent" }
    10.            
    11.         CGPROGRAM
    12.        
    13.         #pragma target 3.0
    14.         #pragma surface surf Lambert alpha
    15.         #include "UnityCG.cginc"
    16.  
    17.         sampler2D _MainTex;
    18.         sampler2D _BumpMap;
    19.        
    20.         float4 _Color;
    21.  
    22.         struct Input {
    23.             float2 uv_MainTex;
    24.             float2 uv_BumpMap;
    25.             float3 viewDir;
    26.         };
    27.            
    28.         void surf (Input IN, inout SurfaceOutput o)
    29.         {
    30.             half4 tex = tex2D(_MainTex, IN.uv_MainTex);
    31.             o.Albedo = tex.rgb * _Color.rgb;
    32.             o.Alpha = 1.0 - saturate(dot (normalize(IN.viewDir),o.Normal));
    33.             o.Normal = UnpackNormal(tex2D(_BumpMap, IN.uv_BumpMap));
    34.         }
    35.         ENDCG
    36.     }
    37.     Fallback "Diffuse"
    38. }
     
  3. aubergine

    Joined: Sep 12, 2009
    Posts: 2,880
    I'm a newbie at surface shaders as well, but one thing I know for sure is that you can write your own custom vertex function to calculate anything you want, and then pass it along to surf later on.

    This example is from the Unity docs:

    Shader "Example/Custom Vertex Data" {
    Properties {
    _MainTex ("Texture", 2D) = "white" {}
    }
    SubShader {
    Tags { "RenderType" = "Opaque" }
    CGPROGRAM
    #pragma surface surf Lambert vertex:vert
    struct Input {
    float2 uv_MainTex;
    float3 customColor;
    };
    void vert (inout appdata_full v, out Input o) {
    o.customColor = abs(v.normal);
    }
    sampler2D _MainTex;
    void surf (Input IN, inout SurfaceOutput o) {
    o.Albedo = tex2D (_MainTex, IN.uv_MainTex).rgb;
    o.Albedo *= IN.customColor;
    }
    ENDCG
    }
    Fallback "Diffuse"
    }
     
  4. teammetablast

    Joined: Nov 22, 2010
    Posts: 13
    If I move the alpha calculation from the surface shader function to the vertex function, I don't think I will be able to get the view direction of the camera, will I? If I can, I think this would be possible, but I would still prefer it to be done in the surface shader.

    I just don't understand why this isn't working! Why would using the interpolated normal then writing over it with a normal map lookup cause it not to work? Is this a Unity bug or am I missing something else here?
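
    Looking at UnityCG.cginc, there do seem to be ObjSpaceViewDir() and WorldSpaceViewDir() helpers that a custom vertex function can call, so the rim term could probably be computed per-vertex and passed through the Input structure. A rough, untested sketch of that idea (the shader name and the vertRim field are just illustrative):

    Code (csharp):
    Shader "Protein/BaseVertexRim" {
        Properties {
            _Color ("Main Color", Color) = (1,1,1,1)
            _MainTex ("Base (RGB)", 2D) = "white" {}
            _BumpMap ("Normalmap", 2D) = "bump" {}
        }

        SubShader {
            Tags { "RenderType" = "Transparent" "Queue" = "Transparent" }

            CGPROGRAM
            #pragma target 3.0
            #pragma surface surf Lambert alpha vertex:vert
            #include "UnityCG.cginc"

            sampler2D _MainTex;
            sampler2D _BumpMap;
            float4 _Color;

            struct Input {
                float2 uv_MainTex;
                float2 uv_BumpMap;
                float vertRim;   // custom field, filled in vert()
            };

            void vert (inout appdata_full v, out Input o) {
                // May be needed on newer Unity versions to avoid
                // "uninitialized output" errors (macro from HLSLSupport.cginc).
                UNITY_INITIALIZE_OUTPUT(Input, o);
                // ObjSpaceViewDir (UnityCG.cginc) returns the un-normalised
                // object-space direction from this vertex towards the camera.
                float3 objViewDir = normalize(ObjSpaceViewDir(v.vertex));
                o.vertRim = 1.0 - saturate(dot(objViewDir, normalize(v.normal)));
            }

            void surf (Input IN, inout SurfaceOutput o) {
                half4 tex = tex2D(_MainTex, IN.uv_MainTex);
                o.Albedo = tex.rgb * _Color.rgb;
                // The normal map still drives the lighting...
                o.Normal = UnpackNormal(tex2D(_BumpMap, IN.uv_BumpMap));
                // ...while the alpha fade uses the interpolated per-vertex rim term.
                o.Alpha = IN.vertRim;
            }
            ENDCG
        }
        Fallback "Diffuse"
    }

    The obvious downside is that the rim is only evaluated per vertex and then interpolated, so on a low-poly mesh it will look coarser than a per-pixel fade.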
     
  5. spalmer

    Joined: Jun 3, 2014
    Posts: 6
    You need to use WorldNormalVector in the surface shader if you write to o.Normal. o.Normal defaults to (0,0,1) and is in tangent space, i.e. relative to the vertex normal. See the "Surface Shader Input Structure" section of this documentation page: http://docs.unity3d.com/Manual/SL-SurfaceShaders.html
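
    A rough, untested sketch of how that might apply to the shader above (the geomWorldNormal name and the reconstruction of a world-space view direction from worldPos are just one way to write it):

    Code (csharp):
    Shader "Protein/BaseWorldNormalRim" {
        Properties {
            _Color ("Main Color", Color) = (1,1,1,1)
            _MainTex ("Base (RGB)", 2D) = "white" {}
            _BumpMap ("Normalmap", 2D) = "bump" {}
        }

        SubShader {
            Tags { "RenderType" = "Transparent" "Queue" = "Transparent" }

            CGPROGRAM
            #pragma target 3.0
            #pragma surface surf Lambert alpha
            #include "UnityCG.cginc"

            sampler2D _MainTex;
            sampler2D _BumpMap;
            float4 _Color;

            struct Input {
                float2 uv_MainTex;
                float2 uv_BumpMap;
                float3 worldPos;
                float3 worldNormal;
                INTERNAL_DATA
            };

            void surf (Input IN, inout SurfaceOutput o) {
                half4 tex = tex2D(_MainTex, IN.uv_MainTex);
                o.Albedo = tex.rgb * _Color.rgb;
                // Per-pixel normal from the normal map (tangent space) for lighting.
                o.Normal = UnpackNormal(tex2D(_BumpMap, IN.uv_BumpMap));

                // Because o.Normal is written, normals have to be recovered through
                // WorldNormalVector. Passing the tangent-space default (0,0,1) should
                // give back the interpolated geometric normal in world space,
                // ignoring the normal map.
                float3 geomWorldNormal = normalize(WorldNormalVector(IN, float3(0, 0, 1)));
                float3 worldViewDir = normalize(_WorldSpaceCameraPos - IN.worldPos);

                o.Alpha = 1.0 - saturate(dot(worldViewDir, geomWorldNormal));
            }
            ENDCG
        }
        Fallback "Diffuse"
    }

    That way the fade is driven by the smooth vertex normal while the lighting still uses the normal map.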