
Saving fragment position into textures.

Discussion in 'Shaders' started by fastcoder, Dec 6, 2016.

  1. fastcoder

    fastcoder

    Joined:
    Sep 20, 2013
    Posts:
    16
I would like to store my fragments' world positions in three textures that hold the x, y, and z coordinates respectively. Even though the textures are in the ARGB32 format, I can't seem to write a value bigger than 1.0. Here is my shader code.
    Code (CSharp):
Shader "Custom/FirstPass"
{
    Properties { }

    SubShader
    {
        Tags { "RenderType"="Opaque" }

        Pass
        {
            CGPROGRAM

            #pragma vertex vert
            #pragma fragment frag
            #pragma target 5.0

            #include "UnityCG.cginc"

            struct vertexData
            {
                float4 vertex : POSITION;
                float3 normal : NORMAL;
                float4 color : COLOR;
                float4 texcoord0 : TEXCOORD0;
                //...
            };

            struct fragmentData
            {
                float4 positionC : SV_POSITION;
                float4 positionW : TEXCOORD1;
                //float4 color : COLOR;
                //...
            };

            struct fragmentOutput
            {
                float Gx : SV_Target0;
                float Gy : SV_Target1;
                float Gz : SV_Target2;
                float depth : SV_Target3;
            };

            fragmentData vert(vertexData v)
            {
                fragmentData o;
                o.positionW = mul(unity_ObjectToWorld, v.vertex);
                o.positionC = UnityObjectToClipPos(v.vertex);
                return o;
            }

            fragmentOutput frag(fragmentData fragment)
            {
                fragmentOutput output;
                output.Gx = fragment.positionW.x;
                output.Gy = fragment.positionW.y;
                output.Gz = fragment.positionW.z;
                output.depth = fragment.positionC.z;

                //... write to other renderbuffers

                return output;
            }

            ENDCG
        }
    }
}
  2. bgolus

    bgolus

    Joined:
    Dec 7, 2012
    Posts:
    12,336
    ARGB32 is 32 bits per pixel, 8 bits per channel. It is only capable of storing values between 0.0 and 1.0 with 256 steps of precision.

    You want RGBAFloat
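
For example, a minimal sketch (the size and depth-buffer values here are placeholders, not from your code; the actual enum name is RenderTextureFormat.ARGBFloat):

Code (CSharp):
// A float-format render texture keeps a full 32-bit float per channel,
// so values outside the 0.0 - 1.0 range survive the write.
RenderTexture positions = new RenderTexture(640, 480, 24, RenderTextureFormat.ARGBFloat, RenderTextureReadWrite.Linear);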
     
  3. fastcoder

    fastcoder

    Joined:
    Sep 20, 2013
    Posts:
    16
Here is my C# code, which now uses the RFloat format. The new problem is that no matter what float value I write into the Gy texture in the fragment shader, the code below prints 255 0 255 255.

    Code (CSharp):
using UnityEngine;
using System.Collections;

public class mrt : MonoBehaviour
{
    public GameObject fragment;

    RenderTexture Gx, Gy, Gz, depth;
    RenderBuffer[] buffers;
    int width = 640;
    int height = 480;
    Camera cam;

    // Use this for initialization
    void Start()
    {
        cam = GameObject.Find("KeyCam").GetComponent<Camera>();

        Gx = new RenderTexture(width, height, 0, RenderTextureFormat.RFloat, RenderTextureReadWrite.Linear);
        Gy = new RenderTexture(width, height, 0, RenderTextureFormat.RFloat, RenderTextureReadWrite.Linear);
        Gz = new RenderTexture(width, height, 0, RenderTextureFormat.RFloat, RenderTextureReadWrite.Linear);
        Gx.generateMips = false;
        Gy.generateMips = false;
        Gz.generateMips = false;
        depth = new RenderTexture(width, height, 16, RenderTextureFormat.RFloat, RenderTextureReadWrite.Linear);
        buffers = new RenderBuffer[] { Gx.colorBuffer, Gy.colorBuffer, Gz.colorBuffer, depth.colorBuffer };
        cam.SetTargetBuffers(buffers, depth.depthBuffer);
        saveFragmentPositions();
    }

    // Update is called once per frame
    void Update()
    {
    }

    void saveFragmentPositions()
    {
        cam.enabled = false;

        cam.RenderWithShader(Shader.Find("Custom/FirstPass"), "RenderType");

        Texture2D t2Gx = new Texture2D(width, height, TextureFormat.RFloat, false);
        Texture2D t2Gy = new Texture2D(width, height, TextureFormat.RFloat, false);
        Texture2D t2Gz = new Texture2D(width, height, TextureFormat.RFloat, false);
        Camera.main.enabled = false;

        RenderTexture.active = Gx;
        t2Gx.ReadPixels(new Rect(0, 0, width, height), 0, 0, false);
        t2Gx.Apply();

        RenderTexture.active = Gy;
        t2Gy.ReadPixels(new Rect(0, 0, width, height), 0, 0, false);
        t2Gy.Apply();

        RenderTexture.active = Gz;
        t2Gz.ReadPixels(new Rect(0, 0, width, height), 0, 0, false);
        t2Gz.Apply();

        //Camera.main.enabled = true;
        RenderTexture.active = null;

        Color32[] pGx = t2Gx.GetPixels32();
        Color32[] pGy = t2Gy.GetPixels32();
        Color32[] pGz = t2Gz.GetPixels32();

        // just print the last row of the Gx, Gy, Gz textures for debugging
        for (int i = 0; i < width; i++)
        {
            float x = 0;
            float y = 0;
            float z = 0;
            byte[] bxArray = { pGx[i].a, pGx[i].r, pGx[i].g, pGx[i].b };
            byte[] byArray = { pGy[i].a, pGy[i].r, pGy[i].g, pGy[i].b };
            byte[] bzArray = { pGz[i].a, pGz[i].r, pGz[i].g, pGz[i].b };

            x = System.BitConverter.ToSingle(bxArray, 0);
            y = System.BitConverter.ToSingle(byArray, 0);
            z = System.BitConverter.ToSingle(bzArray, 0);
            //Debug.Log(x + " " + y + " " + z);
            Debug.Log(pGy[i].a + " " + pGy[i].r + " " + pGy[i].g + " " + pGy[i].b);

            //Instantiate(fragment, new Vector3(x, y, z), Quaternion.identity);
        }
        Camera.main.enabled = true;
    }
}
     
  4. bgolus

    bgolus

    Joined:
    Dec 7, 2012
    Posts:
    12,336
First, I highly recommend using a single RGBAFloat texture instead of multiple render targets with individual float textures. There's a (minor) cost for writing to multiple targets and a (larger) cost for reading from multiple textures.
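
As a rough sketch of what that could look like (reusing the names from your code above):

Code (CSharp):
// One ARGBFloat target instead of four RFloat MRTs: pack the world
// position into RGB and the depth into A.
RenderTexture g = new RenderTexture(width, height, 24, RenderTextureFormat.ARGBFloat, RenderTextureReadWrite.Linear);
cam.targetTexture = g;
cam.RenderWithShader(Shader.Find("Custom/FirstPass"), "RenderType");
// In the shader, the fragment function would then return a single
// float4(positionW.xyz, positionC.z) : SV_Target instead of four outputs.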

Second, GetPixels32() and Color32 values do not store floats; just like ARGB32 they are "0.0 to 1.0" values represented as an integer byte (0 to 255) on the CPU side. You need to be using GetPixels() and Color. GetPixels32() isn't going to convert your single channel RFloat to a 4 byte representation of your float; it's going to clamp the float value to 0.0-1.0, cast that to a 0-255 integer, and either put that in the R channel alone or broadcast that clamped byte across all 4 channels.

Color[] Cxyz = Gxyz.GetPixels(); // get float values from the render texture
...
Vector4 xyz = Cxyz[i].linear; // get the explicit linear value from each Color


If you really want a byte representation of the float to convert yourself, you would need to use GetRawTextureData().
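
Roughly like this, reusing the t2Gy texture from the code above:

Code (CSharp):
// GetRawTextureData() returns the texture's raw bytes; for an RFloat
// texture that's 4 bytes per pixel, which BitConverter can reassemble.
byte[] raw = t2Gy.GetRawTextureData();
float firstPixel = System.BitConverter.ToSingle(raw, 0);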

Third, there's no reason to use ReadPixels() here at all. You're rendering to a render texture you created and still have direct access to, so the roundabout way of setting each texture as the active render texture and doing a ReadPixels() and Apply() can be completely avoided by doing a GetPixels() directly on the original render texture.

Lastly, if possible, you should never, ever do any of this. ReadPixels / GetPixels / Apply are all very slow. Converting over to a single RGBAFloat and using a single GetPixels() will help a bit, but generally, if you're reading data back from the GPU on the CPU, there's probably a way to do what you want while keeping everything on the GPU.
     
  5. dizzy2003

    dizzy2003

    Joined:
    Nov 25, 2013
    Posts:
    108
RenderTexture has no GetPixels. That's why you need to use a Texture2D as an intermediate and call ReadPixels.
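
Putting the two posts together, a readback sketch using the names from the earlier code (the float lands in each Color's r channel):

Code (CSharp):
// Copy the RFloat render texture into a Texture2D first...
RenderTexture.active = Gy;
Texture2D t2Gy = new Texture2D(width, height, TextureFormat.RFloat, false);
t2Gy.ReadPixels(new Rect(0, 0, width, height), 0, 0, false);
t2Gy.Apply();
RenderTexture.active = null;

// ...then read the floats with GetPixels() (Color), not GetPixels32() (Color32).
Color[] pGy = t2Gy.GetPixels();
float y = pGy[0].r; // RFloat data lands in the r channel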