Bug: GL_POINT_SIZE doesn't work in build on Linux Unity Editor 5.4.3f1

Discussion in 'Linux' started by robertofazio, Jul 28, 2016.

  1. robertofazio

    Joined:
    Dec 6, 2012
    Posts:
    5
    I'm working on a Kinect DepthMap and PointClouds project based on the Freenect libs, on Ubuntu 16.04 LTS with the Linux Unity Editor 5.4.3f1.

    Here you can find my work-in-progress repository for the depth part: https://bitbucket.org/robertofazio/kinect-on-unity3d-linux-editor-5.3
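
    The repository itself isn't quoted in this thread, so the following is only a rough sketch of the kind of setup the shader below is aimed at: a mesh drawn with MeshTopology.Points, where the point size written by the vertex shader controls how big each point appears. The class name, grid resolution, and UpdateFromDepth entry point are placeholders, not code from the repo.

    Code (CSharp):
    using UnityEngine;

    // Hypothetical sketch (not from the linked repo): builds a point-cloud mesh with one
    // vertex per depth sample and draws it with MeshTopology.Points, so the per-vertex
    // point size written by the shader (PSIZE) is what controls how fat each point looks.
    [RequireComponent(typeof(MeshFilter), typeof(MeshRenderer))]
    public class PointCloudMesh : MonoBehaviour
    {
        // Downsampled grid, to stay under the 65535-vertex mesh limit of Unity 5.x.
        const int Width = 160;
        const int Height = 120;

        Mesh mesh;
        Vector3[] vertices = new Vector3[Width * Height];
        Color[] colors = new Color[Width * Height];
        int[] indices = new int[Width * Height];

        void Start()
        {
            // One index per vertex, no triangles: the mesh is rendered as raw points.
            for (int i = 0; i < indices.Length; i++)
                indices[i] = i;

            mesh = new Mesh();
            mesh.MarkDynamic(); // updated every frame from the depth stream
            GetComponent<MeshFilter>().mesh = mesh;
        }

        // Call this with the latest depth frame (one value per grid cell, in the same
        // units the shader's colorDepth property expects). How the depth array is
        // obtained from Freenect is outside the scope of this sketch.
        public void UpdateFromDepth(float[] depth)
        {
            for (int y = 0; y < Height; y++)
            {
                for (int x = 0; x < Width; x++)
                {
                    int i = y * Width + x;
                    vertices[i] = new Vector3(x, y, depth[i]);
                    colors[i] = Color.white; // the shader recomputes color from depth anyway
                }
            }
            mesh.vertices = vertices;
            mesh.colors = colors;
            mesh.SetIndices(indices, MeshTopology.Points, 0);
        }
    }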

    I wrote a VertexColor shader that works fine both in the Editor and in the x86_64 build:

    Code (CSharp):
    1. Shader "Custom/VertexColor" {
    2.  
    3.      Properties{
    4.          minVertSize ("MinVertSize", Float) = 3.0
    5.          maxVertSize ("MaxVertSize", Float) = 5.0
    6.          colorDepth ("ColorDepth", Float) = 300.0
    7.          minColor ("MinColor", Vector) = (0,0,1)
    8.          maxColor ("MaxColor", Vector) = (1,0,0)
    9.      }
    10.    
    11.      SubShader {
    12.      Pass {
    13.          LOD 200
    14.          
    15.          CGPROGRAM
    16.          #pragma vertex vert
    17.          #pragma fragment frag
    18.          
    19.           float minVertSize;
    20.           float maxVertSize;
    21.           float colorDepth;
    22.           float3 minColor;
    23.           float3 maxColor;
    24.  
    25.          struct VertexInput {
    26.              float4 v : POSITION;
    27.              float4 color: COLOR;
    28.          };
    29.          
    30.          struct VertexOutput {
    31.              float4 pos : SV_POSITION;
    32.              float4 col : COLOR;
    33.              float size : PSIZE0;
    34.          };
    35.          
    36.          VertexOutput vert(VertexInput v) {
    37.          
    38.              VertexOutput o;
    39.              o.pos = mul(UNITY_MATRIX_MVP, v.v);
    40.              float pct = v.v[2]/colorDepth;
    41.              o.col[0] = lerp(minColor[0],maxColor[0],pct);
    42.              o.col[1] = lerp(minColor[1],maxColor[1],pct);;
    43.              o.col[2] = lerp(minColor[2],maxColor[2],pct);;
    44.              o.size = lerp(minVertSize,maxVertSize,pct);
    45.                          
    46.              return o;
    47.          }
    48.          
    49.          float4 frag(VertexOutput o) : COLOR {
    50.              return o.col;
    51.          }
    52.  
    53.          ENDCG
    54.          }
    55.      }
    56.  
    57. }
    58.  
    I also attached an EnablePointSize.cs script to the Camera:


    Code (CSharp):
    #if UNITY_STANDALONE_LINUX || UNITY_EDITOR_LINUX || UNITY_STANDALONE || UNITY_STANDALONE_LINUX_API
    #define IMPORT_GLENABLE
    #endif

    using UnityEngine;
    using System;
    using System.Collections;
    using System.Runtime.InteropServices;

    public class EnablePointSize : MonoBehaviour
    {
        const UInt32 GL_VERTEX_PROGRAM_POINT_SIZE = 0x8642;
        const UInt32 GL_POINT_SMOOTH = 0x0B10; // not available in the GL core profile

        const string LibGLPath =
    #if UNITY_EDITOR_LINUX
            "/usr/lib/x86_64-linux-gnu/mesa/libGL.so";  // untested on Linux, this may not be correct
                                                        // (alternative: "/usr/lib/nvidia-361/libGL.so")
    #elif UNITY_STANDALONE_LINUX
            "/usr/lib/x86_64-linux-gnu/mesa/libGL.so";  // untested on Linux, this may not be correct
    #elif UNITY_STANDALONE_WIN
            "opengl32.dll";
    #elif UNITY_STANDALONE_OSX || UNITY_EDITOR_OSX
            "/System/Library/Frameworks/OpenGL.framework/OpenGL";
    #else
            null;   // OpenGL ES platforms don't require this feature
    #endif

    #if IMPORT_GLENABLE
        [DllImport(LibGLPath)]
        public static extern void glEnable(UInt32 cap);

        private bool mIsOpenGL;
        private bool first;

        void Start()
        {
            mIsOpenGL = SystemInfo.graphicsDeviceVersion.Contains("OpenGL");
            first = true;
    #if UNITY_EDITOR_LINUX
            Debug.LogError("unity editor");
    #elif UNITY_STANDALONE_LINUX
            Debug.LogError("unity standalone linux");
    #endif
        }

        void OnPreRender()
        {
            if (first)
            {
                if (mIsOpenGL)
                    glEnable(GL_VERTEX_PROGRAM_POINT_SIZE); // lets the shader's PSIZE output take effect
                first = false;
                Debug.LogError(mIsOpenGL);
                glEnable(GL_POINT_SMOOTH);
            }
        }
    #endif
    }
    Everything works in the Editor: I can modify GL_VERTEX_PROGRAM_POINT_SIZE and the vertex colors at runtime.
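
    As a concrete (hypothetical) example of what "modify at runtime" can look like, a small helper can push new values into the material's properties every frame; only the property names (minVertSize, maxVertSize, colorDepth) come from the VertexColor shader above, everything else is a placeholder:

    Code (CSharp):
    using UnityEngine;

    // Hypothetical helper, not part of the original project: drives the point-size range
    // and depth-to-color mapping of the Custom/VertexColor material from the Inspector.
    public class PointSizeTweaker : MonoBehaviour
    {
        public Material pointCloudMaterial; // the material using Custom/VertexColor

        [Range(1f, 20f)] public float minVertSize = 3f;
        [Range(1f, 20f)] public float maxVertSize = 5f;
        public float colorDepth = 300f;

        void Update()
        {
            pointCloudMaterial.SetFloat("minVertSize", minVertSize);
            pointCloudMaterial.SetFloat("maxVertSize", maxVertSize);
            pointCloudMaterial.SetFloat("colorDepth", colorDepth);
        }
    }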

    When I build on Ubuntu Linux, the GL point size doesn't work, while the VertexColor shader itself still works.

    I've tried both OpenGL2 and OpenGLCore, and both Intel and AMD CPUs with a GTX 550 GPU; in the next few days I'm going to test on a GTX 1070. Finally, I also switched between the Mesa libGL path "/usr/lib/x86_64-linux-gnu/mesa/libGL.so" and the NVIDIA path "/usr/lib/nvidia-361/libGL.so", but I get the same issue: the points are always very thin.
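
    As a quick sanity check on which graphics API the built player actually ends up using (as opposed to the one requested in the build settings), something like the following can be dropped on any GameObject; it only relies on SystemInfo and is not part of the original project:

    Code (CSharp):
    using UnityEngine;

    // Minimal diagnostic (an addition for this thread, not from the repo): logs which
    // graphics API, driver version and GPU the running player actually uses, so
    // Editor-vs-build and OpenGL2-vs-GLCore differences show up in the player log.
    public class LogGraphicsBackend : MonoBehaviour
    {
        void Start()
        {
            Debug.Log("Graphics device type:    " + SystemInfo.graphicsDeviceType);
            Debug.Log("Graphics device version: " + SystemInfo.graphicsDeviceVersion);
            Debug.Log("Graphics device name:    " + SystemInfo.graphicsDeviceName);
        }
    }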

    The same project works in the OS X Unity Editor and in the OS X standalone build.
     
  2. Tak

    Joined:
    Mar 8, 2010
    Posts:
    1,001
    Have you tried launching the player with -force-opengl? That should give exactly the same behavior as the editor.
    With respect to OpenGL Core, we may have a fix incoming for our shader compiler + PSIZE.
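    (For reference, the flag goes on the command line when launching the standalone player from a terminal, e.g. ./PointCloud.x86_64 -force-opengl, where the build name is just a placeholder; -force-glcore requests the OpenGL core profile instead.)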
     
  3. robertofazio

    Joined:
    Dec 6, 2012
    Posts:
    5
    Hi Tak,
    No, it doesn't work.