Holographic Photo Blending with PhotoCapture

Discussion in 'AR' started by BrandonFogerty, Jul 15, 2016.

  1. BrandonFogerty
    Joined: Jan 29, 2016
    Posts: 83
    Some have asked for an example of capturing a holographic photo that blends in with the physical environment. You can do this by using the PhotoCapture API along with the projection and world-to-camera matrices that are included with the captured image data.




    Attached are a C# script and a shader. The C# script captures an image using the web camera on the HoloLens whenever you perform the air-tap gesture, and uploads the captured image to the GPU so that the shader can access the image data. The shader calculates what part of the image should be shown based on where the photo was taken.

    HoloLensSnapshotTest.cs
    Code (CSharp):
    using UnityEngine;
    using System.Collections;
    using System.Collections.Generic;
    using UnityEngine.VR.WSA.WebCam;
    using UnityEngine.VR.WSA.Input;

    public class HoloLensSnapshotTest : MonoBehaviour
    {
        GestureRecognizer m_GestureRecognizer;
        GameObject m_Canvas = null;
        Renderer m_CanvasRenderer = null;
        PhotoCapture m_PhotoCaptureObj;
        CameraParameters m_CameraParameters;
        bool m_CapturingPhoto = false;
        Texture2D m_Texture = null;

        void Start()
        {
            Initialize();
        }

        void SetupGestureRecognizer()
        {
            m_GestureRecognizer = new GestureRecognizer();
            m_GestureRecognizer.SetRecognizableGestures(GestureSettings.Tap);
            m_GestureRecognizer.TappedEvent += OnTappedEvent;
            m_GestureRecognizer.StartCapturingGestures();

            m_CapturingPhoto = false;
        }

        void Initialize()
        {
            Debug.Log("Initializing...");
            List<Resolution> resolutions = new List<Resolution>(PhotoCapture.SupportedResolutions);
            Resolution selectedResolution = resolutions[0];

            m_CameraParameters = new CameraParameters(WebCamMode.PhotoMode);
            m_CameraParameters.cameraResolutionWidth = selectedResolution.width;
            m_CameraParameters.cameraResolutionHeight = selectedResolution.height;
            m_CameraParameters.hologramOpacity = 0.0f;
            m_CameraParameters.pixelFormat = CapturePixelFormat.BGRA32;

            m_Texture = new Texture2D(selectedResolution.width, selectedResolution.height, TextureFormat.BGRA32, false);

            PhotoCapture.CreateAsync(false, OnCreatedPhotoCaptureObject);
        }

        void OnCreatedPhotoCaptureObject(PhotoCapture captureObject)
        {
            m_PhotoCaptureObj = captureObject;
            m_PhotoCaptureObj.StartPhotoModeAsync(m_CameraParameters, true, OnStartPhotoMode);
        }

        void OnStartPhotoMode(PhotoCapture.PhotoCaptureResult result)
        {
            SetupGestureRecognizer();

            Debug.Log("Ready!");
            Debug.Log("Air Tap to take a picture.");
        }

        void OnTappedEvent(InteractionSourceKind source, int tapCount, Ray headRay)
        {
            if (m_CapturingPhoto)
            {
                return;
            }

            m_CapturingPhoto = true;
            Debug.Log("Taking picture...");
            m_PhotoCaptureObj.TakePhotoAsync(OnPhotoCaptured);
        }

        void OnPhotoCaptured(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
        {
            if (m_Canvas == null)
            {
                m_Canvas = GameObject.CreatePrimitive(PrimitiveType.Quad);
                m_Canvas.name = "PhotoCaptureCanvas";
                m_CanvasRenderer = m_Canvas.GetComponent<Renderer>();
                m_CanvasRenderer.material = new Material(Shader.Find("AR/HolographicImageBlend"));
            }

            Matrix4x4 cameraToWorldMatrix;
            photoCaptureFrame.TryGetCameraToWorldMatrix(out cameraToWorldMatrix);
            Matrix4x4 worldToCameraMatrix = cameraToWorldMatrix.inverse;

            Matrix4x4 projectionMatrix;
            photoCaptureFrame.TryGetProjectionMatrix(out projectionMatrix);

            photoCaptureFrame.UploadImageDataToTexture(m_Texture);
            m_Texture.wrapMode = TextureWrapMode.Clamp;

            m_CanvasRenderer.sharedMaterial.SetTexture("_MainTex", m_Texture);
            m_CanvasRenderer.sharedMaterial.SetMatrix("_WorldToCameraMatrix", worldToCameraMatrix);
            m_CanvasRenderer.sharedMaterial.SetMatrix("_CameraProjectionMatrix", projectionMatrix);
            m_CanvasRenderer.sharedMaterial.SetFloat("_VignetteScale", 1.0f);

            // Position the canvas object slightly in front
            // of the real world web camera.
            Vector3 position = cameraToWorldMatrix.GetColumn(3) - cameraToWorldMatrix.GetColumn(2);

            // Rotate the canvas object so that it faces the user.
            Quaternion rotation = Quaternion.LookRotation(-cameraToWorldMatrix.GetColumn(2), cameraToWorldMatrix.GetColumn(1));

            m_Canvas.transform.position = position;
            m_Canvas.transform.rotation = rotation;

            Debug.Log("Took picture!");
            m_CapturingPhoto = false;
        }
    }
    HolographicImageBlendShader.shader
    Code (CSharp):
    1. Shader "AR/HolographicImageBlend"
    2. {
    3.     Properties
    4.     {
    5.         _MainTex ("Texture", 2D) = "white" {}
    6.         _VignetteScale ("Vignette Scale", RANGE(0,2)) = 0
    7.     }
    8.     SubShader
    9.     {
    10.         Tags { "RenderType"="Opaque" }
    11.         LOD 100
    12.  
    13.         Pass
    14.         {
    15.             CGPROGRAM
    16.             #pragma vertex vert
    17.             #pragma fragment frag
    18.  
    19.             #include "UnityCG.cginc"
    20.  
    21.             struct appdata
    22.             {
    23.                 float4 vertex : POSITION;
    24.             };
    25.  
    26.             struct v2f
    27.             {
    28.                 float4 vertexPositionInProjectionSpace : SV_POSITION;
    29.                 float2 uv : TEXCOORD0;
    30.                 float4 vertexInProjectionSpace : TEXCOORD1;
    31.             };
    32.  
    33.             sampler2D _MainTex;
    34.             float4x4 _WorldToCameraMatrix;
    35.             float4x4 _CameraProjectionMatrix;
    36.             float _VignetteScale;
    37.  
    38.             v2f vert (appdata v)
    39.             {
    40.                 v2f o;
    41.                 o.vertexPositionInProjectionSpace = mul(UNITY_MATRIX_MVP, v.vertex);
    42.  
    43.                 // Calculate the vertex position in world space.
    44.                 float4 vertexPositionInWorldSpace = mul(unity_ObjectToWorld, float4(v.vertex.xyz,1));
    45.                 // Now take the world space vertex position and transform it so that
    46.                 // it is relative to the physical web camera on the HoloLens.
    47.                 float4 vertexPositionInCameraSpace = mul(_WorldToCameraMatrix, float4(vertexPositionInWorldSpace.xyz,1));
    48.  
    49.                 // Convert our camera relative vertex into clip space.
    50.                 o.vertexInProjectionSpace = mul(_CameraProjectionMatrix, float4(vertexPositionInCameraSpace.xyz, 1.0));
    51.  
    52.                 return o;
    53.             }
    54.  
    55.             fixed4 frag (v2f i) : SV_Target
    56.             {
    57.                 // Transform the vertex into normalized coordinate space.  Basically
    58.                 // we want to map where our vertex should be on the screen into the -1 to 1 range
    59.                 // for both the x and y axes.
    60.                 float2 signedUV = i.vertexInProjectionSpace.xy / i.vertexInProjectionSpace.w;
    61.  
    62.                 // The HoloLens uses an additive display so the color black will
    63.                 // be transparent.  If the texture is smaller than the canvas, color the extra
    64.                 // area on the canvas black so it will be transparent on the HoloLens.
    65.                 if(abs(signedUV.x) > 1.0 || abs(signedUV.y) > 1.0)
    66.                 {
    67.                     return fixed4( 0.0, 0.0, 0.0, 0.0);
    68.                 }
    69.  
    70.                 // Currently our signedUV's x and y coordinates will fall between -1 and 1.
    71.                 // We need to map this range from 0 to 1 so that we can sample our texture.
    72.                 float2 uv = signedUV * 0.5 + float2(0.5, 0.5);
    73.                 fixed4 finalColor = tex2D(_MainTex, uv);
    74.  
    75.                 // Finally add a circular vignette effect starting from the center
    76.                 // of the image.
    77.                 finalColor *= 1.0-(length(signedUV) * _VignetteScale);
    78.  
    79.                 return finalColor;
    80.             }
    81.             ENDCG
    82.         }
    83.     }
    84. }
    85.  
     
  2. Gerenatian
    Joined: Jul 23, 2015
    Posts: 3
    Brandon, I've followed your example and a few others to try and use the HoloLens camera. In all cases, I seem to be getting stuck at
    Code (CSharp):
    PhotoCapture.SupportedResolutions
    returning an empty list. Do you have an idea what might be causing this? I'm using the latest Unity beta (24) and have asked for camera permissions.
     
  3. BrandonFogerty
    Joined: Jan 29, 2016
    Posts: 83
    Have you enabled both WebCam and Mic in the capabilities settings?
    [Image: RequiredCapabilities.png]
     
  4. Gerenatian
    Joined: Jul 23, 2015
    Posts: 3
    Yes, I have. After deploying my app, though, I don't see a separate entry under Camera in the settings screen listing that my app is asking for those permissions. When I call
    Code (CSharp):
    Resolution selectedResolution = PhotoCapture.SupportedResolutions.OrderByDescending((res) => res.width * res.height).First();
    in the Start method, I receive a null reference exception because
    Code (CSharp):
    PhotoCapture.SupportedResolutions
    is returning empty.
     
  5. Waterine
    Joined: Jul 27, 2016
    Posts: 6
    By default Package.appxmanifest is in Unity's no-overwrite list, so if the capability was added after the UWP app was generated, it might not get updated. You can open Package.appxmanifest to confirm or manually add it:

    <Capabilities>
      <DeviceCapability Name="webcam" />
    </Capabilities>
     
  6. Gerenatian
    Joined: Jul 23, 2015
    Posts: 3
    This is my current XML. Thank you Waterine, I believe you're correct. I would have never found that.

    <Capabilities>
      <uap2:Capability Name="spatialPerception" />
      <DeviceCapability Name="microphone" />
    </Capabilities>
     
  7. cyberstorm5076
    Joined: Jul 28, 2016
    Posts: 2
    I don't understand how to incorporate the C# script and shader into Unity... Do I make a blank GameObject or something? Could somebody please explain?
     
  8. BrandonFogerty
    Joined: Jan 29, 2016
    Posts: 83
    Hi cyberstorm5076!

    You can create a game object in the editor like so,

    [Image: CreateAGameObject.png]

    You can also create the sphere game object dynamically in code like so,
    Code (CSharp):
    GameObject sphereGameObject = GameObject.CreatePrimitive(PrimitiveType.Sphere);
    sphereGameObject.name = "Sphere";
    The following is a good tutorial to get you up and running with writing shaders.


    Shaders are just GPU programs that do certain things like modify your vertices, generate vertices, or calculate what the final pixel color should be. A Material is an asset that your artist typically creates. Think of it this way: if shaders are GPU programs, then materials are the command-line arguments that you pass into the shader. The material also stores a reference to the shader that it uses.

    The following code sample is a Color Fader shader and script I wrote. The Color Fader shader will morph the color of a game object between two different colors based on the current color morph value. The color morph value is a variable that must be between 0.0 and 1.0. When the morph value is 0.0, the color of the game object will be whatever Color0 is. When the morph value is 1.0, the color of the game object will be whatever Color1 is.

    The ColorController script dynamically creates the material and shader and then feeds your values into the shader. In this script, I dynamically create the material and shader; however, you could also assign the material as a public property.

    ColorController.cs
    Code (CSharp):
    using UnityEngine;
    using System.Collections;

    [RequireComponent(typeof(MeshRenderer))]
    [ExecuteInEditMode]
    public class ColorController : MonoBehaviour
    {
        public Color Color0 = Color.red;
        public Color Color1 = Color.blue;
        [Range(0.0f, 1.0f)]
        public float ColorMorphValue = 0.0f;

        private Material myMaterial = null;

        void Start()
        {
            Renderer gameObjectRenderer = this.transform.GetComponent<Renderer>();
            myMaterial = new Material(Shader.Find("MyCustomShader/ColorFader"));
            gameObjectRenderer.sharedMaterial = myMaterial;
        }

        void Update()
        {
            if (myMaterial == null)
            {
                return;
            }

            // Send custom parameters from the CPU to the GPU shader.
            myMaterial.SetColor("_Color0", Color0);
            myMaterial.SetColor("_Color1", Color1);
            myMaterial.SetFloat("_ColorMorphValue", ColorMorphValue);
        }
    }
    ColorFader.shader
    Code (CSharp):
    1. Shader "MyCustomShader/ColorFader"
    2. {
    3.     Properties
    4.     {
    5.         _Color0 ("Color 0", COLOR) = (1.0, 1.0, 1.0, 1.0)
    6.         _Color1 ("Color 1", COLOR) = (0.0, 0.0, 0.0, 1.0)
    7.         _ColorMorphValue ( "Color Morph Value", FLOAT) = 0.0
    8.     }
    9.     SubShader
    10.     {
    11.         Tags { "RenderType"="Opaque" }
    12.  
    13.         Pass
    14.         {
    15.             CGPROGRAM
    16.             #pragma vertex vert
    17.             #pragma fragment frag
    18.      
    19.             #include "UnityCG.cginc"
    20.  
    21.             struct appdata
    22.             {
    23.                 float4 vertex : POSITION;
    24.                 float2 uv : TEXCOORD0;
    25.             };
    26.  
    27.             struct v2f
    28.             {
    29.                 float4 vertex : SV_POSITION;
    30.                 float3 color : COLOR;
    31.             };
    32.      
    33.             float3 _Color0;
    34.             float3 _Color1;
    35.             float _ColorMorphValue;
    36.  
    37.             v2f vert (appdata v)
    38.             {
    39.                 v2f o;
    40.                 o.vertex = mul(UNITY_MATRIX_MVP, v.vertex);
    41.                 o.color = (1.0-_ColorMorphValue) * _Color0 + (_ColorMorphValue * _Color1);
    42.                 return o;
    43.             }
    44.      
    45.             float4 frag (v2f i) : SV_Target
    46.             {
    47.                 return float4(i.color, 1.0);
    48.             }
    49.             ENDCG
    50.         }
    51.     }
    52. }
    53.  
    If you would rather your ColorController script reference a premade material asset, then you will need to create the material asset like so,
    [Image: CreateAMaterial.png]

    Then you will need to assign the correct shader to the material like so,
    [Image: AssignAMaterial.png]

    Finally you will need to drag and drop the material onto your game object.
    Then you will need to drag and drop the modified ColorController script onto your game object.

    ColorController.cs - References an Assigned Material Asset
    Code (CSharp):
    using UnityEngine;
    using System.Collections;

    [RequireComponent(typeof(MeshRenderer))]
    [ExecuteInEditMode]
    public class ColorController : MonoBehaviour
    {
        public Color Color0 = Color.red;
        public Color Color1 = Color.blue;
        [Range(0.0f, 1.0f)]
        public float ColorMorphValue = 0.0f;

        void Update()
        {
            Renderer myRenderer = this.GetComponent<Renderer>();

            // Send custom parameters from the CPU to the GPU shader.
            myRenderer.sharedMaterial.SetColor("_Color0", Color0);
            myRenderer.sharedMaterial.SetColor("_Color1", Color1);
            myRenderer.sharedMaterial.SetFloat("_ColorMorphValue", ColorMorphValue);
        }
    }
    In order to use the HoloLensSnapshotTest script, just drag and drop it onto your camera game object.

    I hope that helps!
     
  9. cyberstorm5076
    Joined: Jul 28, 2016
    Posts: 2
    @BrandonFogerty thank you so much, this was a very detailed explanation and thoughtful of you :)
     
  10. yjlin5210
    Joined: Aug 5, 2016
    Posts: 7
    @BrandonFogerty Thank you so much. I'm able to use your shader to align the webcam result on the HoloLens. The downside is that playing the webcam texture drops the frame rate from 60 fps to 15 fps. Does anyone know how we can get the webcam frame data while still maintaining a high fps?
     
  11. BrandonFogerty
    Joined: Jan 29, 2016
    Posts: 83
    Hi @yjlin5210

    I am glad I could help! Anytime Mixed Reality Capture is used on the HoloLens, the fps will automatically drop to 30 fps as the price of doing business. However, I was unaware that it drops lower than 30 fps. Do you have a minimal repro project that I could take a look at? Either way, I will look into this further. Thanks for bringing this to my attention!
     
  12. yjlin5210
    Joined: Aug 5, 2016
    Posts: 7
    @BrandonFogerty
    All I do is

    Code (CSharp):
    WebCamTexture back;
    back = new WebCamTexture(WebCamTexture.devices[0].name);
    back.Play();
    and the frame rate drops to 15. If I specify a smaller size, such as 640x360 or lower:
    Code (CSharp):
    back = new WebCamTexture(WebCamTexture.devices[0].name, 320, 180);
    back.Play();
    the frame rate will be around 20.

    Here are some other people who get the same results as I do.
    http://forums.hololens.com/discussion/comment/6668/#Comment_6668
     
    Last edited: Aug 5, 2016
  13. yjlin5210
    Joined: Aug 5, 2016
    Posts: 7
    Here is the video reference
     
  14. yjlin5210
    Joined: Aug 5, 2016
    Posts: 7
    After updating the OS on the HoloLens, the frame rate jumps around between 20 and 30. Most of the time it is around 25.
     
  15. BrandonFogerty
    Joined: Jan 29, 2016
    Posts: 83
    Hi @yjlin5210,

    Have you tried using the PhotoCapture API to take photos?
     
  16. yjlin5210
    Joined: Aug 5, 2016
    Posts: 7
    @BrandonFogerty
    When using PhotoCapture, the game's framerate runs at around 40 fps or higher, and the placement of the image is much more stable. However, the RGB camera's update rate is way slower, probably around 5 fps.
     
  17. yjlin5210
    Joined: Aug 5, 2016
    Posts: 7
    Another question: if I am interested in some of the points on the texture and want to project a point into world space (assume z is a fixed distance, such as 100), how should I do that?

    Right now I am trying:
    Code (CSharp):
    Vector3 poiPoint = new Vector3(point2D.x, point2D.y, 100); // point2D is a 2D vector in the RGB camera space
    Matrix4x4 inverseMVP = (projectionMatrix * worldToCameraMatrix).inverse; // the projectionMatrix and worldToCameraMatrix are from the PhotoCapture information
    Vector2 poiPointInWorld = inverseMVP.MultiplyPoint3x4(poiPoint);
    but the result is wrong.
     
    Last edited: Aug 15, 2016
  18. yjlin5210
    Joined: Aug 5, 2016
    Posts: 7
    I think I found the problem: the coordinate system of OpenCV's Mat is different from Texture2D's.
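    (OpenCV's Mat has its origin at the top-left, while Unity's Texture2D has its origin at the bottom-left, so the y coordinate has to be flipped. A one-line sketch, where imageHeight and matY are hypothetical names:)
    Code (CSharp):
    // Convert a row index from OpenCV Mat coordinates (origin top-left)
    // to Texture2D coordinates (origin bottom-left).
    int textureY = imageHeight - 1 - matY;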
     
  19. acylum
    Joined: Jul 16, 2013
    Posts: 3
    @BrandonFogerty - This photo blending that you made is cool!

    I've been trying to take what you have here and map the photo to the spatial mesh, using SpatialMappingManager.Instance.SetSurfaceMaterial(new Material(Shader.Find("AR/HolographicImageBlend"))); in the OnPhotoCaptured function. The problem is that the mesh just turns all white. Do I need to set UVs on the mesh, or is there something else I'm missing? What do you recommend?

    The goal here is to map the photo as a texture onto the spatial mesh.

    Thanks
     
  20. BrandonFogerty
    Joined: Jan 29, 2016
    Posts: 83
    Hi @acylum,

    The Holographic Image Blend shader requires that the mesh it is applied to contains texture coordinates. However, the spatial mapping mesh only contains vertices and indices. That is why your spatial mapping mesh appears white. You can add your own UVs to the spatial mapping mesh, but that may be a bit complicated depending on what you are trying to do.
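    For reference, the sketch below shows one way that could look: project each spatial mesh vertex through the photo's matrices and store the result as UVs. This is only a rough sketch; AddProjectedUVs and its parameters are hypothetical names, and it assumes you kept the worldToCameraMatrix and projectionMatrix from the captured photo.
    Code (CSharp):
    // Rough sketch: generate UVs for a mesh by projecting each vertex
    // through the photo's matrices. All names here are hypothetical.
    void AddProjectedUVs(MeshFilter meshFilter, Matrix4x4 worldToCameraMatrix, Matrix4x4 projectionMatrix)
    {
        Mesh mesh = meshFilter.mesh;
        Vector3[] vertices = mesh.vertices;
        Vector2[] uvs = new Vector2[vertices.Length];

        for (int i = 0; i < vertices.Length; i++)
        {
            // Transform the vertex into the photo camera's clip space.
            Vector3 world = meshFilter.transform.TransformPoint(vertices[i]);
            Vector3 cam = worldToCameraMatrix.MultiplyPoint(world);
            Vector4 clip = projectionMatrix * new Vector4(cam.x, cam.y, cam.z, 1.0f);

            // Map the -1..1 projected coordinates into the 0..1 UV range.
            uvs[i] = new Vector2(clip.x / clip.w, clip.y / clip.w) * 0.5f + new Vector2(0.5f, 0.5f);
        }

        mesh.uv = uvs;
    }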
     
  21. acylum
    Joined: Jul 16, 2013
    Posts: 3
    @BrandonFogerty - yeah, I tried to use a Projector component on the main camera, thinking it was a way to add UVs to the spatial mesh, but I only got a single color and not the whole texture mapped to the mesh. (I guess the Projector component doesn't add UVs??)

    Another question: I also tried to use your photo blend with the new HoloToolkit that includes the Asobo spatial understanding. The issue is that when I try to take a photo while the new green mesh is active, I only get a white box - no photo. However, when I deactivate the new spatial mesh, everything works again. Any idea why it wouldn't work with the new spatial understanding module in HoloToolkit?

    Thanks for your time!
     
  22. hehepoo
    Joined: Jan 12, 2017
    Posts: 2
  23. hehepoo
    Joined: Jan 12, 2017
    Posts: 2
    Hi @BrandonFogerty

    As I said earlier, I am using Unity 5.6.0b3 and trying to use your script and shader for my learning.
    Somehow, I can't make it work.
    Here is what I did; please let me know what I did wrong:
    1. Created a new Unity 3D project.
    2. In the project, created a new script, HoloLensSnapshotTest, and copied your code into it.
    3. Created a shader, HolographicImage, and copied your shader code into it.
    4. Added a component to the Main Camera and selected HoloLensSnapshotTest.
    5. Changed the build settings: Windows Store, Universal 10, HoloLens, D3D, Local Machine, and Unity C# Project checked.
    6. Updated the player settings: InternetClient, WebCam, VideosLibrary, PicturesLibrary, Microphone, and SpatialPerception checked.
    7. Saved the scene.
    8. Created an App folder and built.
    9. Switched to Visual Studio with the solution file in the App folder.
    10. Changed to "Release", "x86", and "Remote Machine" with my HoloLens IP.
    11. Ctrl+F5 to run.

    I observed that the tapped event is never triggered in the app.

    If you can help me with what I did wrong, I would appreciate it.

    Thanks!
     
  24. foxvalleysoccer
    Joined: May 11, 2015
    Posts: 108
    I'm attempting to follow your code above, and I get a null reference on this line:
    Code (CSharp):
    m_CanvasRenderer.material = new Material(Shader.Find("AR/HolographicImageBlend"));
    This happens when I deploy to the HoloLens and air tap.
    In my solution I don't see the shader I created, which is weird. In the Unity Editor I see the folder and shader in that solution, but it does not show up anywhere once I build from Unity. Any ideas what the issue might be?
     
  25. unity_andrewc
    Unity Technologies
    Joined: Dec 14, 2015
    Posts: 218
  26. radonthetyrant
    Joined: Feb 14, 2014
    Posts: 6
    Last edited: May 9, 2017
  27. Westerby
    Joined: Jun 20, 2017
    Posts: 8
    Hello Everyone,

    the code posted by @brandon is very interesting and helpful. I've been trying to do the same thing @yjlin5210 described, that is, translating some x,y coordinates from the frame passed onto the quad into x,y coordinates in the holographic world, with some fixed z. I tried some approaches: 1) calculations based on ScreenToWorldPoint, 2) Pixel to Application-specified Coordinate System as described here https://developer.microsoft.com/en-...el_to_application-specified_coordinate_system. Unfortunately, neither result was precise. In fact, every time, the holograms I wanted to project at specific pixel x,y coordinates were positioned to the left or to the right of the taken blended photo.

    Any help would be highly appreciated.
     
  28. unity_andrewc
    Unity Technologies
    Joined: Dec 14, 2015
    Posts: 218
    Hi Mlotek -

    I'm afraid I'm unable to puzzle out what exactly you're trying to do, and don't understand the problem you're running into either. Would you mind going into more detail, possibly pasting in some code and screen captures?
     
  29. Westerby
    Joined: Jun 20, 2017
    Posts: 8
    Hi @unity_andrewc,

    let's say I'm reading a frame from the HoloLens locatable camera, with a size of 896x504. On the captured frame there is a point, or many points, of interest: a person's face, the edge of an object, or anything else I'm trying to detect. Let's say it is the tip of a person's nose, which has some x,y coordinates in my frame. What I want to do is take those x,y coordinates, process them, and display a hologram in world space in such a way that the hologram's position corresponds with the real object - the tip of the person's nose - in the real-world environment.

    Since on the HoloLens the screen size and frame size are the same, I've been trying to do this:

    Code (CSharp):
    sphere.transform.position = Camera.main.ScreenToWorldPoint(new Vector3(known_x_from_frame, known_y_from_frame, fixed_z));
    When I pass a frame onto the canvas, the result is OK and the sphere displays on my object of interest. But on the HoloLens the result is not precise.

    After that I tried methods described at already quoted Microsoft website:

    Code (CSharp):
    Vector2 ImagePosZeroToOne = new Vector2(1.0F * known_x_from_frame / (1.0F * frameWidth), 1.0F - (1.0F * known_y_from_frame / (1.0F * frameHeight)));
    Vector2 ImagePosProjected = (ImagePosZeroToOne * 2.0F) - new Vector2(1.0F, 1.0F); // -1 to 1 space
    Vector3 CameraSpacePos = UnProjectVector(projectionMatrix, new Vector3(ImagePosProjected.x, ImagePosProjected.y, 15));
    Vector3 WorldSpaceRayPoint2 = cameraToWorldMatrix * CameraSpacePos; // ray point in world space
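    (UnProjectVector is the helper from the Microsoft article linked above; my understanding of it is the sketch below, which assumes the projection matrix has no skew terms.)
    Code (CSharp):
    // Sketch of the UnProjectVector helper from the Microsoft article:
    // invert the projection for a point at a known depth.
    public static Vector3 UnProjectVector(Matrix4x4 proj, Vector3 to)
    {
        Vector3 from = new Vector3(0, 0, 0);
        var axsX = proj.GetRow(0);
        var axsY = proj.GetRow(1);
        var axsZ = proj.GetRow(2);
        from.z = to.z / axsZ.z;
        from.y = (to.y - (from.z * axsY.z)) / axsY.y;
        from.x = (to.x - (from.z * axsX.z)) / axsX.x;
        return from;
    }
    One thing I still need to check: in Unity, cameraToWorldMatrix * CameraSpacePos promotes the Vector3 to a Vector4 with w = 0, which drops the translation column, whereas cameraToWorldMatrix.MultiplyPoint(CameraSpacePos) uses w = 1.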
    I've been experimenting with every calculated measure, but with no precise result. I know that the depth parameter is difficult to get right, so for now I would be more than happy just to have the hologram's x,y coordinates precise enough, with some fixed z.

    I hope this description is better; if you or anyone else needs more details, I will be happy to provide them. At the time of writing I can't post any screenshots, but if they are still needed, I will post them tomorrow.
     
  30. unity_andrewc
    Unity Technologies
    Joined: Dec 14, 2015
    Posts: 218
    I don't think that's an assumption you can make in the context of photo capture. The PhotoCapture API exposes a set of resolutions supported by the photo camera - you basically just pick one of them and fill out the cameraResolutionWidth and cameraResolutionHeight on the CameraParameters object you pass to StartPhotoModeAsync (see https://docs.unity3d.com/ScriptReference/VR.WSA.WebCam.PhotoCapture.html). Hope that helps.
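    In other words, something along these lines (a rough sketch against the UnityEngine.VR.WSA.WebCam API of this Unity version):
    Code (CSharp):
    using System.Linq;
    using UnityEngine.VR.WSA.WebCam;

    // Pick the largest photo resolution the camera reports...
    Resolution resolution = PhotoCapture.SupportedResolutions
        .OrderByDescending(res => res.width * res.height)
        .First();

    // ...and fill out the CameraParameters passed to StartPhotoModeAsync.
    CameraParameters parameters = new CameraParameters();
    parameters.cameraResolutionWidth = resolution.width;
    parameters.cameraResolutionHeight = resolution.height;
    parameters.pixelFormat = CapturePixelFormat.BGRA32;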
     
  31. erics_vulcan
    Joined: Apr 2, 2016
    Posts: 4
    Hey everyone,

    We wrote an open source project (with lots of documentation!) called HoloLensCameraStream that allows you to do exactly what everyone here wants. It even has Brandon's shader in it (thanks!). I hope it helps.

    It only runs in Unity 5.6 right now. I'd like to upgrade it to Unity 2017/2018.x, but some math issue has emerged. From what I can see, the problem is with this line:

    Code (CSharp):
    Vector3 position = cameraToWorldMatrix.GetColumn(3) - cameraToWorldMatrix.GetColumn(2);

    I don't know enough about what exactly is happening in this line, or why its behavior would change in Unity 2017. If anyone can give insight, I'll make the upgrade.
     


  32. erics_vulcan
    Joined: Apr 2, 2016
    Posts: 4
    Okay, now I think there's something in the shader that doesn't work in 2017.x, but I don't know what it is.
     
  33. Buster-Chung
    Joined: Jan 6, 2017
    Posts: 1
    @BrandonFogerty Hi, I know this is an old post, but I have a question. I am doing a project with this code, and I want to get a texture from the quad after blending, so that it has both matrices applied. I used the code from this picture, but it caused a null exception.
    Does anyone know how to get the Texture2D image from the quad?

    Thank you
     


  34. neurograph
    Joined: Jun 8, 2010
    Posts: 24
    Hi @erics_vulcan and @BrandonFogerty

    How would you reverse the process to get (on the HoloLens!) the pixel coordinates of a 3D object?

    Please help me, I'm using
    Code (CSharp):
    Camera.main.WorldToScreenPoint(go.transform.position)
    But it isn't working for me...

    I would like to be able to use in C# something like the Application-specified Coordinate System to Pixel Coordinates process described by Microsoft.

    Code (CSharp):
    // Usual 3d math:
    float4x4 WorldToCamera = inverse( CameraToWorld );
    float4 CameraSpacePos = mul( WorldToCamera, float4( WorldSpacePos.xyz, 1 ) ); // use 1 as the W component
    // Projection math:
    float4 ImagePosUnnormalized = mul( CameraProjection, float4( CameraSpacePos.xyz, 1 ) ); // use 1 as the W component
    float2 ImagePosProjected = ImagePosUnnormalized.xy / ImagePosUnnormalized.w; // normalize by W, gives -1 to 1 space
    float2 ImagePosZeroToOne = ( ImagePosProjected * 0.5 ) + float2( 0.5, 0.5 ); // good for GPU textures
    int2 PixelPos = int2( ImagePosZeroToOne.x * ImageWidth, ( 1 - ImagePosZeroToOne.y ) * ImageHeight ); // good for CPU textures
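    My attempt at translating that pseudocode into Unity C# looks roughly like this (a sketch; worldSpacePos, imageWidth, and imageHeight are placeholder names, and the matrices are assumed to come from the PhotoCaptureFrame):
    Code (CSharp):
    // Sketch: world space -> photo pixel coordinates, mirroring the pseudocode above.
    Matrix4x4 worldToCamera = cameraToWorldMatrix.inverse;
    Vector3 cameraSpacePos = worldToCamera.MultiplyPoint(worldSpacePos); // uses 1 as the W component

    Vector4 imagePosUnnormalized = projectionMatrix * new Vector4(cameraSpacePos.x, cameraSpacePos.y, cameraSpacePos.z, 1f);
    Vector2 imagePosProjected = new Vector2(imagePosUnnormalized.x, imagePosUnnormalized.y) / imagePosUnnormalized.w; // -1 to 1 space

    Vector2 imagePosZeroToOne = (imagePosProjected * 0.5f) + new Vector2(0.5f, 0.5f);
    int pixelX = (int)(imagePosZeroToOne.x * imageWidth);
    int pixelY = (int)((1f - imagePosZeroToOne.y) * imageHeight);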
    Thank you very much!
     
  35. Xyy_1209
    Joined: Nov 12, 2017
    Posts: 6
    I have the same problem. Like this:
    InvalidOperationException: Operation is not valid due to the current state of the object
    System.Linq.Enumerable.First[Resolution] (IEnumerable`1 source)

    How can I deal with it?
     
  36. Unity_Wesley
    Unity Technologies
    Joined: Sep 17, 2015
    Posts: 558
    Are you running on the HoloLens or in the editor?
     
  37. Xyy_1209
    Joined: Nov 12, 2017
    Posts: 6
    Running on the HoloLens.
     
  38. nischita
    Joined: Dec 2, 2017
    Posts: 4
    This is a great tutorial! It works like a charm. Thank you!
     
  39. nischita
    Joined: Dec 2, 2017
    Posts: 4
    Hi, is there a way to show only a framed area and somehow capture only that part from the HoloLens camera?
     
  40. simuleiro
    Joined: Mar 31, 2019
    Posts: 1
    Great tutorial! Thank you
     
  41. berryjohnson
    Joined: Sep 6, 2019
    Posts: 2
    Holographic photo blending with captured image data is amazing, and it has been such a useful capability this year.
     
  42. quixr
    Joined: Sep 12, 2018
    Posts: 2
    MRTK2 / HoloLens 2: capture is not working; I just get a white quad in space.
     
  43. AndHog
    Joined: Sep 12, 2019
    Posts: 5
    Has anyone else tried this with the HoloLens 2? It seems TryGetCameraToWorldMatrix and TryGetProjectionMatrix do not return the correct values. They always return the same values for me (using the current latest Unity 2019 LTS, 2019.4.13), so the position of the quad is never updated.
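    (Both TryGet methods return a bool, so a guard like this sketch at least makes the failure visible instead of silently placing the quad with stale matrices:)
    Code (CSharp):
    // Sketch: skip placement when the locatable camera transforms are unavailable.
    Matrix4x4 cameraToWorldMatrix;
    Matrix4x4 projectionMatrix;
    if (!photoCaptureFrame.TryGetCameraToWorldMatrix(out cameraToWorldMatrix) ||
        !photoCaptureFrame.TryGetProjectionMatrix(out projectionMatrix))
    {
        Debug.LogWarning("Camera transforms unavailable for this frame.");
        return;
    }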
     
  44. cookieofcode
    Joined: Apr 12, 2020
    Posts: 1
    @AndHog The image blending works fine for me on the HoloLens 2, MRTK 2.4.0, and Unity 2019.4.11f using @erics_vulcan's library HoloLensCameraStream. Check out the Projection Example.

    As of now, I assume that the "camera view transform" and "projection transform" are only available using VideoPreview instead of VideoRecord on the HoloLens 2.

    Microsoft provides a helper script to convert System.Numerics.Matrix4x4 to UnityEngine.Matrix4x4, but they define the conversion "ToUnity()" differently than the HoloLensCameraStream library does. Maybe this could be related?
     
  45. AndHog
    Joined: Sep 12, 2019
    Posts: 5
    Amazing - after some fiddling I got the example to work great! Now I just need to implement this into my current project :)

    Thanks a bunch @cookieofcode !!

    edit:
    I still hope Unity fixes this in the future, but now I can at least verify the current solution. It could be related to the conversion, as you say; we'll see going forward, hopefully.
     
    Last edited: Nov 10, 2020
  46. arianaash
    Joined: Aug 24, 2018
    Posts: 1
    Thank you so much for your tutorial!
    I could follow your scripts, and everything runs perfectly!
    The only problem is that I need to record the user's view: although the canvas and detected faces appear in the user's normal field of view, they are not observable from the Device Portal, in recorded videos, or in captured photos. Could you please guide me on how to capture them in recorded video and pictures?
     
    Last edited: Jul 9, 2021
  47. xiaoshuangs
    Joined: Jan 2, 2020
    Posts: 1
    On the HoloLens 2, it seems TryGetCameraToWorldMatrix and TryGetProjectionMatrix do not return the correct values. They always return the same values for me (using the current latest Unity 2019 LTS, 2019.4.13).
     
  48. Mia_white
    Joined: Apr 15, 2022
    Posts: 13
    Thanks for this!
     
  49. flashchen7
    Joined: Sep 21, 2023
    Posts: 1
    Have you solved this problem?