Analysis of ARCore Depth Rendering Issues

Contents

  • 1. Preface
  • 2. Depth map display
  • 3. Depth occlusion
    • 3.1 Processing flow
    • 3.2 Related code
  • 4. Conclusion

1) ARCore background rendering (the camera image is drawn as the scene background by ARCoreBackgroundRenderer).
2) In Update, refresh the real-scene depth map (Frame.CameraImage.UpdateDepthTexture(ref m_DepthTexture);). Then, via a CommandBuffer that runs after opaque rendering finishes, read the virtual scene's depth from _CameraDepthTexture, process it, and store the result in the alpha channel of an occlusion map that is passed to the next step. The code below suggests a blur is also applied, but judging from the parameters the blur has little or no effect; as written, the occlusion map is simply bound to _OcclusionMapBlurred without an actual blur pass.

            m_Camera = Camera.main;
            m_Camera.depthTextureMode |= DepthTextureMode.Depth;

            m_DepthBuffer = new CommandBuffer();
            m_DepthBuffer.name = "Auxilary occlusion textures";

            // Creates the occlusion map.
            int occlusionMapTextureID = Shader.PropertyToID("_OcclusionMap");
            m_DepthBuffer.GetTemporaryRT(occlusionMapTextureID, -1, -1, 0, FilterMode.Bilinear);

            // Pass #0 renders an auxilary buffer - occlusion map that indicates the
            // regions of virtual objects that are behind real geometry.
            m_DepthBuffer.Blit(
                BuiltinRenderTextureType.CameraTarget,
                occlusionMapTextureID, m_DepthMaterial, /*pass=*/ 0);

            // Blurs the occlusion map.
            m_DepthBuffer.SetGlobalTexture("_OcclusionMapBlurred", occlusionMapTextureID);

            m_Camera.AddCommandBuffer(CameraEvent.AfterForwardOpaque, m_DepthBuffer);
            m_Camera.AddCommandBuffer(CameraEvent.AfterGBuffer, m_DepthBuffer);

Pass 0 of the OcclusionImageEffect shader then processes the depth. It first samples the real depth (depthMeters) and the virtual depth (virtualDepth), and computes an occlusionAlpha value: when the two depths differ significantly and the real depth is the smaller one, occlusionAlpha is 1; in the opposite case it is 0; and when the two are very close, it falls between 0 and 1, with _TransitionSizeMeters controlling the width of this transition.

                float occlusionAlpha =
                    1.0 - saturate(0.5 * (depthMeters - virtualDepth) /
                    (_TransitionSizeMeters * virtualDepth) + 0.5);

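To see how the formula behaves, here is a small CPU-side check (plain C# written for this article, not part of the sample; the depth values are made up):

    using System;

    class OcclusionAlphaDemo
    {
        static float Saturate(float x) => Math.Min(Math.Max(x, 0f), 1f);

        // Mirrors the shader formula:
        // 1 - saturate(0.5 * (real - virtual) / (transition * virtual) + 0.5)
        static float OcclusionAlpha(float depthMeters, float virtualDepth, float transition)
        {
            return 1.0f - Saturate(0.5f * (depthMeters - virtualDepth) /
                (transition * virtualDepth) + 0.5f);
        }

        static void Main()
        {
            Console.WriteLine(OcclusionAlpha(1.0f, 2.0f, 0.1f)); // real in front: 1 (occluded)
            Console.WriteLine(OcclusionAlpha(2.0f, 1.0f, 0.1f)); // virtual in front: 0 (visible)
            Console.WriteLine(OcclusionAlpha(2.0f, 2.0f, 0.1f)); // equal depths: 0.5 (transition)
        }
    }
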
3) In the post-processing step (OnRenderImage), the occlusionAlpha computed in step 2 determines whether the virtual object is displayed.

3.2 Related code

The implementing C# code (DepthEffect) is shown below:

    using GoogleARCore;
    using UnityEngine;
    using UnityEngine.Rendering;

    [RequireComponent(typeof(Camera))]
    public class DepthEffect : MonoBehaviour
    {
        /// <summary>
        /// The global shader property name for the camera texture.
        /// </summary>
        public const string BackgroundTexturePropertyName = "_BackgroundTexture";

        /// <summary>
        /// The image effect shader to blit every frame with.
        /// </summary>
        public Shader OcclusionShader;

        /// <summary>
        /// The blur kernel size applied to the camera feed. In pixels.
        /// </summary>
        [Space]
        public float BlurSize = 20f;

        /// <summary>
        /// The number of times occlusion map is downsampled before blurring. Useful for
        /// performance optimization. The value of 1 means no downsampling, each next one
        /// downsamples by 2.
        /// </summary>
        public int BlurDownsample = 2;

        /// <summary>
        /// Maximum occlusion transparency. The value of 1.0 means completely invisible when
        /// occluded.
        /// </summary>
        [Range(0, 1)]
        public float OcclusionTransparency = 1.0f;

        /// <summary>
        /// The bias added to the estimated depth. Useful to avoid occlusion of objects anchored
        /// to planes. In meters.
        /// </summary>
        [Space]
        public float OcclusionOffset = 0.08f;

        /// <summary>
        /// The velocity at which the occlusion effect fades in/out when enabled/disabled.
        /// </summary>
        public float OcclusionFadeVelocity = 4.0f;

        /// <summary>
        /// Instead of a hard z-buffer test, allows the asset to fade into the background
        /// gradually. The parameter is unitless; it is a fraction of the distance between the
        /// camera and the virtual object where blending is applied.
        /// </summary>
        public float TransitionSize = 0.1f;

        private static readonly string k_CurrentDepthTexturePropertyName = "_CurrentDepthTexture";
        private static readonly string k_TopLeftRightPropertyName = "_UvTopLeftRight";
        private static readonly string k_BottomLeftRightPropertyName = "_UvBottomLeftRight";

        private Camera m_Camera;
        private Material m_DepthMaterial;
        private Texture2D m_DepthTexture;
        private float m_CurrentOcclusionTransparency = 1.0f;
        private ARCoreBackgroundRenderer m_BackgroundRenderer;
        private CommandBuffer m_DepthBuffer;
        private CommandBuffer m_BackgroundBuffer;
        private int m_BackgroundTextureID = -1;

        /// <summary>
        /// Unity's Awake() method.
        /// </summary>
        public void Awake()
        {
            m_CurrentOcclusionTransparency = OcclusionTransparency;

            Debug.Assert(OcclusionShader != null, "Occlusion Shader parameter must be set.");
            m_DepthMaterial = new Material(OcclusionShader);
            m_DepthMaterial.SetFloat("_OcclusionTransparency", m_CurrentOcclusionTransparency);
            m_DepthMaterial.SetFloat("_OcclusionOffsetMeters", OcclusionOffset);
            m_DepthMaterial.SetFloat("_TransitionSize", TransitionSize);

            // Default texture, will be updated each frame.
            m_DepthTexture = new Texture2D(2, 2);
            m_DepthTexture.filterMode = FilterMode.Bilinear;
            m_DepthMaterial.SetTexture(k_CurrentDepthTexturePropertyName, m_DepthTexture);

            m_Camera = Camera.main;
            m_Camera.depthTextureMode |= DepthTextureMode.Depth;

            m_DepthBuffer = new CommandBuffer();
            m_DepthBuffer.name = "Auxilary occlusion textures";

            // Creates the occlusion map.
            int occlusionMapTextureID = Shader.PropertyToID("_OcclusionMap");
            m_DepthBuffer.GetTemporaryRT(occlusionMapTextureID, -1, -1, 0, FilterMode.Bilinear);

            // Pass #0 renders an auxilary buffer - occlusion map that indicates the
            // regions of virtual objects that are behind real geometry.
            m_DepthBuffer.Blit(
                BuiltinRenderTextureType.CameraTarget,
                occlusionMapTextureID, m_DepthMaterial, /*pass=*/ 0);

            // Blurs the occlusion map.
            m_DepthBuffer.SetGlobalTexture("_OcclusionMapBlurred", occlusionMapTextureID);

            m_Camera.AddCommandBuffer(CameraEvent.AfterForwardOpaque, m_DepthBuffer);
            m_Camera.AddCommandBuffer(CameraEvent.AfterGBuffer, m_DepthBuffer);

            m_BackgroundRenderer = FindObjectOfType<ARCoreBackgroundRenderer>();
            if (m_BackgroundRenderer == null)
            {
                Debug.LogError("BackgroundTextureProvider requires ARCoreBackgroundRenderer " +
                    "anywhere in the scene.");
                return;
            }

            m_BackgroundBuffer = new CommandBuffer();
            m_BackgroundBuffer.name = "Camera texture";
            m_BackgroundTextureID = Shader.PropertyToID(BackgroundTexturePropertyName);
            m_BackgroundBuffer.GetTemporaryRT(m_BackgroundTextureID,
                /*width=*/ -1, /*height=*/ -1,
                /*depthBuffer=*/ 0, FilterMode.Bilinear);

            var material = m_BackgroundRenderer.BackgroundMaterial;
            if (material != null)
            {
                m_BackgroundBuffer.Blit(material.mainTexture, m_BackgroundTextureID, material);
            }

            m_BackgroundBuffer.SetGlobalTexture(
                BackgroundTexturePropertyName, m_BackgroundTextureID);
            m_Camera.AddCommandBuffer(CameraEvent.BeforeForwardOpaque, m_BackgroundBuffer);
            m_Camera.AddCommandBuffer(CameraEvent.BeforeGBuffer, m_BackgroundBuffer);
        }

        /// <summary>
        /// Unity's Update() method.
        /// </summary>
        public void Update()
        {
            m_CurrentOcclusionTransparency +=
                (OcclusionTransparency - m_CurrentOcclusionTransparency) *
                Time.deltaTime * OcclusionFadeVelocity;

            m_CurrentOcclusionTransparency =
                Mathf.Clamp(m_CurrentOcclusionTransparency, 0.0f, OcclusionTransparency);
            m_DepthMaterial.SetFloat("_OcclusionTransparency", m_CurrentOcclusionTransparency);
            m_DepthMaterial.SetFloat("_TransitionSize", TransitionSize);
            Shader.SetGlobalFloat("_BlurSize", BlurSize / BlurDownsample);

            // Gets the latest depth map from ARCore.
            Frame.CameraImage.UpdateDepthTexture(ref m_DepthTexture);

            // Updates the screen orientation for each material.
            _UpdateScreenOrientationOnMaterial();
        }

        /// <summary>
        /// Unity's OnEnable() method.
        /// </summary>
        public void OnEnable()
        {
            if (m_DepthBuffer != null)
            {
                m_Camera.AddCommandBuffer(CameraEvent.AfterForwardOpaque, m_DepthBuffer);
                m_Camera.AddCommandBuffer(CameraEvent.AfterGBuffer, m_DepthBuffer);
            }

            if (m_BackgroundBuffer != null)
            {
                m_Camera.AddCommandBuffer(CameraEvent.BeforeForwardOpaque, m_BackgroundBuffer);
                m_Camera.AddCommandBuffer(CameraEvent.BeforeGBuffer, m_BackgroundBuffer);
            }
        }

        /// <summary>
        /// Unity's OnDisable() method.
        /// </summary>
        public void OnDisable()
        {
            if (m_DepthBuffer != null)
            {
                m_Camera.RemoveCommandBuffer(CameraEvent.AfterForwardOpaque, m_DepthBuffer);
                m_Camera.RemoveCommandBuffer(CameraEvent.AfterGBuffer, m_DepthBuffer);
            }

            if (m_BackgroundBuffer != null)
            {
                m_Camera.RemoveCommandBuffer(CameraEvent.BeforeForwardOpaque, m_BackgroundBuffer);
                m_Camera.RemoveCommandBuffer(CameraEvent.BeforeGBuffer, m_BackgroundBuffer);
            }
        }

        private void OnRenderImage(RenderTexture source, RenderTexture destination)
        {
            // Only render the image when tracking.
            if (Session.Status != SessionStatus.Tracking)
            {
                return;
            }

            // Pass #1 combines virtual and real cameras based on the occlusion map.
            Graphics.Blit(source, destination, m_DepthMaterial, /*pass=*/ 1);
        }

        /// <summary>
        /// Updates the screen orientation of the depth map.
        /// </summary>
        private void _UpdateScreenOrientationOnMaterial()
        {
            var uvQuad = Frame.CameraImage.TextureDisplayUvs;
            m_DepthMaterial.SetVector(
                k_TopLeftRightPropertyName,
                new Vector4(
                    uvQuad.TopLeft.x, uvQuad.TopLeft.y, uvQuad.TopRight.x, uvQuad.TopRight.y));
            m_DepthMaterial.SetVector(
                k_BottomLeftRightPropertyName,
                new Vector4(uvQuad.BottomLeft.x, uvQuad.BottomLeft.y, uvQuad.BottomRight.x,
                    uvQuad.BottomRight.y));
        }
    }

The shader is OcclusionImageEffect:

Shader "Hidden/OcclusionImageEffect"
{
    Properties
    {
        _MainTex ("Main Texture", 2D) = "white" {}  // Source image from OnRenderImage.
        _UvTopLeftRight ("UV of top corners", Vector) = (0, 1, 1, 1)
        _UvBottomLeftRight ("UV of bottom corners", Vector) = (0 , 0, 1, 0)
        _OcclusionTransparency ("Maximum occlusion transparency", Range(0, 1)) = 1
        _OcclusionOffsetMeters ("Occlusion offset [meters]", Float) = 0
        _TransitionSizeMeters ("Transition size [meters]", Float) = 0.05
    }

    SubShader
    {
        Cull Off ZWrite Off ZTest Always

        CGINCLUDE

        #include "UnityCG.cginc"

        struct appdata
        {
            float4 vertex : POSITION;
            float2 uv : TEXCOORD0;
        };

        struct v2f
        {
            float2 uv : TEXCOORD0;
            float4 vertex : SV_POSITION;
        };

        v2f vert (appdata v)
        {
            v2f o;
            o.vertex = UnityObjectToClipPos(v.vertex);
            o.uv = v.uv;
            return o;
        }
        ENDCG

        // Pass #0 renders an auxilary buffer - occlusion map that indicates the
        // regions of virtual objects that are behind real geometry.
        Pass
        {
            CGPROGRAM
            #pragma vertex vert
            #pragma fragment frag

            #include "../../../../SDK/Materials/ARCoreDepth.cginc"

            sampler2D _CameraDepthTexture;
            sampler2D _BackgroundTexture;
            bool _UseDepthFromPlanes;

            float _TransitionSizeMeters;

            fixed4 frag (v2f i) : SV_Target
            {
                float depthMeters = 0.0;
                if (_UseDepthFromPlanes)
                {
                    depthMeters = tex2Dlod(_CurrentDepthTexture, float4(i.uv, 0, 0)).r
                                    * ARCORE_MAX_DEPTH_MM;
                    depthMeters *= ARCORE_DEPTH_SCALE;
                }
                else
                {
                    float2 depthUv = ArCoreDepth_GetUv(i.uv);
                    depthMeters = ArCoreDepth_GetMeters(depthUv);
                }

                float virtualDepth = LinearEyeDepth(
                    SAMPLE_DEPTH_TEXTURE(_CameraDepthTexture, i.uv)) -
                    _OcclusionOffsetMeters;

                // Far plane minus near plane.
                float maxVirtualDepth =
                    _ProjectionParams.z - _ProjectionParams.y;

                float occlusionAlpha =
                    1.0 - saturate(0.5 * (depthMeters - virtualDepth) /
                    (_TransitionSizeMeters * virtualDepth) + 0.5);

                // Masks out only the fragments with virtual objects.
                occlusionAlpha *= saturate(maxVirtualDepth - virtualDepth);

                // At this point occlusionAlpha is equal to 1.0 only for fully
                // occluded regions of the virtual objects.
                fixed4 background = tex2D(_BackgroundTexture, i.uv);

                return fixed4(background.rgb, occlusionAlpha);
            }
            ENDCG
        }

        // Pass #1 combines virtual and real cameras based on the occlusion map.
        Pass
        {
            CGPROGRAM
            #pragma vertex vert
            #pragma fragment frag

            sampler2D _MainTex;
            sampler2D _OcclusionMapBlurred;
            sampler2D _BackgroundTexture;

            fixed _OcclusionTransparency;

            fixed4 frag (v2f i) : SV_Target
            {
                fixed4 input = tex2D(_MainTex, i.uv);
                fixed4 background = tex2D(_BackgroundTexture, i.uv);
                fixed4 occlusionBlurred = tex2D(_OcclusionMapBlurred, i.uv);
                float objectMask = occlusionBlurred.a;

                // The virtual object mask is blurred, we make the falloff
                // steeper to simulate erosion operator. This is needed to make
                // the fully occluded virtual object invisible.
                float objectMaskEroded = pow(objectMask, 10);

                // occlusionTransition equal to 1 means fully occluded object.
                // This operation boosts occlusion near the edges of the virtual
                // object, but does not affect occlusion within the object.
                float occlusionTransition =
                    saturate(occlusionBlurred.a * (2.0 - objectMaskEroded));

                // Clips occlusion if we want to partially show occluded object.
                occlusionTransition = min(occlusionTransition, _OcclusionTransparency);

                return lerp(input, background, occlusionTransition);
            }
            ENDCG
        }
    }
}

4. Conclusion

The processing flow is fairly complex; for simpler use cases there is still room for optimization.
The sample also offers a translucency effect without enabling blending (Blend): a CommandBuffer captures the image at different points in the frame, and the results are then interpolated according to an alpha value, as in the sketch below.
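
A minimal sketch of that idea (written for this article, not taken from the sample; the component and _GrabbedTexture names are made up): grab the camera target just before transparent objects are drawn and expose it as a global texture, so a shader can lerp between the grabbed image and its own color instead of using Blend.

    using UnityEngine;
    using UnityEngine.Rendering;

    [RequireComponent(typeof(Camera))]
    public class GrabBeforeTransparents : MonoBehaviour
    {
        private CommandBuffer m_GrabBuffer;

        private void OnEnable()
        {
            m_GrabBuffer = new CommandBuffer { name = "Grab before transparents" };
            int grabID = Shader.PropertyToID("_GrabbedTexture");
            m_GrabBuffer.GetTemporaryRT(grabID, -1, -1, 0, FilterMode.Bilinear);

            // Copy everything rendered so far (camera background + opaque objects).
            m_GrabBuffer.Blit(BuiltinRenderTextureType.CameraTarget, grabID);
            m_GrabBuffer.SetGlobalTexture("_GrabbedTexture", grabID);

            GetComponent<Camera>().AddCommandBuffer(
                CameraEvent.BeforeForwardAlpha, m_GrabBuffer);
        }

        private void OnDisable()
        {
            GetComponent<Camera>().RemoveCommandBuffer(
                CameraEvent.BeforeForwardAlpha, m_GrabBuffer);
            m_GrabBuffer.Release();
        }
    }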

For the 3D occlusion problem, full 3D mesh reconstruction would certainly work, but the reconstructed mesh then has to be drawn, which raises performance concerns. One possible approach: reconstruct the mesh, but draw it the way Unity's ShadowCaster pass does, writing only depth and rendering no color; depth testing then makes 3D virtual objects get occluded automatically.
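
Below is a sketch of such a shader (a common Unity depth-mask pattern written for this article, not code from the sample; the shader name is made up). Assigned to a material on the reconstructed mesh, it occludes virtual content without drawing any color:

    Shader "Hidden/DepthOnlyOccluder"
    {
        SubShader
        {
            // Render before regular opaque geometry so the depth buffer is
            // already filled when virtual objects are drawn.
            Tags { "Queue" = "Geometry-10" }

            Pass
            {
                ZWrite On    // Write depth...
                ColorMask 0  // ...but output no color.
            }
        }
    }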