ARCore深度渲染问题分析
文章目录
- 1.前言
- 2.深度图显示
- 3.深度遮挡
- 3.1 处理流程
- 3.2 相关代码
- 4.结语
ARCore背景渲染
2)在update中更新真实场景深度图(Frame.CameraImage.UpdateDepthTexture(ref m_DepthTexture);),通过CommandBuffer在不透明物体渲染结束后,通过_CameraDepthTexture获取虚拟场景的深度值,并将深度值处理后的结果存储到贴图的a通道,传递给下一步处理。从如下代码看,虽然绑定了名为"_OcclusionMapBlurred"的全局贴图,但该命令缓冲中并没有实际执行任何模糊Pass,因此blur效果有限,或者说没有。
// Grabs the main scene camera and forces Unity to generate _CameraDepthTexture,
// which the occlusion shader samples to obtain the virtual-scene depth.
m_Camera = Camera.main;
m_Camera.depthTextureMode |= DepthTextureMode.Depth;
// Command buffer that builds the occlusion map right after opaque geometry renders.
m_DepthBuffer = new CommandBuffer();
m_DepthBuffer.name = "Auxilary occlusion textures";
// Creates the occlusion map.
int occlusionMapTextureID = Shader.PropertyToID("_OcclusionMap");
// -1/-1 means camera pixel width/height; 0 means no depth buffer on this RT.
m_DepthBuffer.GetTemporaryRT(occlusionMapTextureID, -1, -1, 0, FilterMode.Bilinear);
// Pass #0 renders an auxiliary buffer - occlusion map that indicates the
// regions of virtual objects that are behind real geometry.
m_DepthBuffer.Blit(
BuiltinRenderTextureType.CameraTarget,
occlusionMapTextureID, m_DepthMaterial, /*pass=*/ 0);
// Blurs the occlusion map.
// NOTE(review): despite the comment above, no blur pass is dispatched here -
// the un-blurred occlusion map is bound directly as "_OcclusionMapBlurred",
// which matches the article's observation that the blur is effectively absent.
m_DepthBuffer.SetGlobalTexture("_OcclusionMapBlurred", occlusionMapTextureID);
// Attached at both events so the effect works in forward (AfterForwardOpaque)
// and deferred (AfterGBuffer) rendering paths; only the active path fires.
m_Camera.AddCommandBuffer(CameraEvent.AfterForwardOpaque, m_DepthBuffer);
m_Camera.AddCommandBuffer(CameraEvent.AfterGBuffer, m_DepthBuffer);
// Grabs the main scene camera and forces Unity to generate _CameraDepthTexture,
// which the occlusion shader samples to obtain the virtual-scene depth.
m_Camera = Camera.main;
m_Camera.depthTextureMode |= DepthTextureMode.Depth;
// Command buffer that builds the occlusion map right after opaque geometry renders.
m_DepthBuffer = new CommandBuffer();
m_DepthBuffer.name = "Auxilary occlusion textures";
// Creates the occlusion map.
int occlusionMapTextureID = Shader.PropertyToID("_OcclusionMap");
// -1/-1 means camera pixel width/height; 0 means no depth buffer on this RT.
m_DepthBuffer.GetTemporaryRT(occlusionMapTextureID, -1, -1, 0, FilterMode.Bilinear);
// Pass #0 renders an auxiliary buffer - occlusion map that indicates the
// regions of virtual objects that are behind real geometry.
m_DepthBuffer.Blit(
BuiltinRenderTextureType.CameraTarget,
occlusionMapTextureID, m_DepthMaterial, /*pass=*/ 0);
// Blurs the occlusion map.
// NOTE(review): despite the comment above, no blur pass is dispatched here -
// the un-blurred occlusion map is bound directly as "_OcclusionMapBlurred",
// which matches the article's observation that the blur is effectively absent.
m_DepthBuffer.SetGlobalTexture("_OcclusionMapBlurred", occlusionMapTextureID);
// Attached at both events so the effect works in forward (AfterForwardOpaque)
// and deferred (AfterGBuffer) rendering paths; only the active path fires.
m_Camera.AddCommandBuffer(CameraEvent.AfterForwardOpaque, m_DepthBuffer);
m_Camera.AddCommandBuffer(CameraEvent.AfterGBuffer, m_DepthBuffer);
通过OcclusionImageEffect shader中的pass 0,对深度进行处理。首先通过采样获取到真实深度(depthMeters)和虚拟深度(virtualDepth),然后计算一个occlusionAlpha值:当真实深度明显小于虚拟深度时(真实物体位于虚拟物体前方),occlusionAlpha为1;当真实深度明显大于虚拟深度时为0;当两者之差落在过渡区间(由_TransitionSizeMeters与virtualDepth共同决定)之内时,occlusionAlpha取0到1之间的过渡值。
float occlusionAlpha =
1.0 - saturate(0.5 * (depthMeters - virtualDepth) /
(_TransitionSizeMeters * virtualDepth) + 0.5);
3)在后处理过程中(OnRenderImage)根据第二步计算的occlusionAlpha值来决定是否显示虚拟物体
3.2 相关代码
实现的CS代码(DepthEffect)如下所示:
[RequireComponent(typeof(Camera))]
public class DepthEffect : MonoBehaviour
{
///
/// The global shader property name for the camera texture.
///
public const string BackgroundTexturePropertyName = "_BackgroundTexture";
///
/// The image effect shader to blit every frame with.
///
public Shader OcclusionShader;
///
/// The blur kernel size applied to the camera feed. In pixels.
///
[Space]
public float BlurSize = 20f;
///
/// The number of times occlusion map is downsampled before blurring. Useful for
/// performance optimization. The value of 1 means no downsampling, each next one
/// downsamples by 2.
///
public int BlurDownsample = 2;
///
/// Maximum occlusion transparency. The value of 1.0 means completely invisible when
/// occluded.
///
[Range(0, 1)]
public float OcclusionTransparency = 1.0f;
///
/// The bias added to the estimated depth. Useful to avoid occlusion of objects anchored
/// to planes. In meters.
///
[Space]
public float OcclusionOffset = 0.08f;
///
/// Velocity occlusions effect fades in/out when being enabled/disabled.
///
public float OcclusionFadeVelocity = 4.0f;
///
/// Instead of a hard z-buffer test, allows the asset to fade into the background
/// gradually. The parameter is unitless, it is a fraction of the distance between the
/// camera and the virtual object where blending is applied.
///
public float TransitionSize = 0.1f;
private static readonly string k_CurrentDepthTexturePropertyName = "_CurrentDepthTexture";
private static readonly string k_TopLeftRightPropertyName = "_UvTopLeftRight";
private static readonly string k_BottomLeftRightPropertyName = "_UvBottomLeftRight";
private Camera m_Camera;
private Material m_DepthMaterial;
private Texture2D m_DepthTexture;
private float m_CurrentOcclusionTransparency = 1.0f;
private ARCoreBackgroundRenderer m_BackgroundRenderer;
private CommandBuffer m_DepthBuffer;
private CommandBuffer m_BackgroundBuffer;
private int m_BackgroundTextureID = -1;
///
/// Unity's Awake() method.
///
public void Awake()
{
m_CurrentOcclusionTransparency = OcclusionTransparency;
Debug.Assert(OcclusionShader != null, "Occlusion Shader parameter must be set.");
m_DepthMaterial = new Material(OcclusionShader);
m_DepthMaterial.SetFloat("_OcclusionTransparency", m_CurrentOcclusionTransparency);
m_DepthMaterial.SetFloat("_OcclusionOffsetMeters", OcclusionOffset);
m_DepthMaterial.SetFloat("_TransitionSize", TransitionSize);
// Default texture, will be updated each frame.
m_DepthTexture = new Texture2D(2, 2);
m_DepthTexture.filterMode = FilterMode.Bilinear;
m_DepthMaterial.SetTexture(k_CurrentDepthTexturePropertyName, m_DepthTexture);
m_Camera = Camera.main;
m_Camera.depthTextureMode |= DepthTextureMode.Depth;
m_DepthBuffer = new CommandBuffer();
m_DepthBuffer.name = "Auxilary occlusion textures";
// Creates the occlusion map.
int occlusionMapTextureID = Shader.PropertyToID("_OcclusionMap");
m_DepthBuffer.GetTemporaryRT(occlusionMapTextureID, -1, -1, 0, FilterMode.Bilinear);