Render features switch places for 2 cameras and normals edge detection not working in build

Asked by Notrum666 · Asked 11/13/2023 · Last edited by Notrum666 · Modified 11/13/2023 · Viewed 37 times

Q:

I'm trying to create a split screen while using a URP render feature (the feature adds 3 render passes: the first renders a view-space normals texture, the second does edge detection based on depth and normals, and the third adds distance fog). For some reason, when I have 2 cameras the effects switch places:

[Image: swapped render features]

As you can see, the top camera has the bottom camera's effects and the bottom camera has the top camera's effects. (Brief explanation: the cameras are inside a tube, the top camera is at the right border and the bottom camera is at the left border, so the top camera's fog should be toward the left and the bottom camera's fog toward the right. Instead they are exactly swapped: not blended, not missing, just switched for some reason.) Here is the feature code:

public class URP_Feature : ScriptableRendererFeature
{
class ViewNormalsTexturePass : ScriptableRenderPass
{
    private readonly List<ShaderTagId> shaderIdTagList;
    private readonly RenderTargetHandle normals;
    private readonly Material normalsMaterial;

    public ViewNormalsTexturePass(RenderPassEvent renderPassEvent)
    {
        normalsMaterial = new Material(Shader.Find("Shader Graphs/ViewSpaceNormalsShader"));
        shaderIdTagList = new List<ShaderTagId>()
        {
            new ShaderTagId("UniversalForward"),
            new ShaderTagId("UniversalForwardOnly"),
            new ShaderTagId("LightweightForward"),
            new ShaderTagId("SRPDefaultUnlit")
        };
        this.renderPassEvent = renderPassEvent;
        normals.Init("_SceneViewSpaceNormals");
    }
    public override void OnCameraSetup(CommandBuffer cmd, ref RenderingData renderingData)
    {
    }

    public override void Configure(CommandBuffer cmd, RenderTextureDescriptor cameraTextureDescriptor)
    {
        base.Configure(cmd, cameraTextureDescriptor);

        cmd.GetTemporaryRT(normals.id, cameraTextureDescriptor, FilterMode.Point);
        ConfigureTarget(normals.Identifier());
        ConfigureClear(ClearFlag.All, Color.white);
    }

    public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
    {
        if (!normalsMaterial)
            return;

        CommandBuffer cmd = CommandBufferPool.Get();
        using (new ProfilingScope(cmd, new ProfilingSampler("SceneViewSpaceNormalsTextureCreation")))
        {
            context.ExecuteCommandBuffer(cmd);
            cmd.Clear();
            DrawingSettings drawingSettings = CreateDrawingSettings(shaderIdTagList, ref renderingData, renderingData.cameraData.defaultOpaqueSortFlags);
            drawingSettings.overrideMaterial = normalsMaterial;
            FilteringSettings filteringSettings = FilteringSettings.defaultValue;
            context.DrawRenderers(renderingData.cullResults, ref drawingSettings, ref filteringSettings);
        }
        context.ExecuteCommandBuffer(cmd);
        CommandBufferPool.Release(cmd);
    }

    // Cleanup any allocated resources that were created during the execution of this render pass.
    public override void OnCameraCleanup(CommandBuffer cmd)
    {
        cmd.ReleaseTemporaryRT(normals.id);
    }
}
class ScreenSpaceOutlinesPass : ScriptableRenderPass
{
    private readonly Material screenSpaceOutlineMaterial;
    private RenderTargetIdentifier cameraColorTarget;
    private RenderTargetIdentifier temporaryBuffer;
    private int temporaryBufferID = Shader.PropertyToID("_TemporaryBuffer");
    public ScreenSpaceOutlinesPass(RenderPassEvent renderPassEvent)
    {
        this.renderPassEvent = renderPassEvent;
        screenSpaceOutlineMaterial = new Material(Shader.Find("Shader Graphs/OutlineShader"));
    }
    public override void OnCameraSetup(CommandBuffer cmd, ref RenderingData renderingData)
    {
        cameraColorTarget = renderingData.cameraData.renderer.cameraColorTarget;
    }
    public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
    {
        if (!screenSpaceOutlineMaterial)
            return;

        CommandBuffer cmd = CommandBufferPool.Get();
        using (new ProfilingScope(cmd, new ProfilingSampler("ScreenSpaceOutlines")))
        {
            context.ExecuteCommandBuffer(cmd);
            cmd.Clear();

            Blit(cmd, cameraColorTarget, temporaryBuffer);
            Blit(cmd, temporaryBuffer, cameraColorTarget, screenSpaceOutlineMaterial);
        }
        context.ExecuteCommandBuffer(cmd);
        CommandBufferPool.Release(cmd);
    }

    // Cleanup any allocated resources that were created during the execution of this render pass.
    public override void OnCameraCleanup(CommandBuffer cmd)
    {
    }
}
class FogPass : ScriptableRenderPass
{
    private readonly Material fogMaterial;
    private RenderTargetIdentifier cameraColorTarget;
    private RenderTargetIdentifier temporaryBuffer2;
    private int temporaryBufferID2 = Shader.PropertyToID("_TemporaryBuffer2");
    public FogPass(RenderPassEvent renderPassEvent)
    {
        this.renderPassEvent = renderPassEvent;
        fogMaterial = new Material(Shader.Find("Shader Graphs/FogShader"));
    }
    public override void OnCameraSetup(CommandBuffer cmd, ref RenderingData renderingData)
    {
        cameraColorTarget = renderingData.cameraData.renderer.cameraColorTarget;
    }
    public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
    {
        if (!fogMaterial)
            return;

        CommandBuffer cmd = CommandBufferPool.Get();
        using (new ProfilingScope(cmd, new ProfilingSampler("Fog")))
        {
            context.ExecuteCommandBuffer(cmd);
            cmd.Clear();

            Blit(cmd, cameraColorTarget, temporaryBuffer2);
            Blit(cmd, temporaryBuffer2, cameraColorTarget, fogMaterial);
        }
        context.ExecuteCommandBuffer(cmd);
        CommandBufferPool.Release(cmd);
    }

    // Cleanup any allocated resources that were created during the execution of this render pass.
    public override void OnCameraCleanup(CommandBuffer cmd)
    {
    }
}

[SerializeField]
private RenderPassEvent renderPassEvent;
ViewNormalsTexturePass viewNormalsTexturePass;
ScreenSpaceOutlinesPass screenSpaceOutlinesPass;
FogPass fogPass;

/// <inheritdoc/>
public override void Create()
{
    viewNormalsTexturePass = new ViewNormalsTexturePass(renderPassEvent);
    viewNormalsTexturePass.renderPassEvent = RenderPassEvent.AfterRenderingOpaques;

    screenSpaceOutlinesPass = new ScreenSpaceOutlinesPass(renderPassEvent);
    screenSpaceOutlinesPass.renderPassEvent = RenderPassEvent.AfterRenderingOpaques;

    fogPass = new FogPass(renderPassEvent);
    fogPass.renderPassEvent = RenderPassEvent.AfterRenderingOpaques;
}

// Here you can inject one or multiple render passes in the renderer.
// This method is called when setting up the renderer once per-camera.
public override void AddRenderPasses(ScriptableRenderer renderer, ref RenderingData renderingData)
{
    renderer.EnqueuePass(viewNormalsTexturePass);
    renderer.EnqueuePass(screenSpaceOutlinesPass);
    renderer.EnqueuePass(fogPass);
}
}
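A note on the two blit passes above: both ScreenSpaceOutlinesPass and FogPass blit through temporaryBuffer / temporaryBuffer2, but neither pass ever allocates those buffers with GetTemporaryRT. For reference, here is a minimal sketch of a blit pass that allocates and releases its temporary buffer per camera, using the same pre-RTHandle URP API as the code above (the class name is illustrative, and whether this is related to the effects swapping between the two cameras is unclear):

using UnityEngine;
using UnityEngine.Rendering;
using UnityEngine.Rendering.Universal;

// Sketch only: a blit pass that allocates its intermediate buffer per camera.
class BlitWithTempBufferPassSketch : ScriptableRenderPass
{
    private readonly Material material;
    private readonly int temporaryBufferID = Shader.PropertyToID("_TemporaryBuffer");
    private RenderTargetIdentifier temporaryBuffer;
    private RenderTargetIdentifier cameraColorTarget;

    public BlitWithTempBufferPassSketch(Material material, RenderPassEvent renderPassEvent)
    {
        this.material = material;
        this.renderPassEvent = renderPassEvent;
    }

    public override void OnCameraSetup(CommandBuffer cmd, ref RenderingData renderingData)
    {
        // Allocate the intermediate color buffer for this camera specifically.
        RenderTextureDescriptor descriptor = renderingData.cameraData.cameraTargetDescriptor;
        descriptor.depthBufferBits = 0; // color-only copy for the blit
        cmd.GetTemporaryRT(temporaryBufferID, descriptor, FilterMode.Bilinear);
        temporaryBuffer = new RenderTargetIdentifier(temporaryBufferID);

        cameraColorTarget = renderingData.cameraData.renderer.cameraColorTarget;
    }

    public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
    {
        CommandBuffer cmd = CommandBufferPool.Get("BlitWithTempBuffer");
        // Copy the camera color into the temporary buffer, then blit back through the material.
        Blit(cmd, cameraColorTarget, temporaryBuffer);
        Blit(cmd, temporaryBuffer, cameraColorTarget, material);
        context.ExecuteCommandBuffer(cmd);
        CommandBufferPool.Release(cmd);
    }

    public override void OnCameraCleanup(CommandBuffer cmd)
    {
        // Release the per-camera buffer so the next camera gets a clean allocation.
        cmd.ReleaseTemporaryRT(temporaryBufferID);
    }
}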

If you need to see any of the 3 shader graphs used, or some of the project settings, just ask (a lot of things could be involved, and including all of them here would be too much).

A possibly related problem: for some reason, edge detection using normals works fine in the editor, but in the build only the depth-based detection works, and I have no idea why. Here is an image of the same scene as above (just with the cameras switched, because it is another connected client):

[Image: the same scene in the build, with the normals-based edges missing]

Any ideas why?
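For reference, all three materials above are created with Shader.Find("Shader Graphs/..."). In a build, shaders that are only located through Shader.Find can be stripped unless they are added to Always Included Shaders (Project Settings > Graphics), placed in a Resources folder, or referenced by a serialized field, which might matter for the normals/outline pass. A sketch of the serialized-reference approach is below; the class and field names are illustrative, not from the project above.

using UnityEngine;
using UnityEngine.Rendering;
using UnityEngine.Rendering.Universal;

// Sketch only: shaders referenced through serialized fields are shipped with the
// build automatically, unlike shaders located solely via Shader.Find.
public class SerializedShaderFeatureSketch : ScriptableRendererFeature
{
    [SerializeField] private Shader normalsShader; // e.g. Shader Graphs/ViewSpaceNormalsShader
    [SerializeField] private Shader outlineShader; // e.g. Shader Graphs/OutlineShader
    [SerializeField] private Shader fogShader;     // e.g. Shader Graphs/FogShader

    private Material normalsMaterial;
    private Material outlineMaterial;
    private Material fogMaterial;

    public override void Create()
    {
        // CoreUtils.CreateEngineMaterial builds a HideAndDontSave material from the
        // serialized Shader reference, so the shader graphs cannot be stripped.
        normalsMaterial = CoreUtils.CreateEngineMaterial(normalsShader);
        outlineMaterial = CoreUtils.CreateEngineMaterial(outlineShader);
        fogMaterial = CoreUtils.CreateEngineMaterial(fogShader);
    }

    public override void AddRenderPasses(ScriptableRenderer renderer, ref RenderingData renderingData)
    {
        // The passes would be constructed with these materials instead of calling Shader.Find.
    }
}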

c# unity-game-engine rendering edge-detection urp

Comments


A: No answers yet