I'm working in SlimDX, targeting DirectX 11 with shader model 4. I have a pixel shader "preProc" which processes my vertices and saves three textures of data: one for per-pixel normals, one for per-pixel position data, and one for color and depth (color in the rgb channels and depth in the alpha channel).

I then use these textures later in a post-processing shader to implement screen-space ambient occlusion, but it seems that no data is being saved by that first shader.

This is my pixel shader:

PS_OUT PS( PS_IN input )
{
    PS_OUT output;
    output.col = float4(0,0,0,0);
    output.norm = float4(input.norm,1);
    output.pos = input.pos;
    return output;
}

which outputs the following struct:

struct PS_OUT
{
    float4 col : SV_TARGET0;
    float4 norm : SV_TARGET1;
    float4 pos : SV_TARGET2;
};

and takes the following struct as input:

struct PS_IN
{
    float4 pos : SV_POSITION;
    float2 tex : TEXCOORD0;
    float3 norm : TEXCOORD1;
};

But in my post-processing shader:

Texture2D renderTex : register(t1);
Texture2D normalTex : register(t2);
Texture2D positionTex : register(t3);
Texture2D randomTex : register(t4);
SamplerState samLinear : register(s0);

float4 PS(PS_IN input) : SV_Target
{
    return float4(getCol(input.tex));
}

it just outputs a light blue screen (the color I reset my render targets to at the start of each frame). getCol has been tested and does return the color from the renderTex texture when I'm dealing with only one render target. If I change the pixel shader to sample the randomTex texture instead (which my code loads from a file earlier on and which is not a render target), everything renders fine, so I'm fairly sure the problem isn't my post-processing shader.

In case it's my SlimDX code that's at fault, this is what I do:

Creating my textures, ShaderResourceViews and RenderTargetViews:

Texture2DDescription textureDescription = new Texture2DDescription()
{
    Width = texWidth,
    Height = texHeight,
    MipLevels = 1,
    ArraySize = 3,
    Format = SlimDX.DXGI.Format.R32G32B32A32_Float,
    SampleDescription = new SlimDX.DXGI.SampleDescription(1, 0),
    BindFlags = BindFlags.RenderTarget | BindFlags.ShaderResource,
    CpuAccessFlags = CpuAccessFlags.None,
    OptionFlags = ResourceOptionFlags.None,
    Usage = ResourceUsage.Default,
};
texture = new Texture2D(device, textureDescription);

renderTargetView = new RenderTargetView[3];
shaderResourceView = new ShaderResourceView[3];

for (int i = 0; i < 3; i++)
{
    RenderTargetViewDescription renderTargetViewDescription = new RenderTargetViewDescription()
    {
        Format = textureDescription.Format,
        Dimension = RenderTargetViewDimension.Texture2D,
        MipSlice = 0,
    };

    renderTargetView[i] = new RenderTargetView(device, texture, renderTargetViewDescription);

    ShaderResourceViewDescription shaderResourceViewDescription = new ShaderResourceViewDescription()
    {
        Format = textureDescription.Format,
        Dimension = ShaderResourceViewDimension.Texture2D,
        MostDetailedMip = 0,
        MipLevels = 1
    };

    shaderResourceView[i] = new ShaderResourceView(device, texture, shaderResourceViewDescription);
}

Rendering to my multiple render targets:

private void renderToTexture(Shader shader)
{
    //set the vertex and pixel shaders
    context.VertexShader.Set(shader.VertexShader);
    context.PixelShader.Set(shader.PixelShader);

    //send texture data and a linear sampler to the shader
    context.PixelShader.SetShaderResource(texture, 0);
    context.PixelShader.SetSampler(samplerState, 0);

    //set the input assembler
    SetInputAssembler(shader);

    //reset the camera's constant buffer
    camera.ResetConstantBuffer();

    //set the render targets to the textures we will render to
    context.OutputMerger.SetTargets(depthStencilView, renderTargetViews);
    //clear the render targets and depth stencil
    foreach (RenderTargetView view in renderTargetViews)
    {
        context.ClearRenderTargetView(view, color);
    }
    context.ClearDepthStencilView(depthStencilView, DepthStencilClearFlags.Depth, 1.0f, 0);
    //draw the scene
    DrawScene();
}

And then the function where I render the post-processing shader to the screen:

private void renderTexture(Shader shader)
{
    //get a single quad to be the screen we render
    Mesh mesh = CreateScreenFace();
    //set vertex and pixel shaders
    context.VertexShader.Set(shader.VertexShader);
    context.PixelShader.Set(shader.PixelShader);
    //set the input assembler
    SetInputAssembler(shader);
    //point the render target to the screen
    context.OutputMerger.SetTargets(depthStencil, renderTarget);
    //send the rendered textures and a linear sampler to the shader
    context.PixelShader.SetShaderResource(renderTargetViews[0], 1);
    context.PixelShader.SetShaderResource(renderTargetViews[1], 2);
    context.PixelShader.SetShaderResource(renderTargetViews[2], 3);
    context.PixelShader.SetShaderResource(random, 4);
    context.PixelShader.SetSampler(samplerState, 0);
    //clear the render targets and depth stencils
    context.ClearRenderTargetView(renderTarget, new Color4(0.52734375f, 0.8046875f, 0.9765625f));
    context.ClearDepthStencilView(depthStencil, DepthStencilClearFlags.Depth, 1, 0);
    //set the vertex and index buffers from the quad
    context.InputAssembler.SetVertexBuffers(0, new VertexBufferBinding(mesh.VertexBuffer, Marshal.SizeOf(typeof(Vertex)), 0));
    context.InputAssembler.SetIndexBuffer(mesh.IndexBuffer, Format.R16_UInt, 0);
    //draw the quad
    context.DrawIndexed(mesh.indices, 0, 0);
    //dispose of the buffers
    mesh.VertexBuffer.Dispose();
    mesh.IndexBuffer.Dispose();
}
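
For completeness, per frame these two functions end up being called back to back, roughly as in the sketch below. The shader variables and the swapChain field are assumed names on my part, not taken from the code above:

//hypothetical frame loop: preProcShader fills the three MRT textures,
//postProcShader then samples them while drawing the full-screen quad
//(preProcShader, postProcShader and swapChain are assumed names)
private void RenderFrame()
{
    renderToTexture(preProcShader);
    renderTexture(postProcShader);
    swapChain.Present(0, SlimDX.DXGI.PresentFlags.None);
}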

EDIT: I've added the PIX function call output for a single frame of the current run:

Frame 40
//setup
<0x06BDA1D8> ID3D11DeviceContext::ClearRenderTargetView(0x06B66190, 0x0028F068)
<0x06BDA1D8> ID3D11DeviceContext::ClearDepthStencilView(0x06B66138, 1, 1.000f, 0)
<0x0059FF78> ID3D11Device::CreateBuffer(0x0028F010, 0x0028EFF8, 0x0028F00C --> 0x06BF8EE0)
CreateObject(D3D11 Buffer, 0x06BF8EE0)
<0x06BDA1D8> ID3D11DeviceContext::PSSetConstantBuffers(0, 1, 0x0028F084 --> { 0x06BF8EE0 })
<0x0059FF78> ID3D11Device::CreateBuffer(0x0F8DEB58, 0x0F8DEB40, 0x0F8DEB54 --> 0x06BF8F68)
CreateObject(D3D11 Buffer, 0x06BF8F68)
<0x0059FF78> ID3D11Device::CreateBuffer(0x0F70EAD8, 0x0F70EAC0, 0x0F70EAD4 --> 0x06BF8FF0)
CreateObject(D3D11 Buffer, 0x06BF8FF0)
<0x0059FF78> ID3D11Device::CreateBuffer(0x0FAAE9A8, 0x0FAAE990, 0x0FAAE9A4 --> 0x06BF9078)
CreateObject(D3D11 Buffer, 0x06BF9078)
<0x0059FF78> ID3D11Device::GetImmediateContext(0x06BDA1D8 --> 0x5BA8A8D8)
<0x0059FF78> ID3D11Device::CreateBuffer(0x0F8DEB58, 0x0F8DEB40, 0x0F8DEB54 --> 0x06BF9100)
CreateObject(D3D11 Buffer, 0x06BF9100)
<0x0059FF78> ID3D11Device::CreateBuffer(0x0F70EAD8, 0x0F70EAC0, 0x0F70EAD4 --> 0x06BF9188)
CreateObject(D3D11 Buffer, 0x06BF9188)
<0x06BDA1D8> ID3D11DeviceContext::Release()
<0x06BDA1D8> ID3D11DeviceContext::UpdateSubresource(0x06B59270, 0, NULL, 0x06287FA0, 0, 0)
<0x0059FF78> ID3D11Device::CreateBuffer(0x0FAAE9A8, 0x0FAAE990, 0x0FAAE9A4 --> 0x06BF9210)
CreateObject(D3D11 Buffer, 0x06BF9210)
<0x06BDA1D8> ID3D11DeviceContext::VSSetShader(0x06B66298, NULL, 0)
<0x0059FF78> ID3D11Device::CreateBuffer(0x0FC0E978, 0x0FC0E960, 0x0FC0E974 --> 0x06BF9298)
CreateObject(D3D11 Buffer, 0x06BF9298)
<0x0059FF78> ID3D11Device::CreateBuffer(0x0FE8EDE8, 0x0FE8EDD0, 0x0FE8EDE4 --> 0x06BF9320)
CreateObject(D3D11 Buffer, 0x06BF9320)
<0x06BDA1D8> ID3D11DeviceContext::PSSetShader(0x06B666F8, NULL, 0)
<0x0059FF78> ID3D11Device::CreateBuffer(0x0FC0E978, 0x0FC0E960, 0x0FC0E974 --> 0x06BF93A8)
CreateObject(D3D11 Buffer, 0x06BF93A8)
<0x0059FF78> ID3D11Device::CreateBuffer(0x0FE8EDE8, 0x0FE8EDD0, 0x0FE8EDE4 --> 0x06BF9430)
CreateObject(D3D11 Buffer, 0x06BF9430)
<0x0059FF78> ID3D11Device::CreateInputLayout(0x0028EBE0, 3, 0x06286CB8, 152, 0x0028EBD8 --> 0x06BF9D68)
CreateObject(D3D11 Input Layout, 0x06BF9D68)
<0x06BDA1D8> ID3D11DeviceContext::IASetInputLayout(0x06BF9D68)
<0x06BDA1D8> ID3D11DeviceContext::IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST)
<0x0059FF78> ID3D11Device::GetImmediateContext(0x06BDA1D8 --> 0x5BA8A8D8)
<0x06BDA1D8> ID3D11DeviceContext::Release()
<0x06BDA1D8> ID3D11DeviceContext::VSSetConstantBuffers(0, 1, 0x0028F024 --> { 0x06B59270 })
<0x06BDA1D8> ID3D11DeviceContext::OMSetRenderTargets(3, 0x0028F004 --> { 0x06B65708, 0x06B657B8, 0x06B582E0 }, 0x06B66138)
<0x06BDA1D8> ID3D11DeviceContext::ClearRenderTargetView(0x06B65708, 0x0028EFEC)
<0x06BDA1D8> ID3D11DeviceContext::ClearRenderTargetView(0x06B657B8, 0x0028EFEC)
<0x06BDA1D8> ID3D11DeviceContext::ClearRenderTargetView(0x06B582E0, 0x0028EFEC)
<0x06BDA1D8> ID3D11DeviceContext::ClearDepthStencilView(0x06B66138, 1, 1.000f, 0)
//draw scene for preproc shader (this should output the three render targets)
//DRAW CALLS HIDDEN
<0x0059FF78> ID3D11Device::CreateBuffer(0x0028EE04, 0x0028EDEC, 0x0028EE00 --> 0x06BF94B8)
CreateObject(D3D11 Buffer, 0x06BF94B8)
<0x0059FF78> ID3D11Device::CreateBuffer(0x0028EE04, 0x0028EDEC, 0x0028EE00 --> 0x06BF9540)
CreateObject(D3D11 Buffer, 0x06BF9540)
<0x06BDA1D8> ID3D11DeviceContext::VSSetShader(0x06B66BB8, NULL, 0)
<0x06BDA1D8> ID3D11DeviceContext::PSSetShader(0x06B66E50, NULL, 0)
<0x0059FF78> ID3D11Device::CreateInputLayout(0x0028EB64, 3, 0x05E988E0, 120, 0x0028EB5C --> 0x06BF9E28)
CreateObject(D3D11 Input Layout, 0x06BF9E28)
<0x06BDA1D8> ID3D11DeviceContext::IASetInputLayout(0x06BF9E28)
<0x06BDA1D8> ID3D11DeviceContext::IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST)
<0x06BDA1D8> ID3D11DeviceContext::OMSetRenderTargets(1, 0x0028EFC0 --> { 0x06B66190 }, 0x06B66138)
<0x06BDA1D8> ID3D11DeviceContext::PSSetShaderResources(1, 3, 0x0028EF3C --> { 0x06B65760, 0x06B58288, 0x06B58338 })
<0x06BDA1D8> ID3D11DeviceContext::PSSetShaderResources(4, 1, 0x0028EFC0 --> { 0x06B66FA0 })
<0x06BDA1D8> ID3D11DeviceContext::ClearRenderTargetView(0x06B66190, 0x0028EFA4)
<0x06BDA1D8> ID3D11DeviceContext::ClearDepthStencilView(0x06B66138, 1, 1.000f, 0)
<0x06BDA1D8> ID3D11DeviceContext::IASetVertexBuffers(0, 1, 0x0028EFAC --> { 0x06BF94B8 }, 0x0028EFB0, 0x0028EFB4)
<0x06BDA1D8> ID3D11DeviceContext::IASetIndexBuffer(0x06BF9540, DXGI_FORMAT_R16_UINT, 0)
//draw quad for post proc shader. This shader takes the three textures in, as well as a random texture, which is added in the second PSSetShaderResources call. The random texture outputs fine.
<0x06BDA1D8> ID3D11DeviceContext::DrawIndexed(6, 0, 0)
<0x06BF94B8> ID3D11Buffer::Release()
<0x06BF9540> ID3D11Buffer::Release()
<0x06B65B00> IDXGISwapChain::Present(0, 0)

EDIT2: I've been reading around, and maybe I need to unbind the textures as render targets after the preProc pass before passing them to my postProcess shader as ShaderResourceViews. I had assumed that calling context.OutputMerger.SetTargets() would unbind all of the currently assigned render targets and then bind only the render targets specified in the function's parameters. If that isn't the case (I can't yet tell whether it is), how would I go about un-assigning render targets in SlimDX?
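
For what it's worth, shader resource slots can also be cleared explicitly by binding null in their place, so the textures are no longer flagged as pixel shader inputs when they get re-bound as render targets at the start of the next preProc pass. A minimal sketch, assuming SlimDX forwards a null ShaderResourceView as a NULL pointer to the underlying PSSetShaderResources call:

//sketch: clear the SRV slots used by the post-process pass
//(assumes SetShaderResource accepts a null view reference)
context.PixelShader.SetShaderResource(null, 1);
context.PixelShader.SetShaderResource(null, 2);
context.PixelShader.SetShaderResource(null, 3);
context.PixelShader.SetShaderResource(null, 4);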

EDIT3: Ah, according to this MSDN page, a call to OutputMerger.SetRenderTargets() "overrides all bounded render targets and the depth stencil target regardless of the number of render targets in ppRenderTargetViews." So all of my render targets are unbound automatically as soon as I tell the OutputMerger to render to the screen. That puts me back at square one.

1 Answer

Solved it by discovering how stupid I was being.

When I created my render targets I was creating a single Texture2DArray, but I was then treating it as a set of separate Texture2D objects rather than as one object. I've since changed my code to use an array of separate Texture2D objects and it works fine.
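
In code terms the fix amounts to something like the sketch below: the same texture description as in the question, but with an ArraySize of 1 and one Texture2D per render target. Variable names mirror the question and are illustrative rather than the exact code used:

//one separate Texture2D per render target instead of a single Texture2DArray
textureDescription.ArraySize = 1;

textures = new Texture2D[3];
renderTargetView = new RenderTargetView[3];
shaderResourceView = new ShaderResourceView[3];

for (int i = 0; i < 3; i++)
{
    textures[i] = new Texture2D(device, textureDescription);

    //each view now refers to its own single-slice texture
    renderTargetView[i] = new RenderTargetView(device, textures[i], new RenderTargetViewDescription()
    {
        Format = textureDescription.Format,
        Dimension = RenderTargetViewDimension.Texture2D,
        MipSlice = 0,
    });

    shaderResourceView[i] = new ShaderResourceView(device, textures[i], new ShaderResourceViewDescription()
    {
        Format = textureDescription.Format,
        Dimension = ShaderResourceViewDimension.Texture2D,
        MostDetailedMip = 0,
        MipLevels = 1
    });
}

The alternative would have been to keep the single Texture2DArray and give each view the Texture2DArray dimension pointing at its own array slice, but separate textures leave the rest of the rendering code untouched.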

answered 2011-08-01T17:40:43.263