
Hi, is it possible to render a 3D RenderTexture in a custom raymarching shader, the same way a Texture3D can be rendered?

I am using a 3D RenderTexture because I compute and set the colors of the 3D volume in a compute shader. I set up the 3D RenderTexture like this:

output3DRenderTexture = new RenderTexture(m_CubeDim.x, m_CubeDim.y, 0, thisTexFormat);
output3DRenderTexture.enableRandomWrite = true; // allow the compute shader to write into it
output3DRenderTexture.dimension = UnityEngine.Rendering.TextureDimension.Tex3D;
output3DRenderTexture.volumeDepth = m_CubeDim.z;
output3DRenderTexture.Create();

I fill the 3D RenderTexture with data in the compute shader, and GetData lets me confirm that the 3D render texture holds all the correct color data.
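For context, here is roughly how I fill and verify the volume; this is only a sketch, assuming a kernel called CSMain with [numthreads(8,8,8)] and a separate debug ComputeBuffer for the GetData check (kernel, buffer and variable names here are placeholders rather than the exact code):

int kernel = m_ComputeShader.FindKernel("CSMain");

// bind the 3D render texture as the UAV the kernel writes into
m_ComputeShader.SetTexture(kernel, "_Output3D", output3DRenderTexture);

// optional debug buffer so ComputeBuffer.GetData can verify the colors on the CPU
var debugBuffer = new ComputeBuffer(m_CubeDim.x * m_CubeDim.y * m_CubeDim.z, sizeof(float) * 4);
m_ComputeShader.SetBuffer(kernel, "_DebugColors", debugBuffer);

// one thread per voxel, assuming [numthreads(8,8,8)] in the compute shader
m_ComputeShader.Dispatch(kernel,
    Mathf.CeilToInt(m_CubeDim.x / 8f),
    Mathf.CeilToInt(m_CubeDim.y / 8f),
    Mathf.CeilToInt(m_CubeDim.z / 8f));

// CPU-side readback used only to confirm the data is correct
var colors = new Vector4[m_CubeDim.x * m_CubeDim.y * m_CubeDim.z];
debugBuffer.GetData(colors);
debugBuffer.Release();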

If I swap the 3D RenderTexture out of the custom shader's sampler3D and instead bind a Texture3D that I built from Texture2D slices, it renders successfully:

cubeRenderer.material.SetTexture("_MainTex", output3DRenderTexture);//this does not render

versus

cubeRenderer.material.SetTexture("_MainTex", outputTexture3D);//this renders
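For reference, outputTexture3D is built on the CPU roughly like this; a minimal sketch, assuming each slice has already been read back into its own Color[] (sliceColors, width, height and depth are placeholder names, not the exact code):

// build the Texture3D on the CPU from per-slice color arrays read back from the GPU
var outputTexture3D = new Texture3D(width, height, depth, TextureFormat.RGBAFloat, false);
var volume = new Color[width * height * depth];

for (int z = 0; z < depth; z++)
    sliceColors[z].CopyTo(volume, z * width * height); // sliceColors[z] holds the Color[] of slice z

outputTexture3D.SetPixels(volume);
outputTexture3D.Apply(); // uploads the whole volume back to the GPU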

This post from 2016 seems to indicate that a 3D RenderTexture can be rendered in a custom shader, but it may be out of date by now; it does not work for me, and no errors show up either.

It seems to me that creating the Texture2D slices on the GPU, then building a Texture3D from them on the CPU (as sketched above) and sending that Texture3D back to the GPU for the custom shader to use could hurt performance significantly. After all, the whole 3D volume already lives on the GPU; the only difference is that the RenderTexture is set up as Tex3D. Thanks!

The shader:

#include "UnityCG.cginc"
#define ITERATIONS 100
#define PI2 6.28318530718
half4 _Color;
sampler3D _MainTex;
half _Intensity, _Threshold;
half3 _SliceMin, _SliceMax;
float4x4 _AxisRotationMatrix;
float _Angle;
struct Ray {
    float3 origin;
    float3 dir;
};
struct AABB {
    float3 min;
    float3 max;
};
// http://download.nvidia.com/developer/presentations/2005/GDC/Audio_and_Slides/VolumeRendering_files/GDC_2_files/GDC_2005_VolumeRenderingForGames_files/Slide0073.htm
bool intersect(Ray r, AABB aabb, out float t0, out float t1)
{
    float3 invR = 1.0 / r.dir;
    float3 tbot = invR * (aabb.min - r.origin);
    float3 ttop = invR * (aabb.max - r.origin);
    float3 tmin = min(ttop, tbot);
    float3 tmax = max(ttop, tbot);
    float2 t = max(tmin.xx, tmin.yz);
    t0 = max(t.x, t.y);
    t = min(tmax.xx, tmax.yz);
    t1 = min(t.x, t.y);
    return t0 <= t1;
}
// map a local-space position in the [-0.5, 0.5] cube to 3D texture coordinates in [0, 1]
float3 get_uv(float3 p) {
    return (p + 0.5);
}

float sample_volume(float3 uv, float3 p)
{
    float v = tex3D(_MainTex, uv).r * _Intensity;
    return v;
}
struct appdata
{
    float4 vertex : POSITION;
    float2 uv : TEXCOORD0;
};
struct v2f
{
    float4 vertex : SV_POSITION;
    float2 uv : TEXCOORD0;
    float3 world : TEXCOORD1;
    float3 local : TEXCOORD2;
};
v2f vert(appdata v)
{
    v2f o;
    o.vertex = UnityObjectToClipPos(v.vertex);
    o.uv = v.uv;
    o.world = mul(unity_ObjectToWorld, v.vertex).xyz;
    o.local = v.vertex.xyz;
    return o;
}
fixed4 frag(v2f i) : SV_Target
{
    Ray ray;
    ray.origin = i.local;
    // world space direction to object space
    float3 dir = (i.world - _WorldSpaceCameraPos);
    ray.dir = normalize(mul(unity_WorldToObject, dir));
    AABB aabb;
    aabb.min = float3(-0.5, -0.5, -0.5);
    aabb.max = float3(0.5, 0.5, 0.5);
    float tnear;
    float tfar;
    intersect(ray, aabb, tnear, tfar);
    tnear = max(0.0, tnear);
    // float3 start = ray.origin + ray.dir * tnear;
    float3 start = ray.origin;
    float3 end = ray.origin + ray.dir * tfar;
    float dist = abs(tfar - tnear);
    float step_size = dist / float(ITERATIONS);
    float3 ds = normalize(end - start) * step_size;
    float4 dst = float4(0, 0, 0, 0);
    float3 p = start;
    [unroll]
    for (int iter = 0; iter < ITERATIONS; iter++)
    {
        float3 uv = get_uv(p);
        float v = sample_volume(uv, p);
        float4 src = float4(v, v, v, v);
        src.a *= 0.5;
        src.rgb *= src.a;
        // blend
        dst = (1.0 - dst.a) * src + dst;
        p += ds;
        if (dst.a > _Threshold) break;
    }
    return saturate(dst) * _Color;
}
#endif
