1

我正在尝试导出在 ARSCNView 渲染过程中创建的深度纹理。为了做到这一点,我编写了一段代码,使用自定义的 MTLRenderPassDescriptor 在后台渲染 SCNScene。当我使用"GPU Capture tool"跟踪绑定到 GPU 的资源时,我发现 SCNRenderer.render 方法忽略了自定义的 MTLRenderPassDescriptor。

我使用此代码将 SCNScene 渲染到屏幕外。

// Render Pass - render sceneView
renderer.scene = sceneView.scene
renderer.pointOfView = sceneView.pointOfView
renderer.render(atTime: 0, viewport: viewport, commandBuffer: commandBuffer, passDescriptor: renderPassDescriptor)

当我检查 GPU 捕获中的资源时,渲染器会生成自己的帧纹理和深度纹理,而不是使用 renderPassDescriptor 中指定的纹理。根据文档,这很奇怪。我还在没有 ARKit 会话的情况下对此进行了测试,它按预期工作(渲染器使用了 renderPassDescriptor 中描述的纹理资源)。我怎样才能解决这个问题?这是一个 SceneKit 的 bug 吗?

图 1. 我在 GPU 捕获中发现深度纹理与 blit pass 无关。

图 2. 颜色附件纹理地址为 0x144a4f310

图 3. 深度附件纹理地址为 0x144a50050

图 4. 绑定到渲染方法的纹理具有不同的地址

这是最小工作示例。

import UIKit
import SceneKit
import ARKit

/// Minimal reproduction: renders the ARSCNView's scene offscreen with a custom
/// `MTLRenderPassDescriptor`, then blits the depth attachment into a shared
/// `MTLBuffer` so it can be inspected on the CPU.
class ViewController: UIViewController, ARSCNViewDelegate, ARSessionDelegate {

  @IBOutlet var sceneView: ARSCNView!
  var ship: SCNNode!
  var device: MTLDevice!
  // Offscreen renderer sharing the AR scene and point of view.
  var renderer: SCNRenderer!
  var commandQueue: MTLCommandQueue!
  // Offscreen render-target size in pixels.
  let textureSizeX = 2732
  let textureSizeY = 2048
  lazy var viewport = CGRect(x: 0, y: 0, width: CGFloat(textureSizeX), height: CGFloat(textureSizeY))


  override func viewDidLoad() {
    super.viewDidLoad()

    // Set the view's delegate
    sceneView.delegate = self
    sceneView.session.delegate = self
    sceneView.showsStatistics = true
    sceneView.scene = SCNScene()

    // FIX: the original force-unwrapped `childNode(...)` inside an optional chain,
    // so `ship` could still be assigned nil and then crash inside `addChildNode`
    // with no useful message if the asset was missing. Fail explicitly instead,
    // matching the fatalError style used for the reference images below.
    guard let shipNode = SCNScene(named: "art.scnassets/ship.scn")?
            .rootNode.childNode(withName: "shipMesh", recursively: true) else {
      fatalError("Missing expected asset: art.scnassets/ship.scn / shipMesh")
    }
    ship = shipNode
    sceneView.scene.rootNode.addChildNode(ship)


    // Background (offscreen) renderer.
    device = MTLCreateSystemDefaultDevice()!
    renderer = SCNRenderer(device: device, options: nil)
    commandQueue = device.makeCommandQueue()!
  }

  override func viewWillAppear(_ animated: Bool) {
    super.viewWillAppear(animated)

    guard let referenceImages = ARReferenceImage.referenceImages(inGroupNamed: "AR Resources", bundle: nil) else {
      fatalError("Missing expected asset catalog resources.")
    }

    // Create a session configuration with image detection enabled.
    let configuration = ARWorldTrackingConfiguration()
    configuration.detectionImages = referenceImages

    // Run the view's session
    sceneView.session.run(configuration, options: [.resetTracking, .removeExistingAnchors])
  }

  override func viewWillDisappear(_ animated: Bool) {
    super.viewWillDisappear(animated)

    // Pause the view's session
    sceneView.session.pause()
  }

  // MARK: - ARSCNViewDelegate

  /// Triggers the offscreen render once per on-screen frame, just before
  /// SceneKit renders the view's own frame.
  func renderer(_ renderer: SCNSceneRenderer, willRenderScene scene: SCNScene, atTime time: TimeInterval) {
    doRender()
  }

  /// Renders the AR scene into the custom pass descriptor's attachments, then
  /// copies the depth attachment into a CPU-visible buffer.
  ///
  /// NOTE(review): `waitUntilCompleted()` blocks the render-loop thread and is
  /// acceptable only in this repro; production code should use
  /// `addCompletedHandler` instead.
  func doRender() {
    let renderPassDescriptor = makeRenderPassDescriptor()
    let commandBuffer = commandQueue.makeCommandBuffer()!

    // Render Pass - render sceneView's scene from the same point of view.
    renderer.scene = sceneView.scene
    renderer.pointOfView = sceneView.pointOfView
    renderer.render(atTime: 0, viewport: viewport, commandBuffer: commandBuffer, passDescriptor: renderPassDescriptor)

    // Blit Pass - copy the depth texture into a shared buffer.
    // .depth32Float is 4 bytes per pixel, hence the 4x sizing below.
    let imageWidth = textureSizeX
    let imageHeight = textureSizeY
    let pixelCount = imageWidth * imageHeight
    let depthImageBuffer = device.makeBuffer(length: 4 * pixelCount, options: .storageModeShared)!
    let blitEncoder = commandBuffer.makeBlitCommandEncoder()!
    blitEncoder.copy(from: renderPassDescriptor.depthAttachment.texture!,
             sourceSlice: 0,
             sourceLevel: 0,
             sourceOrigin: MTLOriginMake(0, 0, 0),
             sourceSize: MTLSizeMake(imageWidth, imageHeight, 1),
             to: depthImageBuffer,
             destinationOffset: 0,
             destinationBytesPerRow: 4 * imageWidth,
             destinationBytesPerImage: 4 * pixelCount,
             options: .depthFromDepthStencil)

    blitEncoder.endEncoding()


    commandBuffer.commit()
    // Wait until depth buffer copying is done.
    commandBuffer.waitUntilCompleted()
  }

  /// Builds a render pass descriptor with a freshly allocated color attachment
  /// (sRGB, clear-to-white) and a stored .depth32Float depth attachment, both
  /// sized textureSizeX x textureSizeY.
  func makeRenderPassDescriptor() -> MTLRenderPassDescriptor {
    let renderPassDescriptor = MTLRenderPassDescriptor()

    let frameBufferDescriptor = MTLTextureDescriptor.texture2DDescriptor(pixelFormat: .rgba8Unorm_srgb, width: textureSizeX, height: textureSizeY, mipmapped: false)
    frameBufferDescriptor.usage = [.renderTarget, .shaderRead]
    renderPassDescriptor.colorAttachments[0].texture = device.makeTexture(descriptor: frameBufferDescriptor)!
    renderPassDescriptor.colorAttachments[0].loadAction = .clear
    renderPassDescriptor.colorAttachments[0].storeAction = .store
    renderPassDescriptor.colorAttachments[0].clearColor = MTLClearColorMake(1, 1, 1, 1.0)

    // storeAction must be .store so the depth contents survive for the blit pass.
    let depthBufferDescriptor: MTLTextureDescriptor = MTLTextureDescriptor.texture2DDescriptor(pixelFormat: .depth32Float, width: textureSizeX, height: textureSizeY, mipmapped: false)
    depthBufferDescriptor.usage = .renderTarget
    renderPassDescriptor.depthAttachment.texture = device.makeTexture(descriptor: depthBufferDescriptor)
    renderPassDescriptor.depthAttachment.loadAction = .clear
    renderPassDescriptor.depthAttachment.storeAction = .store

    return renderPassDescriptor
  }

  // MARK: - ARSessionDelegate / ARSessionObserver

  func session(_ session: ARSession, didFailWithError error: Error) {
    // Present an error message to the user

  }

  func session(_ session: ARSession, didUpdate frame: ARFrame) {

  }

  func sessionWasInterrupted(_ session: ARSession) {
    // Inform the user that the session has been interrupted, for example, by presenting an overlay

  }

  func sessionInterruptionEnded(_ session: ARSession) {
    // Reset tracking and/or remove existing anchors if consistent tracking is required

  }
}
4

0 回答 0