
What I'm trying to do

I'm trying to display filters on top of the camera feed using a Metal view: MTKView. I'm closely following the approach of Apple's sample code, Enhancing Live Video by Leveraging TrueDepth Camera Data (link).

What I have so far

The following code works well (mostly adapted from the sample code mentioned above):

class MetalObject: NSObject, MTKViewDelegate {

    private var metalBufferView         : MTKView?
    private var metalDevice             = MTLCreateSystemDefaultDevice()
    private var metalCommandQueue       : MTLCommandQueue!

    private var ciContext               : CIContext!
    private let colorSpace              = CGColorSpaceCreateDeviceRGB()

    private var videoPixelBuffer        : CVPixelBuffer?

    private let syncQueue               = DispatchQueue(label: "Preview View Sync Queue", qos: .userInitiated, attributes: [], autoreleaseFrequency: .workItem)

    private var textureWidth            : Int             = 0
    private var textureHeight           : Int             = 0
    private var textureMirroring        = false
    private var sampler                 : MTLSamplerState!
    private var renderPipelineState     : MTLRenderPipelineState!
    private var vertexCoordBuffer       : MTLBuffer!
    private var textCoordBuffer         : MTLBuffer!
    private var internalBounds          : CGRect!
    private var textureTranform         : CGAffineTransform?

    private var previewImage            : CIImage?

    init(with frame: CGRect) {
        super.init()

        self.metalBufferView = MTKView(frame: frame, device: self.metalDevice)
        self.metalBufferView!.contentScaleFactor = UIScreen.main.nativeScale
        self.metalBufferView!.framebufferOnly = true
        self.metalBufferView!.colorPixelFormat = .bgra8Unorm
        self.metalBufferView!.isPaused = true
        self.metalBufferView!.enableSetNeedsDisplay = false
        self.metalBufferView!.delegate = self

        self.metalCommandQueue = self.metalDevice!.makeCommandQueue()

        self.ciContext = CIContext(mtlDevice: self.metalDevice!)


        //Configure Metal
        let defaultLibrary = self.metalDevice!.makeDefaultLibrary()!
        let pipelineDescriptor = MTLRenderPipelineDescriptor()
        pipelineDescriptor.colorAttachments[0].pixelFormat = .bgra8Unorm
        pipelineDescriptor.vertexFunction = defaultLibrary.makeFunction(name: "vertexPassThrough")
        pipelineDescriptor.fragmentFunction = defaultLibrary.makeFunction(name: "fragmentPassThrough")

        // To determine how our textures are sampled, we create a sampler descriptor, which
        // will be used to ask for a sampler state object from our device below.
        let samplerDescriptor = MTLSamplerDescriptor()
        samplerDescriptor.sAddressMode = .clampToEdge
        samplerDescriptor.tAddressMode = .clampToEdge
        samplerDescriptor.minFilter = .linear
        samplerDescriptor.magFilter = .linear

        sampler = self.metalDevice!.makeSamplerState(descriptor: samplerDescriptor)

        do {
            renderPipelineState = try self.metalDevice!.makeRenderPipelineState(descriptor: pipelineDescriptor)
        } catch {
            fatalError("Unable to create preview Metal view pipeline state. (\(error))")
        }

    }




    final func update(newVideoPixelBuffer: CVPixelBuffer?) {

        self.syncQueue.async {

            guard let newVideoPixelBuffer = newVideoPixelBuffer else { return }

            self.videoPixelBuffer = newVideoPixelBuffer

            // Base image for the filter chain (created here so the excerpt compiles)
            var filteredImage = CIImage(cvPixelBuffer: newVideoPixelBuffer)

            //---------
            // Core Image filters
            // Strictly CIFilters, chained together (edited out, see Notes)
            //---------

            self.previewImage = filteredImage

            // Ask the Metal view to draw
            self.metalBufferView?.draw()

        }
    }



    //MARK: - Metal View Delegate
    final func draw(in view: MTKView) {

        print(Thread.current)

        guard let drawable = self.metalBufferView!.currentDrawable,
            let currentRenderPassDescriptor = self.metalBufferView!.currentRenderPassDescriptor,
            let previewImage = self.previewImage else {
                return
        }


        // create a texture for the CI image to render to
        let textureDescriptor = MTLTextureDescriptor.texture2DDescriptor(
            pixelFormat: .bgra8Unorm,
            width: Int(previewImage.extent.width),
            height: Int(previewImage.extent.height),
            mipmapped: false)
        textureDescriptor.usage = [.shaderWrite, .shaderRead]

        let texture = self.metalDevice!.makeTexture(descriptor: textureDescriptor)!

        // Note: setupTransform(width:height:mirroring:rotation:) and the
        // `mirroring`/`rotation` state come from the Apple sample code and
        // are not shown in this excerpt.
        if texture.width != textureWidth ||
            texture.height != textureHeight ||
            self.metalBufferView!.bounds != internalBounds {
            setupTransform(width: texture.width, height: texture.height, mirroring: mirroring, rotation: rotation)
        }

        // Set up command buffer and encoder
        guard let commandQueue = self.metalCommandQueue else {
            print("Failed to create Metal command queue")
            return
        }

        guard let commandBuffer = commandQueue.makeCommandBuffer() else {
            print("Failed to create Metal command buffer")
            return
        }

        // add rendering of the image to the command buffer
        ciContext.render(previewImage,
                         to: texture,
                         commandBuffer: commandBuffer,
                         bounds: previewImage.extent,
                         colorSpace: self.colorSpace)

        guard let commandEncoder = commandBuffer.makeRenderCommandEncoder(descriptor: currentRenderPassDescriptor) else {
            print("Failed to create Metal command encoder")
            return
        }

        // add vertex and fragment shaders to the command buffer
        commandEncoder.label = "Preview display"
        commandEncoder.setRenderPipelineState(renderPipelineState!)
        commandEncoder.setVertexBuffer(vertexCoordBuffer, offset: 0, index: 0)
        commandEncoder.setVertexBuffer(textCoordBuffer, offset: 0, index: 1)
        commandEncoder.setFragmentTexture(texture, index: 0)
        commandEncoder.setFragmentSamplerState(sampler, index: 0)
        commandEncoder.drawPrimitives(type: .triangleStrip, vertexStart: 0, vertexCount: 4)
        commandEncoder.endEncoding()

        commandBuffer.present(drawable) // Draw to the screen
        commandBuffer.commit()

    }


    final func mtkView(_ view: MTKView, drawableSizeWillChange size: CGSize) {

    }

}

Notes

  • The reason MTKViewDelegate is used instead of subclassing MTKView is that when the view was subclassed, the draw call was made on the main thread. With the delegate method shown above, it appears to be a different Metal-related thread making the call each loop. The approach above seems to give much better performance.
  • Details of the CIFilter usage in the update method above had to be edited out. All it is is a heavy chain of stacked CIFilters. Unfortunately, there is no room for any tweaks with these filters. (A sketch of how frames are fed into update(newVideoPixelBuffer:) follows this list.)
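
For completeness, here is a minimal sketch of how camera frames might be delivered to update(newVideoPixelBuffer:). This wiring is not part of the original post; the CameraFeed class, its queue label, and the session setup are assumptions based on the standard AVCaptureVideoDataOutput pattern:

import AVFoundation
import CoreVideo

// Hypothetical wiring: delivers BGRA camera frames to MetalObject.update(newVideoPixelBuffer:).
final class CameraFeed: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate {

    private let metalObject: MetalObject
    private let dataOutputQueue = DispatchQueue(label: "Video Data Output Queue")

    init(session: AVCaptureSession, metalObject: MetalObject) {
        self.metalObject = metalObject
        super.init()

        // Deliver frames on a dedicated queue and drop late ones
        // so the preview never backs up the capture pipeline.
        let videoOutput = AVCaptureVideoDataOutput()
        videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA]
        videoOutput.alwaysDiscardsLateVideoFrames = true
        videoOutput.setSampleBufferDelegate(self, queue: dataOutputQueue)
        if session.canAddOutput(videoOutput) {
            session.addOutput(videoOutput)
        }
    }

    func captureOutput(_ output: AVCaptureOutput,
                       didOutput sampleBuffer: CMSampleBuffer,
                       from connection: AVCaptureConnection) {
        // Hand each frame to the Metal object off the main thread.
        guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
        metalObject.update(newVideoPixelBuffer: pixelBuffer)
    }
}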

Problem

The code above seems to slow down the main thread considerably, making the rest of the app's UI choppy. For example, scrolling a UIScrollView feels slow and stuttery.

Goal

Tweak the Metal view to ease the burden on the CPU and go easy on the main thread, leaving enough juice for the rest of the UI.

Based on the profiler trace above (the screenshot is not included here), the preparation of the command buffer appears to be done entirely on the CPU until it is presented and committed(?). Is there a way to offload that from the CPU?

Any hints, feedback, tips, etc. to improve the drawing efficiency would be greatly appreciated.


1 Answer


There are a couple of things you can do to improve performance:

  • Render directly into the view's drawable instead of rendering into an intermediate texture and then rendering again just to display that texture in the view.
  • Use the new CIRenderDestination API to defer the actual texture retrieval to the moment the view is actually rendered to (i.e., when Core Image is done).

Here is the draw(in view: MTKView) I'm using in my Core Image project, modified for your case:

public func draw(in view: MTKView) {
    if let currentDrawable = view.currentDrawable,
        let previewImage = self.previewImage,
        let commandBuffer = self.commandQueue.makeCommandBuffer() {
        let drawableSize = view.drawableSize

        // optional: scale the image to fit the view
        let scaleX = drawableSize.width / previewImage.extent.width
        let scaleY = drawableSize.height / previewImage.extent.height
        let scale = min(scaleX, scaleY)
        let scaledImage = previewImage.transformed(by: CGAffineTransform(scaleX: scale, y: scale))

        // optional: center in the view
        let originX = max(drawableSize.width - scaledImage.extent.size.width, 0) / 2
        let originY = max(drawableSize.height - scaledImage.extent.size.height, 0) / 2
        let centeredImage = scaledImage.transformed(by: CGAffineTransform(translationX: originX, y: originY))

        // create a render destination that allows to lazily fetch the target texture
        // which allows the encoder to process all CI commands _before_ the texture is actually available;
        // this gives a nice speed boost because the CPU doesn’t need to wait for the GPU to finish
        // before starting to encode the next frame
        let destination = CIRenderDestination(width: Int(drawableSize.width),
                                              height: Int(drawableSize.height),
                                              pixelFormat: view.colorPixelFormat,
                                              commandBuffer: commandBuffer,
                                              mtlTextureProvider: { () -> MTLTexture in
                                                return currentDrawable.texture
        })

        let task = try! self.context.startTask(toRender: centeredImage, to: destination)
        // bonus: you can Quick Look the task to see what’s actually scheduled for the GPU

        commandBuffer.present(currentDrawable)
        commandBuffer.commit()

        // optional: you can wait for the task execution and Quick Look the info object to get insights and metrics
        DispatchQueue.global(qos: .background).async {
            let info = try! task.waitUntilCompleted()
        }
    }
}
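
One configuration detail this approach assumes: because Core Image now writes directly into the drawable's texture, the view can no longer be a display-only framebuffer, so the framebufferOnly = true setting from the question's init would need to be flipped. A minimal sketch of the view setup this draw method expects:

// The drawable's texture becomes a Core Image render destination,
// so it must be writable rather than display-only.
metalBufferView.framebufferOnly = false

// Keep drawing on demand: render only when a new camera frame arrives.
metalBufferView.isPaused = true
metalBufferView.enableSetNeedsDisplay = false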

If this is still too slow, you can try setting the priorityRequestLow CIContextOption when creating your CIContext to tell Core Image to render at a low priority.
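
A minimal sketch of that context setup; priorityRequestLow is the documented CIContextOption, while the cacheIntermediates line is an extra assumption often used for live-video pipelines:

import CoreImage
import Metal

// Tell Core Image to render with low priority so preview work
// yields GPU time to the rest of the UI.
let metalDevice = MTLCreateSystemDefaultDevice()!
let ciContext = CIContext(mtlDevice: metalDevice, options: [
    .priorityRequestLow: true,
    // assumption: caching intermediates rarely helps when every frame is new
    .cacheIntermediates: false
])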

answered 2019-04-25T13:39:32.317