I'm using a renderer class in my capture pipeline to add CI filters to the video. Inside the renderer's copyRenderedPixelBuffer, I also want to copy the pixel buffer and send it to Vision to detect face landmarks.

I made a singleton for Vision that owns a serial dispatch queue. The problem is that as soon as I add the dispatch queue, the pixel buffers are never released from memory, so the leak grows quickly (even though the pixel buffer is released in the Objective-C code). With the dispatch queue commented out, the memory leak disappears (but then the video preview lags badly because of the Vision work).

Any help would be greatly appreciated!

- (CVPixelBufferRef)copyRenderedPixelBuffer:(CVPixelBufferRef)pixelBuffer
{
    OSStatus err = noErr;
    CVPixelBufferRef renderedOutputPixelBuffer = NULL;

    CVPixelBufferRef visionOutputPixelBuffer = NULL;

    CIImage *sourceImage = nil;

    err = CVPixelBufferPoolCreatePixelBuffer( kCFAllocatorDefault, _bufferPool, &renderedOutputPixelBuffer );
    if ( err ) {
        NSLog(@"Cannot obtain a pixel buffer from the buffer pool (%d)", (int)err );
        goto bail;
    }

    err = CVPixelBufferPoolCreatePixelBuffer( kCFAllocatorDefault, _bufferPool, &visionOutputPixelBuffer );
    if ( err ) {
        NSLog(@"Cannot obtain a pixel buffer from the buffer pool (%d)", (int)err );
    }

    // Vision: copy the source pixels into the second buffer and hand it to the face detector
    if ( visionOutputPixelBuffer != NULL ) {
        CVPixelBufferLockBaseAddress( pixelBuffer, 0 );
        int bufferHeight = (int)CVPixelBufferGetHeight( pixelBuffer );
        size_t bytesPerRow = CVPixelBufferGetBytesPerRow( pixelBuffer );
        uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress( pixelBuffer );

        CVPixelBufferLockBaseAddress( visionOutputPixelBuffer, 0 );
        uint8_t *copyBaseAddress = (uint8_t *)CVPixelBufferGetBaseAddress( visionOutputPixelBuffer );
        memcpy( copyBaseAddress, baseAddress, bufferHeight * bytesPerRow );

        CVPixelBufferUnlockBaseAddress( pixelBuffer, 0 );
        CVPixelBufferUnlockBaseAddress( visionOutputPixelBuffer, 0 );

        [[VisionFaceDetection sharedInstance] detectFaceOnPixelBuffer:visionOutputPixelBuffer];
        CVPixelBufferRelease( visionOutputPixelBuffer );
    }

// ... Other Filter code here ...


bail:
    if(sourceImage != nil)
    {
        [sourceImage release];
    }


    return renderedOutputPixelBuffer;

}

The Swift class for Vision:

@objc final class VisionFaceDetection: NSObject {

    @objc static let sharedInstance = VisionFaceDetection()  // singleton so it can handle the async dispatch

    private override init() {}

    let serialQueue = DispatchQueue(label: "vision", qos: DispatchQoS.userInitiated)

    let faceLandmarks = VNDetectFaceLandmarksRequest()
    let faceLandmarksDetectionRequest = VNSequenceRequestHandler()

    // CVPixelBuffer

    @objc func detectFace(onPixelBuffer pixelBuffer: CVPixelBuffer) {

        // Currently this block causes a memory leak with the pixel buffer
        serialQueue.async {

            let faceDetectionRequest = VNDetectFaceRectanglesRequest { [weak self] (request, error) in

                guard let observations = request.results as? [VNFaceObservation] else {

                    print("Unexpected result type from face detection request")
                    return
                }

                // the leak happens even when these two lines are commented out
//                self?.faceLandmarks.inputFaceObservations = observations
//                self?.detectLandmarks(onPixelBuffer: pixelBuffer)

            }

            let faceDetectionRequestHandler = VNSequenceRequestHandler()

            try? faceDetectionRequestHandler.perform([faceDetectionRequest], on: pixelBuffer)

        }
    }
// ... other methods in class
}
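
For reference, detectLandmarks(onPixelBuffer:) (commented out above) is one of the methods the post elides. A minimal sketch of what it could look like, assuming it reuses the class's faceLandmarks request and faceLandmarksDetectionRequest sequence handler and runs on the same serial queue; this is illustrative only, not the actual implementation:

import Vision

// Hypothetical sketch only -- the post elides this as "... other methods in class".
// Assumed to run on serialQueue, since detectFace(onPixelBuffer:) would call it from there.
extension VisionFaceDetection {

    func detectLandmarks(onPixelBuffer pixelBuffer: CVPixelBuffer) {

        do {
            // inputFaceObservations is expected to have been set by the rectangles request
            try faceLandmarksDetectionRequest.perform([faceLandmarks], on: pixelBuffer)
        } catch {
            print("Face landmarks request failed: \(error)")
            return
        }

        guard let observations = faceLandmarks.results as? [VNFaceObservation] else {
            print("Unexpected result type from face landmarks request")
            return
        }

        for face in observations {
            // e.g. inspect the detected landmark points
            if let allPoints = face.landmarks?.allPoints {
                print("Detected \(allPoints.pointCount) landmark points")
            }
        }
    }
}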