
I realize there are several questions about this, but I feel like I've reviewed them all and still haven't figured out what I did wrong or differently. I call this class from a function that I know runs after viewDidLoad. What I'm trying to do is take a video stream from a given AVCaptureDevice or camera id and pass each frame into a WebView as a series of evaluateJavaScript calls. I may need to optimize this later, but right now I'm having trouble just getting captureOutput to be called at all. Surely I don't need a preview layer to be able to capture the output, do I? I've confirmed the permissions are correct and that the code reaches the point where the sample buffer delegate is set on the capture session. Any ideas?
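For reference, VideoSampleListener is just a one-method callback, and the WebView side of the pipeline looks roughly like the sketch below; the onCameraFrame JavaScript function and the WebViewFrameSink class are placeholder names for illustration, not my exact code:

import WebKit

// One-method callback protocol used by CameraPlugin (its real definition is
// not shown in this post; it is roughly this).
protocol VideoSampleListener: AnyObject {
    func receivedVideoSample(imageData: String)
}

// Sketch of the receiving side: forwards each base64 JPEG frame into the page.
class WebViewFrameSink: VideoSampleListener {
    private let webView: WKWebView

    init(webView: WKWebView) {
        self.webView = webView
    }

    func receivedVideoSample(imageData: String) {
        // evaluateJavaScript must be called on the main thread.
        DispatchQueue.main.async {
            self.webView.evaluateJavaScript(
                "onCameraFrame('data:image/jpeg;base64,\(imageData)')",
                completionHandler: nil
            )
        }
    }
}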

import AVFoundation
import UIKit

class CameraPlugin:
NSObject,
AVCaptureVideoDataOutputSampleBufferDelegate,
AVCaptureMetadataOutputObjectsDelegate,
AVCapturePhotoCaptureDelegate {

private var capturePhotoCompletion: ((Result<UIImage, Error>) -> ())?
private var scanBarcodeCompletion: ((Result<String, Error>) -> ())?
let captureSession = AVCaptureSession()
// weak to avoid a retain cycle with a listener that also owns this plugin
private weak var videoSampleListener: VideoSampleListener?
// AVCaptureVideoDataOutput requires a serial queue for sample delivery
private let sampleQueue = DispatchQueue(label: "CameraPlugin.videoSampleQueue")


func startStreamingCamera(cameraId: String?, camera: AVCaptureDevice?, listener: VideoSampleListener) {
    self.videoSampleListener = listener

    // Resolve the capture device: an explicit camera id takes precedence
    // over a device passed in directly.
    let resolvedCamera: AVCaptureDevice?
    if let cameraId = cameraId {
        resolvedCamera = self.retrieveVideoCaptureDeviceFromId(id: cameraId)
    } else {
        resolvedCamera = camera
    }
    guard let inputCam = resolvedCamera else {
        return
    }

    self.haveCaptureDeviceAccess(type: .video) { granted in
        guard granted else {
            // Camera access was denied; nothing to stream.
            return
        }
        do {
            let captureDeviceInput = try AVCaptureDeviceInput(device: inputCam)
            let captureDeviceOutput = AVCaptureVideoDataOutput()

            guard
                self.captureSession.canAddInput(captureDeviceInput),
                self.captureSession.canAddOutput(captureDeviceOutput)
            else {
                return
            }

            self.captureSession.addInput(captureDeviceInput)
            self.captureSession.addOutput(captureDeviceOutput)

            // The sample buffer delegate must be given a *serial* queue;
            // .global() is concurrent, so use a dedicated queue instead.
            captureDeviceOutput.setSampleBufferDelegate(self, queue: self.sampleQueue)

            // No frames are delivered until the session is running.
            self.captureSession.startRunning()
        } catch {
            // Creating the device input failed (e.g. the camera is in use).
        }
    }
}


/*
 * Converts a captured video frame to a base64-encoded JPEG
 * and forwards it to the listener.
 */
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
    guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
        return
    }
    let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
    // Creating a new CIContext per frame is expensive; caching a single
    // context would be an easy optimization later.
    guard let cgImage = CIContext().createCGImage(ciImage, from: ciImage.extent) else {
        return
    }
    let image = UIImage(cgImage: cgImage, scale: 1.0, orientation: .right)
    
    guard let imageData = image.jpegData(compressionQuality: 0.7)?.base64EncodedString() else {
        return
    }
    
    videoSampleListener?.receivedVideoSample(imageData: imageData)
}

// Barcode metadata callback; not used for the video streaming path.
func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {

}

private func configurePhotoSettings() -> AVCapturePhotoSettings {
    let settings = AVCapturePhotoSettings()
    settings.isHighResolutionPhotoEnabled = true
    return settings
}

private func retrieveBarcodeMetadataObjectTypes() -> [AVMetadataObject.ObjectType] {
    return [
        AVMetadataObject.ObjectType.upce,
        AVMetadataObject.ObjectType.code39,
        AVMetadataObject.ObjectType.code39Mod43,
        AVMetadataObject.ObjectType.ean13,
        AVMetadataObject.ObjectType.ean8,
        AVMetadataObject.ObjectType.code93,
        AVMetadataObject.ObjectType.code128,
        AVMetadataObject.ObjectType.pdf417,
        AVMetadataObject.ObjectType.qr,
        AVMetadataObject.ObjectType.aztec,
        AVMetadataObject.ObjectType.interleaved2of5,
        AVMetadataObject.ObjectType.itf14,
        AVMetadataObject.ObjectType.dataMatrix
    ]
}

private func haveCaptureDeviceAccess(type: AVMediaType, completion: @escaping (Bool) -> ()) {
    switch AVCaptureDevice.authorizationStatus(for: type) {
    case .denied, .restricted:
        completion(false)

    case .notDetermined:
        AVCaptureDevice.requestAccess(for: type) { granted in
            completion(granted)
        }

    default: // .authorized
        completion(true)
    }
}

func retrieveVideoCaptureDeviceFromId(id: String) -> AVCaptureDevice? {
    return self.retrieveAvailableVideoCaptureDevices().first(where: { device in device.uniqueID == id })
}

func retrieveAvailableVideoCaptureDevices() -> [AVCaptureDevice] {
    let discoverySession = AVCaptureDevice.DiscoverySession(
        deviceTypes: self.retrievePlatformDeviceTypes(),
        mediaType: .video,
        position: .unspecified
    )
    return discoverySession.devices
}

private func retrievePlatformDeviceTypes() -> [AVCaptureDevice.DeviceType] {
    var deviceTypes: [AVCaptureDevice.DeviceType] = [
        .builtInDualCamera,
        .builtInWideAngleCamera,
        .builtInTelephotoCamera
    ]
    if #available(iOS 11.1, *) {
        deviceTypes += [
            .builtInTrueDepthCamera
        ]
    }
    if #available(iOS 13.0, *) {
        deviceTypes += [
            .builtInDualWideCamera,
            .builtInTripleCamera,
            .builtInUltraWideCamera
        ]
    }
    return deviceTypes
}

}

The calling code:

guard let videoCaptureDevice = AVCaptureDevice.default(for: AVMediaType.video) else {
    return
}
let camPlugin = CameraPlugin()
camPlugin.startStreamingCamera(cameraId: nil, camera: videoCaptureDevice, listener: self)

1 Answer


Update: There is actually nothing wrong with the capture code itself. The issue I was having was that the instance of the class described above was being deallocated: Swift uses ARC rather than garbage collection, and camPlugin in my calling code was only a local variable, so nothing held a strong reference to the plugin once the calling function returned.
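The fix is simply to hold a strong reference to the plugin for as long as the stream should run. A minimal sketch of what I ended up with, assuming the caller is a view controller that also acts as the listener (CameraViewController and startCamera are illustrative names):

import AVFoundation
import UIKit

final class CameraViewController: UIViewController, VideoSampleListener {
    // Stored property: keeps the plugin alive for the controller's lifetime
    // so ARC doesn't deallocate it (and silently stop the delegate callbacks).
    private var camPlugin: CameraPlugin?

    func startCamera() {
        guard let videoCaptureDevice = AVCaptureDevice.default(for: .video) else {
            return
        }
        let plugin = CameraPlugin()
        self.camPlugin = plugin // retain before starting the stream
        plugin.startStreamingCamera(cameraId: nil, camera: videoCaptureDevice, listener: self)
    }

    func receivedVideoSample(imageData: String) {
        // Forward the base64 JPEG to the WebView, as in the question's sketch.
    }
}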

answered 2021-11-04T14:46:48.543