
I am building a QR scanner. My code worked when everything lived in one place inside the ViewController, but after I modularized it I no longer get the callback inside AVCaptureMetadataOutputObjectsDelegate.

import Foundation
import UIKit
import AVFoundation

class CameraSource : NSObject {

    private var session : AVCaptureSession?
    private var inputDevice : AVCaptureDeviceInput?
    private var videoPreviewLayer : AVCaptureVideoPreviewLayer?

    private var captureMetadataOutput : AVCaptureMetadataOutput?

    func setCaptureMetadataOutput() {
        self.captureMetadataOutput = nil
        self.captureMetadataOutput = AVCaptureMetadataOutput()
    }

    func getCaptureMetadataOutput() -> AVCaptureMetadataOutput? {
        return self.captureMetadataOutput
    }

    func setInputDevice(inputDevice : AVCaptureDeviceInput?) {
        self.inputDevice = inputDevice
    }

    func getInputDevice() -> AVCaptureDeviceInput? {
        return self.inputDevice
    }

    func setSession(session : AVCaptureSession?) {
        self.session = session
    }

    func getSession() -> AVCaptureSession? {
        return self.session
    }

    func setMetadataObjects(metaObjects : [AVMetadataObject.ObjectType], delegate : AVCaptureMetadataOutputObjectsDelegate) {
        assert(self.captureMetadataOutput != nil)
        self.captureMetadataOutput!.setMetadataObjectsDelegate(delegate, queue: DispatchQueue.main)
        self.captureMetadataOutput!.metadataObjectTypes = metaObjects
    }

    func initVideoPreviewLayer(videoGravity : AVLayerVideoGravity, orientation : AVCaptureVideoOrientation) {
        assert(session != nil)

        videoPreviewLayer = AVCaptureVideoPreviewLayer(session: session!)
        videoPreviewLayer!.videoGravity = videoGravity
        videoPreviewLayer!.connection!.videoOrientation = orientation
    }

    func addVideoLayerToImageView(imageView : UIImageView) {
        assert(self.videoPreviewLayer != nil)

        imageView.layer.addSublayer(self.videoPreviewLayer!)
        self.videoPreviewLayer!.frame = imageView.bounds
    }

    func startSession() {
        assert(session != nil)
        self.session!.startRunning()
    }


    /*==========================================================================
    STATIC FUNCTIONS
    ==========================================================================*/

    static func getBackCamera() -> AVCaptureDevice {
        return AVCaptureDevice.default(.builtInWideAngleCamera, for: AVMediaType.video, position: .back)!
    }

    static func getFrontCamera() -> AVCaptureDevice {
        return AVCaptureDevice.default(.builtInWideAngleCamera, for: AVMediaType.video, position: .front)!
    }

    static func isCameraAvailable() -> Bool {
        if #available(iOS 10.0, *) {
            let count : Int = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera],
                                                               mediaType: AVMediaType.video,
                                                               position: .unspecified).devices.count
            if count > 0 { return true }
        }
        else {
            let count = AVCaptureDevice.devices(for: AVMediaType.video).count
            if count > 0 { return true }
        }
        return false
    }


    /*==========================================================================
    CAMERA BUILDER CLASS
    ==========================================================================*/

    class Builder {

        var cameraSource : CameraSource

        init() {
            cameraSource = CameraSource()
        }

        func createSession() -> Builder {
            if (cameraSource.getSession() != nil) {
                cameraSource.setSession(session: nil)
            }
            cameraSource.setSession(session: AVCaptureSession())
            return self
        }

        func setSessionPreset(preset : AVCaptureSession.Preset) -> Builder {
            assert(cameraSource.getSession() != nil)

            cameraSource.getSession()!.sessionPreset = preset
            return self
        }

        func attachInputDevice(camera : AVCaptureDevice) throws -> Builder {

            try self.prepareInputDevice(camera: camera)
            try self.addInputToSession()

            assert(cameraSource.inputDevice != nil)
            return self
        }

        func addOutputToSessionForMetaData() throws -> CameraSource {
            cameraSource.setCaptureMetadataOutput()

            assert(cameraSource.getSession() != nil && cameraSource.getCaptureMetadataOutput() != nil)

            if !cameraSource.getSession()!.canAddOutput(cameraSource.getCaptureMetadataOutput()!) {
                throw AppErrorCode.cameraError("Unable to attach output to camera session")
            }
            cameraSource.getSession()!.addOutput(cameraSource.getCaptureMetadataOutput()!)

            return self.cameraSource
        }

        /*==========================================================================
        BUILDER PRIVATE FUNCTIONS
        ==========================================================================*/

        private func prepareInputDevice(camera : AVCaptureDevice) throws {
            do {
                let inputDevice = try AVCaptureDeviceInput(device: camera)
                cameraSource.setInputDevice(inputDevice: inputDevice)

            } catch let error as NSError {
                print(error.localizedDescription)
                throw AppErrorCode.cameraError("Unable to attach input to camera session")
            }
        }

        private func addInputToSession() throws {
            if(cameraSource.getSession() == nil) {
                throw AppErrorCode.cameraError("Unable to create camera session")
            }

            assert(cameraSource.getInputDevice() != nil && cameraSource.getSession()!.canAddInput(cameraSource.getInputDevice()!))

            cameraSource.getSession()!.addInput(cameraSource.getInputDevice()!)
        }

    }


}

My QR scanner code looks like this:

import UIKit
import Foundation
import AVFoundation

protocol QRScannerDelegate {
    func scannedData(_ scannedString : String)
}

class QRScanner : NSObject {

    private var cameraSource : CameraSource?

    var delegate : QRScannerDelegate?

    func prepareCamera(delegate : QRScannerDelegate) throws -> QRScanner {
        do {
            self.delegate = delegate
            self.cameraSource = try CameraSource
                .Builder()
                .createSession()
                .setSessionPreset(preset: .photo)
                .attachInputDevice(camera: CameraSource.getBackCamera())
                .addOutputToSessionForMetaData()

            self.cameraSource!.setMetadataObjects(metaObjects: [.qr], delegate: self as AVCaptureMetadataOutputObjectsDelegate)

        } catch let err as NSError {
            print(err.localizedDescription)
            self.cameraSource = nil
            throw AppErrorCode.cameraError("Unable to process camera with one or more issue")
        }

        return self
    }

    func initVideoPreviewLayer(videoGravity : AVLayerVideoGravity, orientation : AVCaptureVideoOrientation) -> QRScanner {
        assert(cameraSource != nil)

        self.cameraSource!.initVideoPreviewLayer(videoGravity: videoGravity, orientation: orientation)
        return self
    }

    func addVideoLayerToImageView(imageView : UIImageView) -> QRScanner {
        assert(cameraSource != nil)

        self.cameraSource!.addVideoLayerToImageView(imageView: imageView)
        return self
    }

    func startSession() {
        assert(cameraSource != nil)
        self.cameraSource!.startSession()
    }
}

extension QRScanner : AVCaptureMetadataOutputObjectsDelegate {
    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {

        print("Delegate called")
        if metadataObjects.count == 0 {

            self.delegate?.scannedData("No Data")

        } else {

            // Conditional cast: only machine-readable code objects carry a stringValue.
            guard let metadataObj = metadataObjects[0] as? AVMetadataMachineReadableCodeObject else { return }
            if metadataObj.type == AVMetadataObject.ObjectType.qr {

                if let stringValue = metadataObj.stringValue {
                    print("Scanner Getting data: \(stringValue)")
                    self.delegate?.scannedData(stringValue)
                }

            }

        }
    }
}

I have implemented QRScannerDelegate in my ViewController, but I am not getting anything. In fact I am not even getting the AVCaptureMetadataOutputObjectsDelegate callback.

When I tried passing the ViewController instance as the AVCaptureMetadataOutputObjectsDelegate instead, I did receive the callback with the scanned data.

So my question is: why does this happen?

1) When I pass a plain class as the AVCaptureMetadataOutputObjectsDelegate, I do not get the callback. But

2) when I pass the UIViewController instance as the AVCaptureMetadataOutputObjectsDelegate, I do get the callback.

UPDATE

This is how I call prepareCamera from my ViewController:

override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        do {

            try QRScanner().prepareCamera(delegate: self)
                    .initVideoPreviewLayer(videoGravity: .resizeAspectFill, orientation: .portrait)
                    .addVideoLayerToImageView(imageView: self.qrScannerImageView)
                    .startSession()

        } catch {

            print("Some Camera Error")

        }
        self.createOverlay()
    }

1 Answer


It's hard to say for sure without knowing how you called prepareCamera, since that is what triggers setMetadataObjectsDelegate, but it looks like you may not be keeping a strong reference to QRScanner in your ViewController (i.e. storing it in an instance variable). A QRScanner created as a temporary is deallocated as soon as the calling scope ends, because the capture output does not retain its delegate, so the delegate method can never fire. That would explain why the callback is hit when your ViewController is your AVCaptureMetadataOutputObjectsDelegate: the ViewController itself is still in memory.
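A quick way to confirm this is to log the scanner's deallocation. The deinit below is purely a debugging aid I'm adding here, not part of your code:

class QRScanner : NSObject {

    // Debugging aid: logs the moment the last strong reference goes away.
    deinit {
        print("QRScanner deallocated")
    }

    // ... rest of the class unchanged
}

// In the ViewController: this QRScanner is a temporary. Nothing retains it once
// viewDidAppear returns, so "QRScanner deallocated" prints almost immediately
// and no AVCaptureMetadataOutputObjectsDelegate callback can ever arrive.
override func viewDidAppear(_ animated: Bool) {
    super.viewDidAppear(animated)
    do {
        try QRScanner().prepareCamera(delegate: self)
            .initVideoPreviewLayer(videoGravity: .resizeAspectFill, orientation: .portrait)
            .addVideoLayerToImageView(imageView: self.qrScannerImageView)
            .startSession()
    } catch {
        print("Some Camera Error")
    }
}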

It's also worth noting that if the ViewController is your QRScannerDelegate, you will want to declare the delegate as weak var delegate : QRScannerDelegate? to prevent a retain cycle (ViewController -> QRScanner -> delegate -> ViewController) and the resulting memory leak.
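Concretely, inside QRScanner that looks like the following sketch (it assumes the class-constrained protocol shown in the edit further down):

class QRScanner : NSObject {

    private var cameraSource : CameraSource?

    // weak breaks the cycle ViewController -> QRScanner -> delegate -> ViewController.
    // This only compiles once QRScannerDelegate is a class-bound protocol (see below).
    weak var delegate : QRScannerDelegate?

    // ... rest of the class unchanged
}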

EDIT: Change

override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        do {

            try QRScanner().prepareCamera(delegate: self)
                    .initVideoPreviewLayer(videoGravity: .resizeAspectFill, orientation: .portrait)
                    .addVideoLayerToImageView(imageView: self.qrScannerImageView)
                    .startSession()

        } catch {

            print("Some Camera Error")

        }
        self.createOverlay()
    }

to

var qrScanner = QRScanner()
override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        do {

            try self.qrScanner.prepareCamera(delegate: self)
                    .initVideoPreviewLayer(videoGravity: .resizeAspectFill, orientation: .portrait)
                    .addVideoLayerToImageView(imageView: self.qrScannerImageView)
                    .startSession()

        } catch {

            print("Some Camera Error")

        }
        self.createOverlay()
    }

and change

protocol QRScannerDelegate {
    func scannedData(_ scannedString : String)
}

to

protocol QRScannerDelegate: class {
    func scannedData(_ scannedString : String)
}

to allow a weak delegate.
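(As an aside, on recent Swift versions the class constraint is spelled AnyObject; the class keyword still compiles but newer compilers may flag it as deprecated:)

protocol QRScannerDelegate: AnyObject {
    func scannedData(_ scannedString : String)
}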

AVCaptureMetadataOutputObjectsDelegate is tough, but you can do some really cool stuff with it! So keep at it.

I pulled some QRScanner code I wrote a while ago and put it into a gist for you, if you want to check it out. It's a bit more stripped down than what you have, but you may find it helpful: https://gist.github.com/aChase55/733ea89af1bfa80c65971d3bc691f0b2

answered 2019-05-02T10:55:26.313