0

在我的应用程序中,有一个从前置摄像头和稀有摄像头拍摄照片的功能。两个相机都可以在ios 10.2.1 及更低版本上正常工作,但是如果我在iOS 10.3.2上运行应用程序 并从前置摄像头拍摄照片,一切正常,但是当我尝试从罕见或后置摄像头拍摄照片时,应用程序会因错误而崩溃“失去与 X 的 iPhone 的连接”。我用谷歌搜索,发现当图像尺寸较大时会出现此问题,这是内存问题。但我也尝试通过调整照片大小来解决这个问题。您能否指出我所缺少的。

注意: - 拍摄照片也比相机的正常工作要慢。

在此处输入图像描述

源代码

类相机库:NSObject {

weak var delegate: CameraLibraryDelegate?

var session: AVCaptureSession!

var sessionQueue: DispatchQueue!
var stillImageOutput: AVCaptureStillImageOutput?
init(sender: AnyObject) {
    super.init()
    self.delegate = sender as? CameraLibraryDelegate
    self.setObservers()
    self.initializeSession()
}

deinit {
    self.removeObservers()
}

// MARK: Session

func initializeSession() {
    self.session = AVCaptureSession()
    self.session.sessionPreset = AVCaptureSessionPresetPhoto
    self.sessionQueue = DispatchQueue(label: "camera session", attributes: [])

    self.sessionQueue.async {
        self.session.beginConfiguration()
        self.addVideoInput()
        self.addStillImageOutput()
        self.session.commitConfiguration()

        DispatchQueue.main.async {
            NSLog("Session initialization did complete")
            self.delegate?.cameraSessionConfigurationDidComplete()
        }
    }
}

func startCamera() {
    self.sessionQueue.async {
        self.session.startRunning()
    }
}

func stopCamera() {
    self.sessionQueue.async {
        self.session.stopRunning()
    }
}

func captureStillImage(_ completed: @escaping (_ image: UIImage?) -> Void) {
    if let imageOutput = self.stillImageOutput {
        self.sessionQueue.async(execute: { () -> Void in

            var videoConnection: AVCaptureConnection?
            for connection in imageOutput.connections {
                let c = connection as! AVCaptureConnection

                for port in c.inputPorts {
                    let p = port as! AVCaptureInputPort
                    if p.mediaType == AVMediaTypeVideo {
                        videoConnection = c;
                        break
                    }
                }

                if videoConnection != nil {
                    break
                }
            }

            if videoConnection != nil {
                imageOutput.captureStillImageAsynchronously(from: videoConnection, completionHandler: { (imageSampleBuffer: CMSampleBuffer!, error) -> Void in
                    let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(imageSampleBuffer)
                    let image: UIImage? = UIImage(data: imageData!)!

                    //DispatchQueue.main.async {
                        completed(image)

                    //}
                })
            } else {
                DispatchQueue.main.async {
                    completed(nil)
                }
            }
       })
    } else {
        completed(nil)
    }
}
func checkCameraPermisson() -> Void {
    if AVCaptureDevice.authorizationStatus(forMediaType: AVMediaTypeVideo) ==  AVAuthorizationStatus.authorized{
        print("Already Authorized")
    }
    else{
        AVCaptureDevice.requestAccess(forMediaType: AVMediaTypeVideo, completionHandler: { (granted :Bool) -> Void in
            if granted == true{
                print("User granted")
            }
            else{
                NotificationCenter.default.post(name: Notification.Name(rawValue: "CameraAccessDenied"), object: nil)

            }
        });
    }

}

// MARK: Configuration

func addVideoInput() {

    if let inputs = self.session.inputs as? [AVCaptureDeviceInput] {
        for input in inputs {
            self.session.removeInput(input)
            self.session.addInput(input)
        }
    }

}

func addStillImageOutput() {
    stillImageOutput = AVCaptureStillImageOutput()
    stillImageOutput?.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]

    if self.session.canAddOutput(stillImageOutput) {
        session.addOutput(stillImageOutput)
    }
}


func deviceWithMediaTypeWithPosition(_ mediaType: NSString, position: AVCaptureDevicePosition) -> AVCaptureDevice {
    let devices: NSArray = AVCaptureDevice.devices(withMediaType: mediaType as String)! as NSArray
    var captureDevice: AVCaptureDevice = devices.firstObject as! AVCaptureDevice
    for device in devices {
        let d = device as! AVCaptureDevice
        if d.position == position {
            captureDevice = d

            break;

        }
    }
    return captureDevice
}

// MARK: Observers

func setObservers() {
    NotificationCenter.default.addObserver(self, selector: #selector(CameraLibrary.sessionDidStart(_:)), name: NSNotification.Name.AVCaptureSessionDidStartRunning, object: nil)
    NotificationCenter.default.addObserver(self, selector: #selector(CameraLibrary.sessionDidStop(_:)), name: NSNotification.Name.AVCaptureSessionDidStopRunning, object: nil)
}

func removeObservers() {
    NotificationCenter.default.removeObserver(self)
}

func sessionDidStart(_ notification: Notification) {
    DispatchQueue.main.async {
        NSLog("Session did start")
        self.delegate?.cameraSessionDidBegin()
    }
}

func sessionDidStop(_ notification: Notification) {
    DispatchQueue.main.async {
        NSLog("Session did stop")
        self.delegate?.cameraSessionDidStop()
    }
}
4

0 回答 0