
This bug is driving me crazy. I'm trying to put together the absolute minimum code to get AVDepthData from an iPhone 7+ using its dual camera.

I have this code:


//
//  RecorderViewController.swift
//  ios-recorder-app


import UIKit
import AVFoundation


class RecorderViewController: UIViewController {

    @IBOutlet weak var previewView: UIView!

    @IBAction func onTapTakePhoto(_ sender: Any) {

        guard let capturePhotoOutput = self.capturePhotoOutput else { return }

        let photoSettings = AVCapturePhotoSettings()

        photoSettings.isDepthDataDeliveryEnabled = true //Error

        capturePhotoOutput.capturePhoto(with: photoSettings, delegate: self)

    }

    var session: AVCaptureSession?
    var videoPreviewLayer: AVCaptureVideoPreviewLayer?
    var capturePhotoOutput: AVCapturePhotoOutput?


    override func viewDidLoad() {
        super.viewDidLoad()

        AVCaptureDevice.requestAccess(for: .video, completionHandler: { _ in })

        let captureDevice = AVCaptureDevice.default(.builtInDualCamera, for: .depthData, position: .back)

        do {
            print(captureDevice!)
            let input = try AVCaptureDeviceInput(device: captureDevice!)

            self.capturePhotoOutput = AVCapturePhotoOutput()
            self.capturePhotoOutput?.isDepthDataDeliveryEnabled = true //Error

            self.session = AVCaptureSession()
            self.session?.addInput(input)

            self.videoPreviewLayer = AVCaptureVideoPreviewLayer(session: self.session!)
            self.videoPreviewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
            self.videoPreviewLayer?.frame = view.layer.bounds
            previewView.layer.addSublayer(self.videoPreviewLayer!)

            self.session?.addOutput(self.capturePhotoOutput!)
            self.session?.startRunning()

        } catch {
            print(error)
        }

    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

}

extension RecorderViewController : AVCapturePhotoCaptureDelegate {

    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        print(photo.depthData)

    }


}

If I comment out the lines marked "Error", the code works as I expect and prints nil for depthData.

However, leaving them in, I get an exception. The error message says: [AVCapturePhotoOutput setDepthDataDeliveryEnabled:] Depth data delivery is not supported in the current configuration.

How do I change the "current configuration" so that it supports depth delivery?
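
For reference, a minimal sketch (my own addition, assuming iOS 11+ and an already-configured session) of how the output can be asked whether depth delivery is even possible before the flag is set:

import AVFoundation

// Sketch: probe depth support instead of setting the flag blindly.
// isDepthDataDeliverySupported only becomes true once the output is attached
// to a session whose input device and preset support depth.
func enableDepthIfPossible(on output: AVCapturePhotoOutput, device: AVCaptureDevice) {
    if output.isDepthDataDeliverySupported {
        output.isDepthDataDeliveryEnabled = true
    } else {
        print("Depth delivery is not supported by the current configuration")
        // The device's active format lists the depth formats it can pair with.
        print(device.activeFormat.supportedDepthDataFormats)
    }
}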

I have watched this video: https://developer.apple.com/videos/play/wwdc2017/507/ which was very helpful, and I believe I have followed the exact steps required to make this work.

Any hints would be much appreciated!


1 Answer


There were two things I needed to fix.

  1. Set the sessionPreset to a format that supports depth, such as .photo.
  2. Add the capturePhotoOutput to the session before setting .isDepthDataDeliveryEnabled = true (the essence of both fixes is sketched below).
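
In essence (a condensed sketch of just those two points, assuming the dual-camera input has already been created):

import AVFoundation

let session = AVCaptureSession()
let output = AVCapturePhotoOutput()

session.beginConfiguration()
session.sessionPreset = .photo            // 1. a preset that supports depth
// ... add the dual-camera AVCaptureDeviceInput here ...
session.addOutput(output)                 // 2. add the output to the session first...
session.commitConfiguration()
output.isDepthDataDeliveryEnabled = true  // ...and only then enable depth delivery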

Here is my minimal code for getting depth data with a photo:


//
//  RecorderViewController.swift
//  ios-recorder-app
//

import UIKit
import AVFoundation


class RecorderViewController: UIViewController {

    @IBOutlet weak var previewView: UIView!

    @IBAction func onTapTakePhoto(_ sender: Any) {

        guard let capturePhotoOutput = self.capturePhotoOutput else { return }

        let photoSettings = AVCapturePhotoSettings()
        photoSettings.isDepthDataDeliveryEnabled = true

        capturePhotoOutput.capturePhoto(with: photoSettings, delegate: self)

    }

    var session: AVCaptureSession?
    var videoPreviewLayer: AVCaptureVideoPreviewLayer?
    var capturePhotoOutput: AVCapturePhotoOutput?


    override func viewDidLoad() {
        super.viewDidLoad()

        AVCaptureDevice.requestAccess(for: .video, completionHandler: { _ in })

        let captureDevice = AVCaptureDevice.default(.builtInDualCamera, for: .video, position: .back)

        print(captureDevice!.activeDepthDataFormat)

        do {
            let input = try AVCaptureDeviceInput(device: captureDevice!)

            self.capturePhotoOutput = AVCapturePhotoOutput()

            self.session = AVCaptureSession()
            self.session?.beginConfiguration()
            self.session?.sessionPreset = .photo   // 1. a preset that supports depth
            self.session?.addInput(input)

            self.videoPreviewLayer = AVCaptureVideoPreviewLayer(session: self.session!)
            self.videoPreviewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
            self.videoPreviewLayer?.frame = self.view.layer.bounds
            self.previewView.layer.addSublayer(self.videoPreviewLayer!)

            self.session?.addOutput(self.capturePhotoOutput!)
            self.session?.commitConfiguration()
            // 2. the output must already be attached to the session before depth delivery can be enabled
            self.capturePhotoOutput?.isDepthDataDeliveryEnabled = true
            self.session?.startRunning()
        }
        catch {
            print(error)
        }

    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

}

extension RecorderViewController : AVCapturePhotoCaptureDelegate {

    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        print(photo.depthData)
    }


}
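
As a follow-up (my own addition, not part of the original answer), a hedged sketch of one way the delegate could turn the delivered AVDepthData into a usable depth map:

import AVFoundation
import CoreVideo

// Sketch: normalize whatever depth/disparity format arrives to 32-bit float depth.
func depthMap(from photo: AVCapturePhoto) -> CVPixelBuffer? {
    guard let depthData = photo.depthData else { return nil }
    let converted = depthData.converting(toDepthDataType: kCVPixelFormatType_DepthFloat32)
    return converted.depthDataMap
}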

answered 2018-03-15T20:37:03.360