4

我想防止应用程序在视频录制和拍照之间切换时出现延迟:仅使用 AVCaptureMovieFileOutput 并在捕获图像时从中获取快照。

就像 SnapChat 一样。

有可能吗?我还没有找到任何与此相关的文章。

我不想在输出之间切换,因为它滞后

编码:

// Container view that hosts the camera preview layer.
@IBOutlet var cameraView: UIView!

@IBOutlet var cameraSwitchButton: UIButton!
@IBOutlet var captureButtonView: CaptureButton!
@IBOutlet var cameraFlashButton: UIButton!

// Session shared by preview and recording.
var captureSession = AVCaptureSession()
// Single movie output, attached once — the point of the question is to avoid
// the lag of swapping outputs between photo and video modes.
let movieOutput = AVCaptureMovieFileOutput()
// The currently active camera input (set in setupSession()).
var activeInput: AVCaptureDeviceInput!
var previewLayer = AVCaptureVideoPreviewLayer()

// Destination of the in-progress recording; cleared when recording finishes.
var outputURL: URL!

// Video connection of movieOutput; assigned in viewDidLoad.
var connection : AVCaptureConnection!

/// Sets up the capture session, preview layer, and the tap/long-press
/// gestures on the capture button (tap = photo, hold = video).
override func viewDidLoad() {
    super.viewDidLoad() // fix: the original never called through to super

    if setupSession() {
        setupPreview()
        startSession()
        connection = movieOutput.connection(with: AVMediaType.video)
        // fix: optional-bind instead of force-unwrapping the optional chain —
        // `(connection?.isVideoStabilizationSupported)!` crashes when the
        // connection is nil (e.g. output was not added to the session).
        if let connection = connection, connection.isVideoStabilizationSupported {
            connection.preferredVideoStabilizationMode = .off
        }
    }

    let tapGesture = UITapGestureRecognizer(target: self, action: #selector(captureButtonTapped))
    let longGesture = UILongPressGestureRecognizer(target: self, action: #selector(captureButtonLongPressed))
    tapGesture.numberOfTapsRequired = 1
    captureButtonView.addGestureRecognizer(tapGesture)
    captureButtonView.addGestureRecognizer(longGesture)
}

/// Single-tap handler on the capture button.
/// The question's placeholder text was not valid Swift; it is preserved
/// here as a comment so the file compiles.
@objc func captureButtonTapped(){

    // TODO: take photo here (see the answer's `capturePhoto()` which adds
    // an AVCapturePhotoOutput alongside the movie output).

}
// True while a long-press video recording is in progress.
var isRecordingVideo : Bool = false
/// Long-press handler: starts recording when the press begins and stops it
/// when the press ends, fails, or is cancelled.
@objc func captureButtonLongPressed(sender : UILongPressGestureRecognizer){
    switch sender.state {
    case .began:
        isRecordingVideo = true
        startRecording()
        captureButtonView.startTimer(duration: 10.0)
    case .ended, .failed, .cancelled:
        captureButtonView.clear()
        isRecordingVideo = false
        stopRecording()
    default:
        break
    }
}


/// Creates the video preview layer for the session and attaches it to the
/// camera view, filling its bounds.
func setupPreview() {
    let layer = AVCaptureVideoPreviewLayer(session: captureSession)
    layer.frame = cameraView.bounds
    layer.videoGravity = AVLayerVideoGravity.resizeAspectFill
    cameraView.layer.addSublayer(layer)
    previewLayer = layer
}

//MARK:- Setup Camera

/// Configures the capture session: high-quality preset, camera and
/// microphone inputs, and the movie file output.
///
/// - Returns: `true` when the inputs were attached successfully, else `false`.
func setupSession() -> Bool {

    captureSession.sessionPreset = AVCaptureSession.Preset.high

    // Setup Camera
    // fix: guard instead of force-unwrapping — `default(for:)` returns nil on
    // devices without a camera (e.g. the simulator) and `camera!` crashed.
    guard let camera = AVCaptureDevice.default(for: AVMediaType.video) else {
        print("No video capture device available")
        return false
    }

    do {
        let input = try AVCaptureDeviceInput(device: camera)
        if captureSession.canAddInput(input) {
            captureSession.addInput(input)
            activeInput = input
        }
    } catch {
        print("Error setting device video input: \(error)")
        return false
    }

    // Setup Microphone (same force-unwrap fix as the camera above)
    guard let microphone = AVCaptureDevice.default(for: AVMediaType.audio) else {
        print("No audio capture device available")
        return false
    }

    do {
        let micInput = try AVCaptureDeviceInput(device: microphone)
        if captureSession.canAddInput(micInput) {
            captureSession.addInput(micInput)
        }
    } catch {
        print("Error setting device audio input: \(error)")
        return false
    }

    // Movie output
    if captureSession.canAddOutput(movieOutput) {
        captureSession.addOutput(movieOutput)
    }

    return true
}

/// Placeholder for switching capture modes. Intentionally left empty in the
/// question — the asker's goal is to avoid reconfiguring outputs on mode change.
func setupCaptureMode(_ mode: Int) {

}

//MARK:- Camera Session
/// Starts the capture session on the video queue, unless already running.
func startSession() {
    guard !captureSession.isRunning else { return }
    videoQueue().async {
        self.captureSession.startRunning()
    }
}

/// Stops the capture session on the video queue, if it is running.
func stopSession() {
    guard captureSession.isRunning else { return }
    videoQueue().async {
        self.captureSession.stopRunning()
    }
}

/// Queue used for starting/stopping the capture session.
///
/// Fix: `AVCaptureSession.startRunning()` is a blocking call and Apple's
/// documentation recommends invoking it off the main thread; the original
/// returned `DispatchQueue.main`, which stalls the UI while the session spins up.
func videoQueue() -> DispatchQueue {
    return DispatchQueue.global(qos: .userInitiated)
}

/// Maps the current device orientation to a capture video orientation.
/// Note the landscape cases are deliberately crossed: device .landscapeRight
/// corresponds to video .landscapeLeft (home button on the left of the sensor).
func currentVideoOrientation() -> AVCaptureVideoOrientation {
    switch UIDevice.current.orientation {
    case .portrait:
        return .portrait
    case .landscapeRight:
        return .landscapeLeft
    case .portraitUpsideDown:
        return .portraitUpsideDown
    default:
        // Face up/down, unknown, and .landscapeLeft all fall through here.
        return .landscapeRight
    }
}

/// Convenience entry point that simply begins a file recording.
func startCapture() {

    startRecording()

}

/// Builds a unique `.mp4` file URL inside the temporary directory.
///
/// - Returns: A writable file URL, or `nil` if no temp directory is available.
func tempURL() -> URL? {
    // Modernized: plain String + URL instead of NSString/NSUUID bridging.
    let directory = NSTemporaryDirectory()

    guard !directory.isEmpty else { return nil }

    // A fresh UUID per call guarantees each recording gets its own file.
    return URL(fileURLWithPath: directory)
        .appendingPathComponent(UUID().uuidString + ".mp4")
}


/// Starts a movie-file recording, or stops the current one if a recording
/// is already in progress (toggle behavior, matching the original).
func startRecording() {

    guard movieOutput.isRecording == false else {
        stopRecording()
        return
    }

    // fix: optional-bind instead of `(connection?.isVideoOrientationSupported)!`,
    // which crashed whenever `connection` was nil (e.g. setupSession failed).
    if let connection = connection, connection.isVideoOrientationSupported {
        connection.videoOrientation = currentVideoOrientation()
    }

    // Smooth autofocus causes visible focus "breathing" on video; disable it.
    let device = activeInput.device
    if device.isSmoothAutoFocusSupported {
        do {
            try device.lockForConfiguration()
            device.isSmoothAutoFocusEnabled = false
            device.unlockForConfiguration()
        } catch {
            print("Error setting configuration: \(error)")
        }
    }

    // fix: the original assigned `outputURL = tempURL()` and then relied on the
    // implicitly-unwrapped property — a nil temp URL crashed. Guard it instead.
    guard let url = tempURL() else {
        print("Unable to create a temporary file URL for recording.")
        return
    }
    outputURL = url
    movieOutput.startRecording(to: url, recordingDelegate: self)
}

/// Ends the active movie recording, if one is in progress.
func stopRecording() {
    guard movieOutput.isRecording else { return }
    movieOutput.stopRecording()
}


/// AVCaptureFileOutputRecordingDelegate — called when a recording finishes.
/// Saves the finished clip to the photo library on success.
func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
    if (error != nil) {
        print("Error recording movie: \(error!.localizedDescription)")
    } else {
        // fix: use the URL the delegate provides rather than the stored
        // `outputURL` property, and drop the dead `_ = outputURL as URL` cast.
        UISaveVideoAtPathToSavedPhotosAlbum(outputFileURL.path, nil, nil, nil)
    }
    // Reset so the next recording starts from a clean state.
    outputURL = nil
}
4

1 回答 1

6

我没有找到仅使用 AVCaptureMovieFileOutput 的办法,但是您可以额外添加一个照片输出并直接触发拍照,而无需在输出之间切换。

我现在的时间很短,但这应该会让你继续前进,直到我可以编辑更多信息。

(请参阅下面的完整实施编辑,以及有限的力展开)

首先在视图控制器中为照片输出设置一个额外的 var

// declare an additional camera output var
var cameraOutput = AVCapturePhotoOutput()

// do this in your 'setupSession' func where you setup your movie output
// NOTE(review): prefer checking captureSession.canAddOutput(cameraOutput)
// before adding, as the full implementation further below does for inputs.
cameraOutput.isHighResolutionCaptureEnabled = true
captureSession.addOutput(cameraOutput)

使用 cameraOutput 声明一个函数来捕捉你的照片:

/// Triggers a still-photo capture from the always-attached photo output,
/// sized to the screen so it matches the live preview.
func capturePhoto() {
    // create settings for your photo capture
    let settings = AVCapturePhotoSettings()

    // fix: guard instead of `.first!` — the format list can be empty on some
    // devices/configurations and the force-unwrap crashed.
    guard let previewPixelType = settings.availablePreviewPhotoPixelFormatTypes.first else {
        print("No preview pixel format available for photo capture.")
        return
    }

    let previewFormat = [
        kCVPixelBufferPixelFormatTypeKey as String: previewPixelType,
        kCVPixelBufferWidthKey as String: UIScreen.main.bounds.size.width,
        kCVPixelBufferHeightKey as String: UIScreen.main.bounds.size.height
        ] as [String : Any]
    settings.previewPhotoFormat = previewFormat
    cameraOutput.capturePhoto(with: settings, delegate: self)
}

并符合AVCapturePhotoCaptureDelegate.

我创建了一个单独的类VideoFeed来管理视频捕获会话,所以这个示例是该类的扩展。稍后我将更新有关此的更多信息。

loadImage(data: Data) 函数会把生成的图像通过委托回调传出去。如果您将这段代码直接放在视图控制器中,则可以去掉该委托调用,直接保存生成的照片或对其执行任何您喜欢的操作:

// First-pass photo delegate (pre-iOS-11 sample-buffer API; superseded by the
// full implementation later in the answer).
extension VideoFeed: AVCapturePhotoCaptureDelegate {
    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photoSampleBuffer: CMSampleBuffer?, previewPhoto previewPhotoSampleBuffer: CMSampleBuffer?, resolvedSettings: AVCaptureResolvedPhotoSettings, bracketSettings: AVCaptureBracketedStillImageSettings?, error: Error?) {
        guard error == nil else {
            print("Photo Error: \(String(describing: error))")
            return
        }

        guard let sampleBuffer = photoSampleBuffer,
            let previewBuffer = previewPhotoSampleBuffer,
            let outputData =  AVCapturePhotoOutput
            .jpegPhotoDataRepresentation(forJPEGSampleBuffer: sampleBuffer, previewPhotoSampleBuffer: previewBuffer) else {
                    print("Oops, unable to create jpeg image")
            return
        }

        print("captured photo...")
        loadImage(data: outputData)
    }

    /// Decodes the captured JPEG data and forwards the image to the delegate.
    func loadImage(data: Data) {
        // fix: guard the CGDataProvider/CGImage creation instead of
        // force-unwrapping (`dataProvider!`, `cgImageRef!` crashed on bad data).
        guard let dataProvider = CGDataProvider(data: data as CFData),
            let cgImageRef = CGImage(jpegDataProviderSource: dataProvider, decode: nil, shouldInterpolate: true, intent: .defaultIntent) else {
                print("Unable to decode captured JPEG data.")
                return
        }
        // NOTE(review): orientation is hard-coded to .right — assumes portrait
        // back-camera capture; verify for other orientations/cameras.
        let image = UIImage(cgImage: cgImageRef, scale: 1.0, orientation: UIImageOrientation.right)
        // do whatever you like with the generated image here...
        delegate?.processVideoSnapshot(image)
    }
}

编辑:

这是我在测试项目中使用的完整实现。

首先,我将所有 AVFoundation 特定代码移到它自己的VideoFeed类中,并为视图控制器创建了一些回调。

这将关注点分开并将视图控制器的职责限制为:

  • 将预览层添加到视图
  • 触发和处理捕获的图像/屏幕截图
  • 开始/停止视频文件录制。

这是 ViewController 的实现:

ViewController.swift

import UIKit
import AVFoundation


/// Hosts the camera preview and forwards capture actions to the VideoFeed
/// helper; receives the preview layer and snapshots via VideoFeedDelegate.
class ViewController: UIViewController, VideoFeedDelegate {

    @IBOutlet var cameraView: UIView!

    /// Owns the AVFoundation session; created lazily once camera access is granted.
    var videoFeed: VideoFeed?

    override func viewDidLoad() {
        super.viewDidLoad()
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)

        // end session
        videoFeed?.stopSession()
    }

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)

        // request camera access — the system prompts only once, then returns
        // the stored answer on subsequent calls
        AVCaptureDevice.requestAccess(for: AVMediaType.video) { [weak self] granted in
            guard granted else {
                // TODO: show UI stating camera cannot be used, update in settings app...
                print("Camera access denied")
                return
            }
            DispatchQueue.main.async {
                guard let strongSelf = self else { return }
                if let feed = strongSelf.videoFeed {
                    // video feed already available, restart session...
                    feed.startSession()
                } else {
                    // video access was enabled so setup video feed
                    strongSelf.videoFeed = VideoFeed(delegate: strongSelf)
                }
            }
        }
    }

    // MARK: VideoFeedDelegate

    /// Receives the configured preview layer, sizes it to the container view,
    /// and attaches it (keeping the strong reference via the layer tree).
    func videoFeedSetup(with layer: AVCaptureVideoPreviewLayer) {
        layer.frame = cameraView.layer.bounds
        cameraView.layer.addSublayer(layer)
    }

    /// Receives a captured snapshot and shows it in a lightbox screen.
    func processVideoSnapshot(_ image: UIImage?) {
        guard let image = image else { return }

        // SAVE IMAGE HERE IF DESIRED

        // for now just showing in a lightbox/detail view controller
        let storyboard = UIStoryboard(name: "Main", bundle: Bundle(for: AppDelegate.self))
        let vc = storyboard.instantiateViewController(withIdentifier: "LightboxViewController") as! LightboxViewController
        vc.previewImage = image
        navigationController?.pushViewController(vc, animated: true)
    }

    /// Tap action wired to the capture button in the storyboard.
    @IBAction func captureButtonTapped(_ sender: Any){
        // trigger photo capture from video feed...
        // this will trigger a callback to the function above with the captured image
        videoFeed?.capturePhoto()
    }
}

这是 VideoFeed 类的完整实现。

使用这种方法,您可以更轻松地在其他项目中重用视频功能,而无需将其与视图控制器紧密耦合。

VideoFeed.swift

import UIKit
import AVFoundation

/// Defines callbacks associated with the VideoFeed class. Notifies delegate of significant events.
protocol VideoFeedDelegate: class {

    /// Callback triggered when the preview layer for this class has been created and configured. Conforming objects should set and maintain a strong reference to this layer otherwise it will be set to nil when the calling function finishes execution.
    ///
    /// - Parameter layer: The video preview layer associated with the active captureSession in the VideoFeed class.
    func videoFeedSetup(with layer: AVCaptureVideoPreviewLayer)

    /// Callback triggered when a snapshot of the video feed has been generated.
    ///
    /// - Parameter image: The captured snapshot, or nil when image decoding failed.
    func processVideoSnapshot(_ image: UIImage?)
}

/// Owns the AVFoundation capture stack: one session with camera + mic inputs,
/// a movie-file output for video, and a photo output kept permanently attached
/// so photos can be taken without switching outputs (no mode-change lag).
class VideoFeed: NSObject {

    // MARK: Variables

    /// The capture session to be used in this class.
    var captureSession = AVCaptureSession()

    /// The preview layer associated with this session. This class has a
    /// weak reference to this layer, the delegate (usually a ViewController
    /// instance) should add this layer as a sublayer to its preview UIView.
    /// The delegate will have the strong reference to this preview layer.
    weak var previewLayer: AVCaptureVideoPreviewLayer?

    /// The output that handles saving the video stream to a file.
    /// Assigned in `setupOutputs()`.
    var fileOutput: AVCaptureMovieFileOutput?

    /// A reference to the active video input
    var activeInput: AVCaptureDeviceInput?

    /// Output for capturing frame grabs of video feed
    var cameraOutput = AVCapturePhotoOutput()

    /// Delegate to receive callbacks about significant events triggered by this class.
    weak var delegate: VideoFeedDelegate?

    /// The capture connection associated with the fileOutput.
    /// Set when fileOutput is created.
    var connection : AVCaptureConnection?


    // MARK: Public accessors

    /// Public initializer. Accepts a delegate to receive callbacks with the preview layer and any snapshot images.
    ///
    /// - Parameter delegate: A reference to an object conforming to VideoFeedDelegate
    /// to receive callbacks for significant events in this class.
    init(delegate: VideoFeedDelegate?) {
        self.delegate = delegate
        super.init()
        setupSession()
    }

    /// Public accessor to begin a capture session.
    public func startSession() {
        guard captureSession.isRunning == false else {
            return
        }

        captureSession.startRunning()
    }

    /// Public accessor to end the current capture session.
    public func stopSession() {

        // validate
        guard captureSession.isRunning else {
            return
        }

        // end file recording if the session ends and we're currently recording a video to file
        if let isRecording = fileOutput?.isRecording, isRecording {
            stopRecording()
        }

        captureSession.stopRunning()
    }

    /// Public accessor to begin file recording.
    public func startRecording() {

        guard fileOutput?.isRecording == false else {
            stopRecording()
            return
        }

        configureVideoOrientation()
        disableSmoothAutoFocus()

        guard let url = tempURL() else {
            print("Unable to start file recording, temp url generation failed.")
            return
        }

        fileOutput?.startRecording(to: url, recordingDelegate: self)
    }

    /// Public accessor to end file recording.
    public func stopRecording() {
        guard fileOutput?.isRecording == true else {
            return
        }

        fileOutput?.stopRecording()
    }

    /// Public accessor to trigger snapshot capture of video stream.
    public func capturePhoto() {

        // create settings object
        let settings = AVCapturePhotoSettings()

        // verify that we have a pixel format type available
        guard let previewPixelType = settings.availablePreviewPhotoPixelFormatTypes.first else {
            print("Unable to configure photo capture settings, 'availablePreviewPhotoPixelFormatTypes' has no available options.")
            return
        }

        let screensize = UIScreen.main.bounds.size

        // setup format configuration dictionary
        let previewFormat: [String : Any] = [
            kCVPixelBufferPixelFormatTypeKey as String: previewPixelType,
            kCVPixelBufferWidthKey as String: screensize.width,
            kCVPixelBufferHeightKey as String: screensize.height
            ]
        settings.previewPhotoFormat = previewFormat

        // trigger photo capture
        cameraOutput.capturePhoto(with: settings, delegate: self)
    }

    // MARK: Setup functions

    /// Handles configuration and setup of the session, inputs, video preview layer and outputs.
    /// If all are setup and configured it starts the session.
    internal func setupSession() {

        captureSession.sessionPreset = AVCaptureSession.Preset.high
        guard setupInputs() else {
            return
        }

        setupOutputs()
        setupVideoLayer()
        startSession()
    }

    /// Sets up capture inputs for this session.
    ///
    /// - Returns: Returns true if inputs are successfully setup, else false.
    internal func setupInputs() -> Bool {

        // only need access to this functionality within this function, so declare as sub-function
        func addInput(input: AVCaptureInput) {
            guard captureSession.canAddInput(input) else {
                return
            }

            captureSession.addInput(input)
        }

        do {
            if let camera = AVCaptureDevice.default(for: AVMediaType.video) {
                let input = try AVCaptureDeviceInput(device: camera)
                addInput(input: input)
                activeInput = input
            }

            // Setup Microphone
            if let microphone = AVCaptureDevice.default(for: AVMediaType.audio) {
                let micInput = try AVCaptureDeviceInput(device: microphone)
                addInput(input: micInput)
            }

            return true
        } catch {
            print("Error setting device video input: \(error)")
            return false
        }
    }

    /// Attaches the movie-file output and the photo output to the session
    /// and captures the movie output's video connection.
    internal func setupOutputs() {

        // only need access to this functionality within this function, so declare as sub-function
        func addOutput(output: AVCaptureOutput) {
            if captureSession.canAddOutput(output) {
                captureSession.addOutput(output)
            }
        }

        // file output
        // fix: the original created a local `fileOutput`, added it to the session
        // and dropped it — `self.fileOutput` stayed nil, so startRecording() /
        // stopRecording() were silent no-ops. Store the reference, and add it
        // through the canAddOutput-checked helper.
        let movieFileOutput = AVCaptureMovieFileOutput()
        addOutput(output: movieFileOutput)
        fileOutput = movieFileOutput

        if let connection = movieFileOutput.connection(with: .video), connection.isVideoStabilizationSupported {
            connection.preferredVideoStabilizationMode = .off
            self.connection = connection
        }

        // photo output for snapshots (also added via the checked helper)
        cameraOutput.isHighResolutionCaptureEnabled = true
        addOutput(output: cameraOutput)

    }

    /// Creates the preview layer and hands it to the delegate, which keeps
    /// the strong reference (this class only holds it weakly).
    internal func setupVideoLayer() {
        let layer =  AVCaptureVideoPreviewLayer(session: captureSession)
        layer.videoGravity = AVLayerVideoGravity.resizeAspectFill
        delegate?.videoFeedSetup(with: layer)
        previewLayer = layer
    }

    // MARK: Helper functions

    /// Creates a url in the temporary directory for file recording.
    ///
    /// - Returns: A file url if successful, else nil.
    internal func tempURL() -> URL? {
        let directory = NSTemporaryDirectory() as NSString

        if directory != "" {
            let path = directory.appendingPathComponent(NSUUID().uuidString + ".mp4")
            return URL(fileURLWithPath: path)
        }

        return nil
    }

    /// Disables smooth autofocus functionality on the active device,
    /// if the active device is set and 'isSmoothAutoFocusSupported'
    /// is supported for the currently set active device.
    internal func disableSmoothAutoFocus() {

        guard let device = activeInput?.device, device.isSmoothAutoFocusSupported else {
            return
        }

        do {
            try device.lockForConfiguration()
            device.isSmoothAutoFocusEnabled = false
            device.unlockForConfiguration()
        } catch {
            print("Error disabling smooth autofocus: \(error)")
        }

    }

    /// Sets the current AVCaptureVideoOrientation on the currently active connection if it's supported.
    internal func configureVideoOrientation() {

        guard let connection = connection, connection.isVideoOrientationSupported,
        let currentOrientation = AVCaptureVideoOrientation(rawValue: UIApplication.shared.statusBarOrientation.rawValue) else {
                return
        }

        connection.videoOrientation = currentOrientation
    }
}

// MARK: AVCapturePhotoCaptureDelegate
extension VideoFeed: AVCapturePhotoCaptureDelegate {

    // iOS 11+ processing
    // Uses AVCapturePhoto.fileDataRepresentation(), available from iOS 11.
    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        guard error == nil, let outputData = photo.fileDataRepresentation() else {
            print("Photo Error: \(String(describing: error))")
            return
        }

        print("captured photo...")
        loadImage(data: outputData)
    }

    // iOS < 11 processing
    // Deliberately uses the deprecated sample-buffer callback + JPEG helper;
    // on iOS 11+ this is a no-op because the callback above handles the photo.
    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photoSampleBuffer: CMSampleBuffer?, previewPhoto previewPhotoSampleBuffer: CMSampleBuffer?, resolvedSettings: AVCaptureResolvedPhotoSettings, bracketSettings: AVCaptureBracketedStillImageSettings?, error: Error?) {

        if #available(iOS 11.0, *) {
            // use iOS 11-only feature
            // nothing to do here as iOS 11 uses the callback above
        } else {
            guard error == nil else {
                print("Photo Error: \(String(describing: error))")
                return
            }

            guard let sampleBuffer = photoSampleBuffer,
                let previewBuffer = previewPhotoSampleBuffer,
                let outputData =  AVCapturePhotoOutput
                .jpegPhotoDataRepresentation(forJPEGSampleBuffer: sampleBuffer, previewPhotoSampleBuffer: previewBuffer) else {
                        print("Image creation from sample buffer/preview buffer failed.")
                        return
            }

            print("captured photo...")
            loadImage(data: outputData)
        }
    }

    /// Creates a UIImage from Data object received from AVCapturePhotoOutput 
    /// delegate callback and sends to the VideoFeedDelegate for handling.
    ///
    /// - Parameter data: Image data.
    internal func loadImage(data: Data) {
        guard let dataProvider = CGDataProvider(data: data as CFData), let cgImageRef: CGImage = CGImage(jpegDataProviderSource: dataProvider, decode: nil, shouldInterpolate: true, intent: .defaultIntent) else {
            return
        }
        // NOTE(review): orientation is hard-coded to .right — assumes portrait
        // back-camera capture; confirm for other orientations/cameras.
        let image = UIImage(cgImage: cgImageRef, scale: 1.0, orientation: UIImageOrientation.right)
        delegate?.processVideoSnapshot(image)
    }
}

// MARK: AVCaptureFileOutputRecordingDelegate
extension VideoFeed: AVCaptureFileOutputRecordingDelegate {

    /// Logs when a movie-file recording actually begins.
    func fileOutput(_ output: AVCaptureFileOutput, didStartRecordingTo fileURL: URL, from connections: [AVCaptureConnection]) {
        print("Video recording started: \(fileURL.absoluteString)")
    }

    /// Saves the finished recording to the photo library, unless an error occurred.
    func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {

        guard error == nil else {
            print("Error recording movie: \(String(describing: error))")
            return
        }

        // Requires NSPhotoLibraryAddUsageDescription in Info.plist (see note below).
        UISaveVideoAtPathToSavedPhotosAlbum(outputFileURL.path, nil, nil, nil)
    }
}

对于其他使用此功能的人,不要忘记向您的 info.plist 添加权限,以访问相机、照片库和麦克风。

<key>NSCameraUsageDescription</key>
<string>Let us use your camera</string>
<key>NSPhotoLibraryAddUsageDescription</key>
<string>save to images</string>
<key>NSMicrophoneUsageDescription</key>
<string>for sound in video</string>
于 2018-08-14T01:37:34.637 回答