
I'm building a sample app that records video using AVFoundation. The point is to give me more control over how the video is recorded. In my sample project I have video capture working, but I'm struggling with handling orientation correctly.

I've done a lot of searching online, and others have suggested that I should not let my capture view or capture session rotate with orientation, but instead set a transform to rotate the video during playback. That works fine on iOS and Mac devices, but I'm wondering whether it would cause problems on other platforms such as Windows or Android.

Also, when I inspect the metadata of the recorded video, I see that the width and height are not set correctly for the orientation. This makes sense, since I'm only transforming the presentation of the video, not its actual resolution.

My question is: how do I properly support portrait and landscape orientations and have them correctly reflected in the video file output? I need these videos to play correctly on all platforms, so I believe the resolution matters a great deal.

Here is the complete source I've written so far. I'd appreciate any advice you can offer.

import UIKit
import AVFoundation

class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {

    //MARK: - Outlet

    @IBOutlet weak var previewView: UIView!
    @IBOutlet var playStopButton: UIButton!

    //MARK: - Private Variables

    let videoQueue = DispatchQueue(label: "VideoQueue", qos: .background, attributes: .concurrent, autoreleaseFrequency: .workItem, target: nil)
    private let captureSession = AVCaptureSession()

    var outputUrl: URL {
        get {

            if let url = _outputUrl {
                return url
            }


            _outputUrl = outputDirectory.appendingPathComponent("video.mp4")
            return _outputUrl!
        }
    }

    private var _outputUrl: URL?

    var outputDirectory: URL {
        get {

            if let url = _outputDirectory {
                return url
            }


            _outputDirectory = getDocumentsDirectory().appendingPathComponent("recording")
            return _outputDirectory!
        }
    }

    private var _outputDirectory: URL?

    private var assetWriter: AVAssetWriter?
    private var videoInput: AVAssetWriterInput?
    private var audioInput: AVAssetWriterInput?
    private var videoOutput: AVCaptureVideoDataOutput?
    private var audioOutput: AVCaptureAudioDataOutput?

    private var isRecording = false
    private var isWriting = false

    private var videoSize = CGSize(width: 640, height: 480)

    //MARK: - View Life-cycle

    override func viewDidLoad() {
        super.viewDidLoad()


        videoQueue.async {

            do {

                try self.configureCaptureSession()
                try self.configureAssetWriter()

                DispatchQueue.main.async {
                    self.configurePreview()
                }

            } catch {

                DispatchQueue.main.async {
                    self.showAlert("Unable to configure video output")
                }
            }
        }
    }

    override var preferredInterfaceOrientationForPresentation: UIInterfaceOrientation {
        return .portrait
    }

    override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
        return .portrait
    }

    //MARK: - Capture Session

    private func configureCaptureSession() throws {

        do {

            // configure the session
            if captureSession.canSetSessionPreset(AVCaptureSessionPreset640x480) {
                captureSession.sessionPreset = AVCaptureSessionPreset640x480
            }

            // configure capture devices
            let camDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)
            let micDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio)

            let camInput = try AVCaptureDeviceInput(device: camDevice)
            let micInput = try AVCaptureDeviceInput(device: micDevice)

            if captureSession.canAddInput(camInput) {
                captureSession.addInput(camInput)
            }

            if captureSession.canAddInput(micInput) {
                captureSession.addInput(micInput)
            }

            // configure audio/video output
            videoOutput = AVCaptureVideoDataOutput()
            videoOutput?.alwaysDiscardsLateVideoFrames = false // TODO: is this necessary?
            videoOutput?.setSampleBufferDelegate(self, queue: videoQueue)

            if let v = videoOutput {
                captureSession.addOutput(v)
            }

            audioOutput = AVCaptureAudioDataOutput()
            audioOutput?.setSampleBufferDelegate(self, queue: videoQueue)

            if let a = audioOutput {
                captureSession.addOutput(a)
            }

            // configure audio session
            let audioSession = AVAudioSession.sharedInstance()
            try audioSession.setCategory(AVAudioSessionCategoryPlayAndRecord)
            try audioSession.setActive(true)

            var micPort: AVAudioSessionPortDescription?

            if let inputs = audioSession.availableInputs {
                for port in inputs {
                    if port.portType == AVAudioSessionPortBuiltInMic {
                        micPort = port
                        break;
                    }
                }
            }

            if let port = micPort, let dataSources = port.dataSources {

                for source in dataSources {
                    if source.orientation == AVAudioSessionOrientationFront {
                        try audioSession.setPreferredInput(port)
                        break
                    }
                }
            }

        } catch {
            print("Failed to configure audio/video capture session")
            throw error
        }
    }

    private func configureAssetWriter() throws {

        prepareVideoFile()

        do {

            assetWriter = try AVAssetWriter(url: outputUrl, fileType: AVFileTypeMPEG4)

            guard let writer = assetWriter else {
                print("Asset writer not created")
                return
            }

            let vidSize = videoSize
            let videoSettings: [String: Any] = [AVVideoCodecKey: AVVideoCodecH264,
                                 AVVideoWidthKey: NSNumber(value: Float(vidSize.width)),
                                 AVVideoHeightKey: NSNumber(value: Float(vidSize.height))]

            videoInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoSettings)
            videoInput?.expectsMediaDataInRealTime = true
            videoInput?.transform = getVideoTransform()

            var channelLayout = AudioChannelLayout()
            memset(&channelLayout, 0, MemoryLayout<AudioChannelLayout>.size);
            channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;

            let audioSettings: [String: Any] = [AVFormatIDKey: kAudioFormatMPEG4AAC,
                                                AVSampleRateKey: 44100,
                                                AVNumberOfChannelsKey: 2]

            audioInput = AVAssetWriterInput(mediaType: AVMediaTypeAudio, outputSettings: audioSettings)
            audioInput?.expectsMediaDataInRealTime = true

            guard let vi = videoInput else {
                print("Video input not configured")
                return
            }

            guard let ai = audioInput else {
                print("Audio input not configured")
                return
            }

            if writer.canAdd(vi) {
                writer.add(vi)
            }

            if writer.canAdd(ai) {
                writer.add(ai)
            }

        } catch {
            print("Failed to configure asset writer")
            throw error
        }
    }

    private func prepareVideoFile() {

        if FileManager.default.fileExists(atPath: outputUrl.path) {

            do {
                try FileManager.default.removeItem(at: outputUrl)
            } catch {
                print("Unable to remove file at URL \(outputUrl)")
            }
        }

        if !FileManager.default.fileExists(atPath: outputDirectory.path) {

            do {
                try FileManager.default.createDirectory(at: outputDirectory, withIntermediateDirectories: true, attributes: nil)
            } catch {
                print("Unable to create directory at URL \(outputDirectory)")
            }
        }
    }

    private func configurePreview() {

        if let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) {
            previewLayer.frame = previewView.bounds
            previewView.layer.addSublayer(previewLayer)
        }
    }

    private func getVideoSize() -> CGSize {

        if UIDevice.current.orientation == .landscapeLeft || UIDevice.current.orientation == .landscapeRight {

            if videoSize.width > videoSize.height {
                return videoSize
            } else {
                return CGSize(width: videoSize.height, height: videoSize.width)
            }

        } else {

            if videoSize.width < videoSize.height {
                return videoSize
            } else {
                return CGSize(width: videoSize.height, height: videoSize.width)
            }
        }
    }

    private func getVideoTransform() -> CGAffineTransform {

        switch UIDevice.current.orientation {

        case .portraitUpsideDown:
            return CGAffineTransform(rotationAngle: CGFloat((M_PI * -90.0)) / 180.0)

        case .landscapeLeft:
            return CGAffineTransform(rotationAngle: CGFloat((M_PI * -180.0)) / 180.0) // TODO: Add support for front facing camera
//            return CGAffineTransform(rotationAngle: CGFloat((M_PI * 0.0)) / 180.0) // TODO: For front facing camera

        case .landscapeRight:
            return CGAffineTransform(rotationAngle: CGFloat((M_PI * 0.0)) / 180.0) // TODO: Add support for front facing camera
//            return CGAffineTransform(rotationAngle: CGFloat((M_PI * -180.0)) / 180.0) // TODO: For front facing camera

        default:
            return CGAffineTransform(rotationAngle: CGFloat((M_PI * 90.0)) / 180.0)
        }
    }

    //MARK: - Controls

    private func startRecording() {

        videoQueue.async {
            self.captureSession.startRunning()
        }

        isRecording = true
        playStopButton.setTitle("Stop Recording", for: .normal)
        print("Recording did start")
    }

    private func stopRecording() {

        if !isRecording {
            return
        }

        videoQueue.async {

            self.assetWriter?.finishWriting {
                print("Asset writer did finish writing")
                self.isWriting = false
            }

            self.captureSession.stopRunning()
        }

        isRecording = false

        playStopButton.setTitle("Start Recording", for: .normal)
        print("Recording did stop")
    }

    //MARK: - AVCaptureVideoDataOutputSampleBufferDelegate

    func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {

        guard let w = assetWriter else {
            print("Asset writer not configured")
            return
        }

        guard let vo = videoOutput else {
            print("Video output not configured")
            return
        }

        guard let ao = audioOutput else {
            print("Audio output not configured")
            return
        }

        guard let vi = videoInput else {
            print("Video input not configured")
            return
        }

        guard let ai = audioInput else {
            print("Audio input not configured")
            return
        }

        let st = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)

        print("Writer status \(w.status.rawValue)")

        if let e = w.error {
            print("Writer error \(e)")
            stopRecording()
            return
        }

        switch w.status {

        case .unknown:

            if !isWriting {
                isWriting = true
                w.startWriting()
                w.startSession(atSourceTime: st)
            }

            return

        case .completed:
            print("Video writing completed")
            return

        case .cancelled:
            print("Video writing cancelled")
            return

        case .failed:
            print("Video writing failed")
            return

        default:
            print("Video is writing")
        }

        if vo == captureOutput {

            if !vi.append(sampleBuffer) {
                print("Unable to write to video buffer")
            }

        } else if ao == captureOutput {

            if !ai.append(sampleBuffer) {
                print("Unable to write to audio buffer")
            }
        }
    }

    //MARK: Helpers

    private func getDocumentsDirectory() -> URL {
        let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
        let documentsDirectory = paths[0]
        return documentsDirectory
    }
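
    private func showAlert(_ message: String) {
        // Assumed helper: the post calls showAlert(_:) but its implementation
        // is not included in the source above. A minimal sketch is used here
        // so the file compiles as a single unit.
        let alert = UIAlertController(title: nil, message: message, preferredStyle: .alert)
        alert.addAction(UIAlertAction(title: "OK", style: .default, handler: nil))
        present(alert, animated: true, completion: nil)
    }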

    //MARK: Actions

    @IBAction func startStopTapped(sender: AnyObject) {

        if isRecording {
            stopRecording()
        } else {
            startRecording()
        }
    }
}

4 Answers


The video orientation is handled by AVAssetWriterInput.transform, and it looks like the getVideoTransform() implementation is incorrect: CGAffineTransform expects the rotation angle in radians, so it needs to be changed to:

private func getVideoTransform() -> CGAffineTransform {
    switch UIDevice.current.orientation {
    case .portrait:
        return .identity
    case .portraitUpsideDown:
        return CGAffineTransform(rotationAngle: .pi)
    case .landscapeLeft:
        return CGAffineTransform(rotationAngle: .pi / 2)
    case .landscapeRight:
        return CGAffineTransform(rotationAngle: -.pi / 2)
    default:
        return .identity
    }
}

From Apple's Technical Q&A: https://developer.apple.com/library/archive/qa/qa1744/_index.html

If you are using an AVAssetWriter object to write a movie file, you can use the transform property of the associated AVAssetWriterInput to specify the orientation of the output file. This writes a display transform property into the output file as the preferred transformation of the visual media data for display purposes. See the AVAssetWriterInput.h interface file for details.
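
One way to confirm the transform was actually written is to read the video track's preferredTransform back from the finished file. A minimal verification sketch (an illustration, not part of the original answer), assuming outputUrl points at the completed recording:

let asset = AVURLAsset(url: outputUrl)
if let track = asset.tracks(withMediaType: .video).first {
    // A portrait recording should report a 90-degree rotation matrix
    // (b: 1.0, c: -1.0) rather than the identity.
    print("preferredTransform: \(track.preferredTransform)")
    // naturalSize stays at the sensor resolution (e.g. 640x480); players
    // apply the transform at display time.
    print("naturalSize: \(track.naturalSize)")
}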

Answered on 2019-07-11T12:47:52.840

I found a solution to my problem. The solution is to export the video using AVAssetExportSession, handling the video size there, and to handle the rotation at export time rather than during recording. I still have an issue where I need to fix the scale factor to go from the original video size down to the smaller 640x480 resolution, but at least I solved the rotation problem. See the updated code below.

import UIKit
import AVFoundation

class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {

    //MARK: - Outlet

    @IBOutlet weak var previewView: UIView!
    @IBOutlet var playStopButton: UIButton!

    //MARK: - Private Variables

    let videoQueue = DispatchQueue(label: "VideoQueue", qos: .background, attributes: .concurrent, autoreleaseFrequency: .workItem, target: nil)
    private let captureSession = AVCaptureSession()

    var outputUrl: URL {
        get {

            if let url = _outputUrl {
                return url
            }

            _outputUrl = outputDirectory.appendingPathComponent("video.mp4")
            return _outputUrl!
        }
    }

    private var _outputUrl: URL?

    var exportUrl: URL {
        get {

            if let url = _exportUrl {
                return url
            }

            _exportUrl = outputDirectory.appendingPathComponent("video_encoded.mp4")
            return _exportUrl!
        }
    }

    private var _exportUrl: URL?

    var outputDirectory: URL {
        get {

            if let url = _outputDirectory {
                return url
            }

            _outputDirectory = getDocumentsDirectory().appendingPathComponent("recording")
            return _outputDirectory!
        }
    }

    private var _outputDirectory: URL?

    private var assetWriter: AVAssetWriter?
    private var videoInput: AVAssetWriterInput?
    private var audioInput: AVAssetWriterInput?
    private var videoOutput: AVCaptureVideoDataOutput?
    private var audioOutput: AVCaptureAudioDataOutput?

    private var isRecording = false
    private var isWriting = false

    private var videoSize = CGSize(width: 640, height: 480)
    private var exportPreset = AVAssetExportPreset640x480

    //MARK: - View Life-cycle

    override func viewDidLoad() {
        super.viewDidLoad()

        videoQueue.async {

            do {

                try self.configureCaptureSession()

                DispatchQueue.main.sync {
                    self.configurePreview()
                }

            } catch {

                DispatchQueue.main.async {
                    self.showAlert("Unable to configure capture session")
                }
            }
        }
    }

    override var preferredInterfaceOrientationForPresentation: UIInterfaceOrientation {
        return .portrait
    }

    override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
        return .portrait
    }

    //MARK: - Capture Session

    private func configureCaptureSession() throws {

        do {

            // configure capture devices
            let camDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)
            let micDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio)

            let camInput = try AVCaptureDeviceInput(device: camDevice)
            let micInput = try AVCaptureDeviceInput(device: micDevice)

            if captureSession.canAddInput(camInput) {
                captureSession.addInput(camInput)
            }

            if captureSession.canAddInput(micInput) {
                captureSession.addInput(micInput)
            }

            // configure audio/video output
            videoOutput = AVCaptureVideoDataOutput()
            videoOutput?.alwaysDiscardsLateVideoFrames = false // TODO: is this necessary?
            videoOutput?.setSampleBufferDelegate(self, queue: videoQueue)

            if let v = videoOutput {
                captureSession.addOutput(v)
            }

            audioOutput = AVCaptureAudioDataOutput()
            audioOutput?.setSampleBufferDelegate(self, queue: videoQueue)

            if let a = audioOutput {
                captureSession.addOutput(a)
            }

            // configure audio session
            let audioSession = AVAudioSession.sharedInstance()
            try audioSession.setCategory(AVAudioSessionCategoryPlayAndRecord)
            try audioSession.setActive(true)

            var micPort: AVAudioSessionPortDescription?

            if let inputs = audioSession.availableInputs {
                for port in inputs {
                    if port.portType == AVAudioSessionPortBuiltInMic {
                        micPort = port
                        break;
                    }
                }
            }

            if let port = micPort, let dataSources = port.dataSources {

                for source in dataSources {
                    if source.orientation == AVAudioSessionOrientationFront {
                        try audioSession.setPreferredInput(port)
                        break
                    }
                }
            }

        } catch {
            print("Failed to configure audio/video capture session")
            throw error
        }
    }

    private func configureAssetWriter() throws {

        prepareVideoFile()

        do {

            if assetWriter != nil {
                assetWriter = nil
                videoInput = nil
                audioInput = nil
            }

            assetWriter = try AVAssetWriter(url: outputUrl, fileType: AVFileTypeMPEG4)

            guard let writer = assetWriter else {
                print("Asset writer not created")
                return
            }

            let videoSettings: [String: Any] = [AVVideoCodecKey: AVVideoCodecH264,
                                 AVVideoWidthKey: NSNumber(value: Float(videoSize.width)),
                                 AVVideoHeightKey: NSNumber(value: Float(videoSize.height))]

            videoInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoSettings)
            videoInput?.expectsMediaDataInRealTime = true

            var channelLayout = AudioChannelLayout()
            memset(&channelLayout, 0, MemoryLayout<AudioChannelLayout>.size);
            channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;

            let audioSettings: [String: Any] = [AVFormatIDKey: kAudioFormatMPEG4AAC,
                                                AVSampleRateKey: 44100,
                                                AVNumberOfChannelsKey: 2]

            audioInput = AVAssetWriterInput(mediaType: AVMediaTypeAudio, outputSettings: audioSettings)
            audioInput?.expectsMediaDataInRealTime = true

            guard let vi = videoInput else {
                print("Video input not configured")
                return
            }

            guard let ai = audioInput else {
                print("Audio input not configured")
                return
            }

            if writer.canAdd(vi) {
                writer.add(vi)
            }

            if writer.canAdd(ai) {
                writer.add(ai)
            }

        } catch {
            print("Failed to configure asset writer")
            throw error
        }
    }

    private func prepareVideoFile() {

        if FileManager.default.fileExists(atPath: outputUrl.path) {

            do {
                try FileManager.default.removeItem(at: outputUrl)
            } catch {
                print("Unable to remove file at URL \(outputUrl)")
            }
        }

        if !FileManager.default.fileExists(atPath: outputDirectory.path) {

            do {
                try FileManager.default.createDirectory(at: outputDirectory, withIntermediateDirectories: true, attributes: nil)
            } catch {
                print("Unable to create directory at URL \(outputDirectory)")
            }
        }
    }

    private func configurePreview() {

        if let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) {
            previewLayer.frame = previewView.bounds
            previewView.layer.addSublayer(previewLayer)
        }
    }

    private func getVideoSize() -> CGSize {

        if UIDevice.current.orientation == .landscapeLeft || UIDevice.current.orientation == .landscapeRight {

            if videoSize.width > videoSize.height {
                return videoSize
            } else {
                return CGSize(width: videoSize.height, height: videoSize.width)
            }

        } else {

            if videoSize.width < videoSize.height {
                return videoSize
            } else {
                return CGSize(width: videoSize.height, height: videoSize.width)
            }
        }
    }

    //MARK: - Controls

    private func startRecording() {

        videoQueue.async {

            do {
                try self.configureAssetWriter()
                self.captureSession.startRunning()

            } catch {
                print("Unable to start recording")
                DispatchQueue.main.async { self.showAlert("Unable to start recording") }
            }
        }

        isRecording = true
        playStopButton.setTitle("Stop Recording", for: .normal)
        print("Recording did start")
    }

    private func stopRecording() {

        if !isRecording {
            return
        }

        videoQueue.async {

            self.captureSession.stopRunning()

            // Export only after the writer has actually finished; the file on
            // disk may still be incomplete until this completion handler runs.
            self.assetWriter?.finishWriting {
                print("Asset writer did finish writing")
                self.isWriting = false

                do {
                    try self.export()
                } catch {
                    print("Export failed")
                    DispatchQueue.main.async { self.showAlert("Unable to export video") }
                }
            }
        }

        isRecording = false

        playStopButton.setTitle("Start Recording", for: .normal)
        print("Recording did stop")
    }

    //MARK: - AVCaptureVideoDataOutputSampleBufferDelegate

    func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {

        guard let w = assetWriter else {
            print("Asset writer not configured")
            return
        }

        guard let vo = videoOutput else {
            print("Video output not configured")
            return
        }

        guard let ao = audioOutput else {
            print("Audio output not configured")
            return
        }

        guard let vi = videoInput else {
            print("Video input not configured")
            return
        }

        guard let ai = audioInput else {
            print("Audio input not configured")
            return
        }

        let st = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)

        print("Writer status \(w.status.rawValue)")

        if let e = w.error {
            print("Writer error \(e)")
            stopRecording()
            return
        }

        switch w.status {

        case .unknown:

            if !isWriting {
                isWriting = true
                w.startWriting()
                w.startSession(atSourceTime: st)
            }

            return

        case .completed:
            print("Video writing completed")
            return

        case .cancelled:
            print("Video writing cancelled")
            return

        case .failed:
            print("Video writing failed")
            return

        default:
            print("Video is writing")
        }

        if vo == captureOutput {

            if !vi.append(sampleBuffer) {
                print("Unable to write to video buffer")
            }

        } else if ao == captureOutput {

            if !ai.append(sampleBuffer) {
                print("Unable to write to audio buffer")
            }
        }
    }

    //MARK: - Export

    private func getVideoComposition(asset: AVAsset, videoSize: CGSize) -> AVMutableVideoComposition? {

        guard let videoTrack = asset.tracks(withMediaType: AVMediaTypeVideo).first else {
            print("Unable to get video tracks")
            return nil
        }

        let videoComposition = AVMutableVideoComposition()
        videoComposition.renderSize = videoSize

        let seconds: Float64 = Float64(1.0 / videoTrack.nominalFrameRate)
        videoComposition.frameDuration = CMTimeMakeWithSeconds(seconds, 600);

        let layerInst = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)

        var transforms = asset.preferredTransform

        var isPortrait = true;

        if (transforms.a == 0.0 && transforms.b == 1.0 && transforms.c == -1.0 && transforms.d == 0)
        || (transforms.a == 0.0 && transforms.b == -1.0 && transforms.c == 1.0 && transforms.d == 0) {
            isPortrait = false;
        }

        if isPortrait {
            transforms = transforms.concatenating(CGAffineTransform(rotationAngle: CGFloat.pi / 2)) // 90 degrees
            transforms = transforms.concatenating(CGAffineTransform(translationX: videoSize.width, y: 0))
        }

        layerInst.setTransform(transforms, at: kCMTimeZero)

        let inst = AVMutableVideoCompositionInstruction()
        inst.backgroundColor = UIColor.black.cgColor
        inst.layerInstructions = [layerInst]
        inst.timeRange = CMTimeRange(start: kCMTimeZero, duration: asset.duration)

        videoComposition.instructions = [inst]

        return videoComposition

    }

    private func export() throws {

        let videoAsset = AVURLAsset(url: outputUrl)

        if FileManager.default.fileExists(atPath: exportUrl.path) {
            try FileManager.default.removeItem(at: exportUrl)
        }

        let videoSize = getVideoSize()

        guard let encoder = AVAssetExportSession(asset: videoAsset, presetName: exportPreset) else {
            print("Unable to create encoder")
            return
        }

        guard let vidcomp = getVideoComposition(asset: videoAsset, videoSize: videoSize) else {
            print("Unable to create video composition")
            return
        }

        encoder.videoComposition = vidcomp
        encoder.outputFileType = AVFileTypeMPEG4  // MP4 format
        encoder.outputURL = exportUrl
        encoder.shouldOptimizeForNetworkUse = true

        encoder.exportAsynchronously(completionHandler: {
            // Check the session status; the handler also runs on failure.
            if encoder.status == .completed {
                print("Video exported successfully")
            } else {
                print("Export failed: \(String(describing: encoder.error))")
            }
        })
    }

    //MARK: Helpers

    private func getDocumentsDirectory() -> URL {
        let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
        let documentsDirectory = paths[0]
        return documentsDirectory
    }
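
    private func showAlert(_ message: String) {
        // Assumed helper, as in the question: showAlert(_:) is called above
        // but its implementation was not included in the post.
        let alert = UIAlertController(title: nil, message: message, preferredStyle: .alert)
        alert.addAction(UIAlertAction(title: "OK", style: .default, handler: nil))
        present(alert, animated: true, completion: nil)
    }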

    //MARK: Actions

    @IBAction func startStopTapped(sender: AnyObject) {

        if isRecording {
            stopRecording()
        } else {
            startRecording()
        }
    }
}
Answered on 2017-03-03T12:31:32.780

The simplest way I've found is to set preferredTransform on the video composition track according to the desired orientation.

Solution

// Device orientation
var orientation = UIDevice.current.orientation

// The composition
let audioVideoComposition = AVMutableComposition()

// The video track of the composition
let videoCompositionTrack = audioVideoComposition
    .addMutableTrack(withMediaType: .video, preferredTrackID: .init())!

// Set preferred transform
videoCompositionTrack.preferredTransform = getVideoTransform()

Helper functions and extensions

func getVideoTransform() -> CGAffineTransform {
    switch orientation {
    case .portrait:
        return CGAffineTransform(rotationAngle: 90.degreesToRadians)
    case .portraitUpsideDown:
        return CGAffineTransform(rotationAngle: 180.degreesToRadians)
    case .landscapeLeft:
        return CGAffineTransform(rotationAngle: 0.degreesToRadians)
    case .landscapeRight:
        return CGAffineTransform(rotationAngle: 180.degreesToRadians)
    default:
        return CGAffineTransform(rotationAngle: 90.degreesToRadians)
    }
}

extension BinaryInteger {
    var degreesToRadians: CGFloat { CGFloat(self) * .pi / 180 }
}

extension FloatingPoint {
    var degreesToRadians: Self { self * .pi / 180 }
    var radiansToDegrees: Self { self * 180 / .pi }
}
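
For context, here is a minimal sketch of how the composition track above might be fed from an already-recorded file before export. The recordedUrl name is an assumption for illustration; only videoCompositionTrack comes from the answer:

// Hypothetical usage: pull the recorded video track into the composition,
// then hand audioVideoComposition to an AVAssetExportSession as usual.
let recordedAsset = AVURLAsset(url: recordedUrl)
if let sourceTrack = recordedAsset.tracks(withMediaType: .video).first {
    try? videoCompositionTrack.insertTimeRange(
        CMTimeRange(start: .zero, duration: recordedAsset.duration),
        of: sourceTrack,
        at: .zero)
}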
Answered on 2020-10-07T08:20:59.167

Just swap the width and height in the writer settings.

And don't forget about HEVC.

assetWriter = try AVAssetWriter(url: outputUrl, fileType: AVFileTypeMPEG4)

https://developer.apple.com/videos/play/wwdc2017/503 https://developer.apple.com/videos/play/wwdc2017/511

- (BOOL)configureWriterInput {
    const BOOL isError = YES;

    AVFileType mov = AVFileTypeQuickTimeMovie;

    NSDictionary<NSString *, id> *settings;

    // Prefer HEVC where available (see the WWDC 2017 sessions linked above)
    if (@available(iOS 11.0, *)) {
        NSArray<AVVideoCodecType> *available =
            [self.sessionOutput availableVideoCodecTypesForAssetWriterWithOutputFileType:mov];

        if ([available containsObject:AVVideoCodecTypeHEVC]) {
            settings = [self.sessionOutput recommendedVideoSettingsForVideoCodecType:AVVideoCodecTypeHEVC
                                                           assetWriterOutputFileType:mov];
        }
        else {
            settings = [self.sessionOutput recommendedVideoSettingsForAssetWriterWithOutputFileType:mov];
        }
    }
    else {
        settings = [self.sessionOutput recommendedVideoSettingsForAssetWriterWithOutputFileType:mov];
    }

    if (![writer canApplyOutputSettings:settings forMediaType:AVMediaTypeVideo]) {
        return isError;
    }

    // Swap width and height to fix the orientation
    if (settings[AVVideoHeightKey] && settings[AVVideoWidthKey]) {
        NSMutableDictionary<NSString *, id> *rotated = [settings mutableCopy];
        rotated[AVVideoHeightKey] = settings[AVVideoWidthKey];
        rotated[AVVideoWidthKey] = settings[AVVideoHeightKey];

        if ([writer canApplyOutputSettings:rotated forMediaType:AVMediaTypeVideo]) {
            settings = rotated;
        }
    }

    writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                     outputSettings:settings];

    // Alternatively, derive a transform from the capture connection:
    // AVCaptureConnection *con =
    //     [self.sessionOutput connectionWithMediaType:AVMediaTypeVideo];
    // writerInput.transform =
    //     [[self class] configureOrientationTransform:con.videoOrientation];

    if ([writer canAddInput:writerInput]) {
        [writer addInput:writerInput];
        return !isError;
    }

    return isError;
}
Answered on 2019-04-18T06:31:10.470