I'm putting together a sample app that records video using AVFoundation. The point is to give me finer control over how the video is recorded. In my sample project I have video capture working, but I'm struggling to handle orientation correctly.
I've done a lot of searching online, and others suggest that I should not let my capture view or capture session rotate with the device orientation, but instead set a transform to rotate the video during playback. That works fine for me on iOS and Mac devices, but I'm wondering whether it would cause problems on other platforms such as Windows or Android.
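(For contrast, the approach I was advised against would be rotating the capture itself by setting the output connection's videoOrientation — a minimal sketch of what I understand that to mean, not something my code below does:)
if let connection = videoOutput?.connection(withMediaType: AVMediaTypeVideo), connection.isVideoOrientationSupported {
connection.videoOrientation = .portrait // rotate the captured buffers themselves instead of relying on a playback transform
}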
Also, when I look at the metadata of the recorded video, I see that the width and height are not set correctly for the orientation. That makes sense, because I'm only transforming how the video is presented, not its actual resolution.
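(To show what I mean, here is roughly how I'm checking it, where recordedUrl stands in for the outputUrl used in the code below:)
let asset = AVURLAsset(url: recordedUrl)
if let track = asset.tracks(withMediaType: AVMediaTypeVideo).first {
print("naturalSize: \(track.naturalSize)")               // still reports 640x480 for portrait recordings
print("preferredTransform: \(track.preferredTransform)") // only this rotation changes
}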
My question is: how do I properly support both portrait and landscape orientation and have that reflected correctly in the video file output? I need these videos to play back correctly on all platforms, so I think the resolution really matters.
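(To make it concrete, I imagine the fix involves something like writing the orientation-adjusted dimensions into the writer settings — a rough, unverified sketch reusing the getVideoSize() helper from my code below:)
let size = getVideoSize() // 480x640 in portrait, 640x480 in landscape
let videoSettings: [String: Any] = [AVVideoCodecKey: AVVideoCodecH264,
AVVideoWidthKey: NSNumber(value: Float(size.width)),
AVVideoHeightKey: NSNumber(value: Float(size.height))]
// Presumably the incoming buffers would also have to match these dimensions somehow,
// otherwise the 640x480 frames would just be scaled to fit.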
Here is the complete source I've written so far. I'd appreciate any suggestions you can offer.
import UIKit
import AVFoundation
class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {
//MARK: - Outlet
@IBOutlet weak var previewView: UIView!
@IBOutlet var playStopButton: UIButton!
//MARK: - Private Variables
// Sample-buffer delegate callbacks must be delivered on a serial queue, so no .concurrent attribute here.
let videoQueue = DispatchQueue(label: "VideoQueue", qos: .background, autoreleaseFrequency: .workItem, target: nil)
private let captureSession = AVCaptureSession()
var outputUrl: URL {
get {
if let url = _outputUrl {
return url
}
_outputUrl = outputDirectory.appendingPathComponent("video.mp4")
return _outputUrl!
}
}
private var _outputUrl: URL?
var outputDirectory: URL {
get {
if let url = _outputDirectory {
return url
}
_outputDirectory = getDocumentsDirectory().appendingPathComponent("recording")
return _outputDirectory!
}
}
private var _outputDirectory: URL?
private var assetWriter: AVAssetWriter?
private var videoInput: AVAssetWriterInput?
private var audioInput: AVAssetWriterInput?
private var videoOutput: AVCaptureVideoDataOutput?
private var audioOutput: AVCaptureAudioDataOutput?
private var isRecording = false
private var isWriting = false
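// Fixed capture size; as written, this is also the width/height that ends up in the output file
// regardless of the device orientation.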
private var videoSize = CGSize(width: 640, height: 480)
//MARK: - View Life-cycle
override func viewDidLoad() {
super.viewDidLoad()
videoQueue.async {
do {
try self.configureCaptureSession()
try self.configureAssetWriter()
DispatchQueue.main.async {
self.configurePreview()
}
} catch {
DispatchQueue.main.async {
self.showAlert("Unable to configure video output")
}
}
}
}
override var preferredInterfaceOrientationForPresentation: UIInterfaceOrientation {
return .portrait
}
override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
return .portrait
}
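// The UI is locked to portrait above; only the asset writer's transform (set further down) is
// meant to reflect the device orientation at record time.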
//MARK: - Capture Session
private func configureCaptureSession() throws {
do {
// configure the session
if captureSession.canSetSessionPreset(AVCaptureSessionPreset640x480) {
captureSession.sessionPreset = AVCaptureSessionPreset640x480
}
// configure capture devices
let camDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)
let micDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio)
let camInput = try AVCaptureDeviceInput(device: camDevice)
let micInput = try AVCaptureDeviceInput(device: micDevice)
if captureSession.canAddInput(camInput) {
captureSession.addInput(camInput)
}
if captureSession.canAddInput(micInput) {
captureSession.addInput(micInput)
}
// configure audio/video output
videoOutput = AVCaptureVideoDataOutput()
videoOutput?.alwaysDiscardsLateVideoFrames = false // TODO: is this necessary?
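// Setting this to false keeps late frames queued instead of dropping them, which can build up
// memory if writing falls behind.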
videoOutput?.setSampleBufferDelegate(self, queue: videoQueue)
if let v = videoOutput {
captureSession.addOutput(v)
}
audioOutput = AVCaptureAudioDataOutput()
audioOutput?.setSampleBufferDelegate(self, queue: videoQueue)
if let a = audioOutput {
captureSession.addOutput(a)
}
// configure audio session
let audioSession = AVAudioSession.sharedInstance()
try audioSession.setCategory(AVAudioSessionCategoryPlayAndRecord)
try audioSession.setActive(true)
var micPort: AVAudioSessionPortDescription?
if let inputs = audioSession.availableInputs {
for port in inputs {
if port.portType == AVAudioSessionPortBuiltInMic {
micPort = port
break
}
}
}
if let port = micPort, let dataSources = port.dataSources {
for source in dataSources {
if source.orientation == AVAudioSessionOrientationFront {
try audioSession.setPreferredInput(port)
break
}
}
}
} catch {
print("Failed to configure audio/video capture session")
throw error
}
}
private func configureAssetWriter() throws {
prepareVideoFile()
do {
assetWriter = try AVAssetWriter(url: outputUrl, fileType: AVFileTypeMPEG4)
guard let writer = assetWriter else {
print("Asset writer not created")
return
}
let vidSize = videoSize
let videoSettings: [String: Any] = [AVVideoCodecKey: AVVideoCodecH264,
AVVideoWidthKey: NSNumber(value: Float(vidSize.width)),
AVVideoHeightKey: NSNumber(value: Float(vidSize.height))]
videoInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoSettings)
videoInput?.expectsMediaDataInRealTime = true
videoInput?.transform = getVideoTransform()
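// This transform only ends up as the track's preferredTransform metadata in the file;
// the encoded frames themselves keep the fixed videoSize dimensions.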
var channelLayout = AudioChannelLayout()
memset(&channelLayout, 0, MemoryLayout<AudioChannelLayout>.size)
channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo
let audioSettings: [String: Any] = [AVFormatIDKey: kAudioFormatMPEG4AAC,
AVSampleRateKey: 44100,
AVNumberOfChannelsKey: 2,
AVChannelLayoutKey: Data(bytes: &channelLayout, count: MemoryLayout<AudioChannelLayout>.size)]
audioInput = AVAssetWriterInput(mediaType: AVMediaTypeAudio, outputSettings: audioSettings)
audioInput?.expectsMediaDataInRealTime = true
guard let vi = videoInput else {
print("Video input not configured")
return
}
guard let ai = audioInput else {
print("Audio input not configured")
return
}
if writer.canAdd(vi) {
writer.add(vi)
}
if writer.canAdd(ai) {
writer.add(ai)
}
} catch {
print("Failed to configure asset writer")
throw error
}
}
private func prepareVideoFile() {
if FileManager.default.fileExists(atPath: outputUrl.path) {
do {
try FileManager.default.removeItem(at: outputUrl)
} catch {
print("Unable to remove file at URL \(outputUrl)")
}
}
if !FileManager.default.fileExists(atPath: outputDirectory.path) {
do {
try FileManager.default.createDirectory(at: outputDirectory, withIntermediateDirectories: true, attributes: nil)
} catch {
print("Unable to create directory at URL \(outputDirectory)")
}
}
}
private func configurePreview() {
if let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) {
previewLayer.frame = previewView.bounds
previewView.layer.addSublayer(previewLayer)
}
}
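// NOTE: getVideoSize() below is not called anywhere yet; configureAssetWriter() still uses
// the fixed videoSize directly.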
private func getVideoSize() -> CGSize {
if UIDevice.current.orientation == .landscapeLeft || UIDevice.current.orientation == .landscapeRight {
if videoSize.width > videoSize.height {
return videoSize
} else {
return CGSize(width: videoSize.height, height: videoSize.width)
}
} else {
if videoSize.width < videoSize.height {
return videoSize
} else {
return CGSize(width: videoSize.height, height: videoSize.width)
}
}
}
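// The rotation is chosen from the device orientation at the time the writer is configured;
// the sample buffers themselves are never rotated.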
private func getVideoTransform() -> CGAffineTransform {
switch UIDevice.current.orientation {
case .portraitUpsideDown:
return CGAffineTransform(rotationAngle: CGFloat((M_PI * -90.0)) / 180.0)
case .landscapeLeft:
return CGAffineTransform(rotationAngle: CGFloat((M_PI * -180.0)) / 180.0) // TODO: Add support for front facing camera
// return CGAffineTransform(rotationAngle: CGFloat((M_PI * 0.0)) / 180.0) // TODO: For front facing camera
case .landscapeRight:
return CGAffineTransform(rotationAngle: CGFloat((M_PI * 0.0)) / 180.0) // TODO: Add support for front facing camera
// return CGAffineTransform(rotationAngle: CGFloat((M_PI * -180.0)) / 180.0) // TODO: For front facing camera
default:
return CGAffineTransform(rotationAngle: CGFloat((M_PI * 90.0)) / 180.0)
}
}
//MARK: - Controls
private func startRecording() {
videoQueue.async {
self.captureSession.startRunning()
}
isRecording = true
playStopButton.setTitle("Stop Recording", for: .normal)
print("Recording did start")
}
private func stopRecording() {
if !isRecording {
return
}
videoQueue.async {
self.assetWriter?.finishWriting {
print("Asset writer did finish writing")
self.isWriting = false
}
self.captureSession.stopRunning()
}
isRecording = false
playStopButton.setTitle("Start Recording", for: .normal)
print("Recording did stop")
}
//MARK: - AVCaptureVideoDataOutputSampleBufferDelegate
func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
guard let w = assetWriter else {
print("Asset writer not configured")
return
}
guard let vo = videoOutput else {
print("Video output not configured")
return
}
guard let ao = audioOutput else {
print("Audio output not configured")
return
}
guard let vi = videoInput else {
print("Video input not configured")
return
}
guard let ai = audioInput else {
print("Audio input not configured")
return
}
let st = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
print("Writer status \(w.status.rawValue)")
if let e = w.error {
print("Writer error \(e)")
stopRecording()
return
}
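// The first buffer that arrives is only used to start the writer session (.unknown case below)
// and is not appended, because of the early return.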
switch w.status {
case .unknown:
if !isWriting {
isWriting = true
w.startWriting()
w.startSession(atSourceTime: st)
}
return
case .completed:
print("Video writing completed")
return
case .cancelled:
print("Video writing cancelled")
return
case .failed:
print("Video writing failed")
return
default:
print("Video is writing")
}
if vo == captureOutput {
if !vi.append(sampleBuffer) {
print("Unable to write to video buffer")
}
} else if ao == captureOutput {
if !ai.append(sampleBuffer) {
print("Unable to write to audio buffer")
}
}
}
//MARK: Helpers
private func getDocumentsDirectory() -> URL {
let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
let documentsDirectory = paths[0]
return documentsDirectory
}
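// Minimal alert helper (showAlert is referenced in viewDidLoad but was missing from the listing).
private func showAlert(_ message: String) {
let alert = UIAlertController(title: nil, message: message, preferredStyle: .alert)
alert.addAction(UIAlertAction(title: "OK", style: .default, handler: nil))
present(alert, animated: true, completion: nil)
}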
//MARK: Actions
@IBAction func startStopTapped(sender: AnyObject) {
if isRecording {
stopRecording()
} else {
startRecording()
}
}
}