I am building an app that records video at the device's maximum frame rate (i.e. 120 fps on the iPhone 5s and 240 fps on the 6 and 6s). I have managed to configure the AVCaptureDevice and set activeVideoMinFrameDuration / activeVideoMaxFrameDuration, and when I print currentDevice.activeFormat.videoSupportedFrameRateRanges to the log everything looks right.
However, when I try to save the video it does get saved, but at the normal frame rate rather than at 120 or 240 fps.
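For reference, the frame rate a saved clip was actually written at can be read back from the file like this (a minimal sketch; outputFileURL here stands for the URL handed to the recording delegate):

let asset = AVAsset(URL: outputFileURL)
if let videoTrack = asset.tracksWithMediaType(AVMediaTypeVideo).first {
    // nominalFrameRate reports the frame rate the video track was written with
    print("Recorded at \(videoTrack.nominalFrameRate) fps")
}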
Can anyone please help me figure this out? Any help would be much appreciated.
Thanks in advance.
PS: here is my complete code so far:
import UIKit
import AVFoundation
import AVKit
import AssetsLibrary
class ViewController: UIViewController, AVCaptureFileOutputRecordingDelegate {

    @IBOutlet weak var cameraButton: UIButton!

    let captureSession = AVCaptureSession()
    var currentDevice: AVCaptureDevice?
    var videoFileOutput: AVCaptureMovieFileOutput?
    var cameraPreviewLayer: AVCaptureVideoPreviewLayer?
    var outputPath: String = ""
    var backgroundRecordId: UIBackgroundTaskIdentifier = UIBackgroundTaskInvalid
    var isRecording = false
    override func viewDidLoad() {
        super.viewDidLoad()

        // Preset the session for capturing video in high quality
        captureSession.sessionPreset = AVCaptureSessionPresetHigh

        // Get the available devices that are capable of capturing video
        let devices = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo) as! [AVCaptureDevice]

        // Get the back-facing camera for taking videos
        for device in devices {
            if device.position == AVCaptureDevicePosition.Back {
                currentDevice = device
                configureDevice()
            }
        }

        let captureDeviceInput: AVCaptureDeviceInput
        do {
            captureDeviceInput = try AVCaptureDeviceInput(device: currentDevice)
        } catch {
            print(error)
            return
        }

        // Configure the session with the output for capturing video
        videoFileOutput = AVCaptureMovieFileOutput()

        // Configure the session with the input and the output devices
        captureSession.addInput(captureDeviceInput)
        captureSession.addOutput(videoFileOutput)

        // Provide a camera preview
        cameraPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        view.layer.addSublayer(cameraPreviewLayer!)
        cameraPreviewLayer?.videoGravity = AVLayerVideoGravityResizeAspectFill
        cameraPreviewLayer?.frame = view.layer.frame

        // Bring the camera button to front
        view.bringSubviewToFront(cameraButton)

        captureSession.startRunning()
    }
    func configureDevice() {
        var bestFormat: AVCaptureDeviceFormat? = nil
        var bestFrameRateRange: AVFrameRateRange? = nil
        var bestPixelArea: Int32 = 0

        // Pick the format with the highest supported frame rate; on a tie, prefer the larger resolution
        for format in currentDevice!.formats {
            let dims: CMVideoDimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription)
            let pixelArea: Int32 = dims.width * dims.height
            let ranges = format.videoSupportedFrameRateRanges as! [AVFrameRateRange]
            for range in ranges {
                //print("[", dims.width, ",", dims.height, "] : ", range.maxFrameRate)
                if bestFrameRateRange == nil || range.maxFrameRate > bestFrameRateRange!.maxFrameRate || ((range.maxFrameRate == bestFrameRateRange!.maxFrameRate) && (pixelArea > bestPixelArea)) {
                    bestFormat = format as? AVCaptureDeviceFormat
                    bestFrameRateRange = range
                    bestPixelArea = pixelArea
                }
            }
        }

        do {
            try currentDevice!.lockForConfiguration()
            currentDevice!.activeFormat = bestFormat
            // Use the shortest frame duration (i.e. the highest frame rate) the chosen range supports
            currentDevice!.activeVideoMinFrameDuration = bestFrameRateRange!.minFrameDuration
            currentDevice!.activeVideoMaxFrameDuration = bestFrameRateRange!.minFrameDuration
            print(currentDevice!.activeFormat.videoSupportedFrameRateRanges)
            currentDevice!.unlockForConfiguration()
        } catch {
            print(error)
        }
    }
    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    // MARK: - AVCaptureFileOutputRecordingDelegate methods

    func captureOutput(captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAtURL outputFileURL: NSURL!, fromConnections connections: [AnyObject]!, error: NSError!) {
        if error != nil {
            print(error)
            return
        }

        let backgroundRecordId: UIBackgroundTaskIdentifier = self.backgroundRecordId
        self.backgroundRecordId = UIBackgroundTaskInvalid

        ALAssetsLibrary().writeVideoAtPathToSavedPhotosAlbum(outputFileURL, completionBlock: {
            (assetURL: NSURL!, error: NSError!) in
            if error != nil {
                print(error)
            }

            do {
                try NSFileManager.defaultManager().removeItemAtURL(outputFileURL)
            } catch _ {
            }

            if backgroundRecordId != UIBackgroundTaskInvalid {
                UIApplication.sharedApplication().endBackgroundTask(backgroundRecordId)
            }
        })

        performSegueWithIdentifier("playVideo", sender: outputFileURL)
    }

    // MARK: - Segue methods

    override func prepareForSegue(segue: UIStoryboardSegue, sender: AnyObject?) {
        if segue.identifier == "playVideo" {
            let videoPlayerViewController = segue.destinationViewController as! AVPlayerViewController
            let videoFileURL = sender as! NSURL
            videoPlayerViewController.player = AVPlayer(URL: videoFileURL)
        }
    }

    // MARK: - Action methods

    @IBAction func unwindToCamera(segue: UIStoryboardSegue) {
    }

    @IBAction func capture(sender: AnyObject) {
        if !isRecording {
            isRecording = true

            UIView.animateWithDuration(0.5, delay: 0.0, options: [.Repeat, .Autoreverse, .AllowUserInteraction], animations: { () -> Void in
                self.cameraButton.transform = CGAffineTransformMakeScale(0.5, 0.5)
            }, completion: nil)

            let outputPath: String = NSTemporaryDirectory() + "output.mov"
            let outputFileURL = NSURL(fileURLWithPath: outputPath)
            videoFileOutput?.startRecordingToOutputFileURL(outputFileURL, recordingDelegate: self)
        } else {
            isRecording = false

            UIView.animateWithDuration(0.5, delay: 1.0, options: [], animations: { () -> Void in
                self.cameraButton.transform = CGAffineTransformMakeScale(1.0, 1.0)
            }, completion: nil)
            cameraButton.layer.removeAllAnimations()

            videoFileOutput?.stopRecording()
        }
    }
}