我正在尝试使用 AVAssetWriter 捕捉电影,在 iphone 5 中一切正常,捕捉和保存电影就像一个魅力。
但是当我尝试在 iphone 4 中捕获电影时,样本缓冲区会跳过一些帧并且电影不好。
所以,这是我的代码:
- (void) initCaptureSession{
// openSession and set quality to 1280x720
session = [[AVCaptureSession alloc] init];
if([session canSetSessionPreset:AVCaptureSessionPreset640x480]) session.sessionPreset = AVCaptureSessionPresetHigh;
// get devices for audio and video
deviceVideo = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
deviceAudio = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
NSError *error = nil;
// create input of audio and video
inputVideo = [AVCaptureDeviceInput deviceInputWithDevice:deviceVideo error:&error];
if (!inputVideo) NSLog(@"ERROR: trying to open camera: %@", error);
inputAudio = [AVCaptureDeviceInput deviceInputWithDevice:deviceAudio error:&error];
if (!inputAudio) NSLog(@"ERROR: trying to open audio: %@", error);
// CMTime maxDuration = CMTimeMake(60, 1);
// create output audio and video
outputVideo = [[AVCaptureVideoDataOutput alloc] init];
outputVideo.alwaysDiscardsLateVideoFrames = NO;
outputVideo.videoSettings = [NSDictionary dictionaryWithObject: [NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey];
outputAudio = [[AVCaptureAudioDataOutput alloc] init];
// add inputs and outputs in the current session
[session beginConfiguration];
if ([session canAddInput:inputVideo])[session addInput:inputVideo];
if ([session canAddInput:inputAudio])[session addInput:inputAudio];
if ([session canAddOutput:outputVideo]) [session addOutput:outputVideo];
if ([session canAddOutput:outputAudio]) [session addOutput:outputAudio];
[session commitConfiguration];
// tourn of the torch
[deviceVideo lockForConfiguration:&error];
if([deviceVideo hasTorch] && [deviceVideo isTorchModeSupported:AVCaptureTorchModeOff]) [deviceVideo setTorchMode:AVCaptureTorchModeOff];
[deviceVideo unlockForConfiguration];
[self configDevice];
// create the preview view to show the video
captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
[captureVideoPreviewLayer setBackgroundColor:[[UIColor blackColor] CGColor]];
[captureVideoPreviewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
captureVideoPreviewLayer.frame = viewPreview.bounds;
[viewPreview.layer addSublayer:captureVideoPreviewLayer];
CALayer *viewLayer = viewPreview.layer;
[viewLayer setMasksToBounds:YES];
[captureVideoPreviewLayer setFrame:[viewLayer bounds]];
[viewLayer addSublayer:captureVideoPreviewLayer];
// dispatch outputs to delegate in a queue
dispatch_queue_t queue = dispatch_queue_create("MyQueue", NULL);
[outputVideo setSampleBufferDelegate:self queue:queue];
[outputAudio setSampleBufferDelegate:self queue:queue];
// dispatch_release(queue);
[session startRunning];
}
-(BOOL) setupWriter{
urlOutput = [self tempFileURL];
NSError *error = nil;
videoWriter = [[AVAssetWriter alloc] initWithURL:urlOutput fileType:AVFileTypeMPEG4 error:&error];
NSParameterAssert(videoWriter);
// Add metadata
NSArray *existingMetadataArray = videoWriter.metadata;
NSMutableArray *newMetadataArray = nil;
if (existingMetadataArray) {
newMetadataArray = [existingMetadataArray mutableCopy];
} else {
newMetadataArray = [[NSMutableArray alloc] init];
}
AVMutableMetadataItem *mutableItemLocation = [[AVMutableMetadataItem alloc] init];
mutableItemLocation.keySpace = AVMetadataKeySpaceCommon;
mutableItemLocation.key = AVMetadataCommonKeyLocation;
mutableItemLocation.value = [NSString stringWithFormat:@"%+08.4lf%+09.4lf/", location.latitude, location.longitude];
AVMutableMetadataItem *mutableItemModel = [[AVMutableMetadataItem alloc] init];
mutableItemModel.keySpace = AVMetadataKeySpaceCommon;
mutableItemModel.key = AVMetadataCommonKeyModel;
mutableItemModel.value = [[UIDevice currentDevice] model];
[newMetadataArray addObject:mutableItemLocation];
[newMetadataArray addObject:mutableItemModel];
videoWriter.metadata = newMetadataArray;
// video Configuration
NSDictionary *videoCleanApertureSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:640], AVVideoCleanApertureWidthKey,
[NSNumber numberWithInt:360], AVVideoCleanApertureHeightKey,
[NSNumber numberWithInt:2], AVVideoCleanApertureHorizontalOffsetKey,
[NSNumber numberWithInt:2], AVVideoCleanApertureVerticalOffsetKey,
nil];
NSDictionary *videoAspectRatioSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:1], AVVideoPixelAspectRatioHorizontalSpacingKey,
[NSNumber numberWithInt:1],AVVideoPixelAspectRatioVerticalSpacingKey,
nil];
NSDictionary *codecSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:1024000], AVVideoAverageBitRateKey,
[NSNumber numberWithInt:90],AVVideoMaxKeyFrameIntervalKey,
videoCleanApertureSettings, AVVideoCleanApertureKey,
videoAspectRatioSettings, AVVideoPixelAspectRatioKey,
AVVideoProfileLevelH264Main30, AVVideoProfileLevelKey,
nil];
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
codecSettings,AVVideoCompressionPropertiesKey,
[NSNumber numberWithInt:640], AVVideoWidthKey,
[NSNumber numberWithInt:360], AVVideoHeightKey,
nil];
videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
NSParameterAssert(videoWriterInput);
videoWriterInput.expectsMediaDataInRealTime = YES;
// Add the audio input
AudioChannelLayout acl;
bzero( &acl, sizeof(acl));
acl.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;
NSDictionary* audioOutputSettings = nil;
// Both type of audio inputs causes output video file to be corrupted.
// if( NO ) {
// should work from iphone 3GS on and from ipod 3rd generation
audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[ NSNumber numberWithInt: kAudioFormatMPEG4AAC ], AVFormatIDKey,
[ NSNumber numberWithInt: 2 ], AVNumberOfChannelsKey,
[ NSNumber numberWithFloat: 44100.0 ], AVSampleRateKey,
[ NSNumber numberWithInt: 64000 ], AVEncoderBitRateKey,
[ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey,
nil];
// } else {
// // should work on any device requires more space
// audioOutputSettings = [ NSDictionary dictionaryWithObjectsAndKeys:
// [ NSNumber numberWithInt: kAudioFormatAppleLossless ], AVFormatIDKey,
// [ NSNumber numberWithInt: 16 ], AVEncoderBitDepthHintKey,
// [ NSNumber numberWithFloat: 44100.0 ], AVSampleRateKey,
// [ NSNumber numberWithInt: 1 ], AVNumberOfChannelsKey,
// [ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey,
// nil ];
// }
audioWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType: AVMediaTypeAudio outputSettings: audioOutputSettings];
audioWriterInput.expectsMediaDataInRealTime = YES;
// add input
[videoWriter addInput:videoWriterInput];
[videoWriter addInput:audioWriterInput];
return YES;
}
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection{
if( !CMSampleBufferDataIsReady(sampleBuffer) ){
NSLog( @"sample buffer is not ready. Skipping sample" );
return;
}
if(isRecording == YES ){
lastSampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
if(videoWriter.status != AVAssetWriterStatusWriting ){
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:lastSampleTime];
}
if( captureOutput == outputVideo ){
[self newVideoSample:sampleBuffer];
} else if( captureOutput == outputAudio) {
[self newAudioSample:sampleBuffer];
}
}
}
-(void) newVideoSample:(CMSampleBufferRef)sampleBuffer{
if( isRecording ){
if( videoWriter.status > AVAssetWriterStatusWriting ) {
NSLog(@"Warning: writer status is %d", videoWriter.status);
if( videoWriter.status == AVAssetWriterStatusFailed )
NSLog(@"Error: %@", videoWriter.error);
return;
}
while (!videoWriterInput.readyForMoreMediaData) {
NSDate *maxDate = [NSDate dateWithTimeIntervalSinceNow:0.1];
[[NSRunLoop currentRunLoop] runUntilDate:maxDate];
}
if( ![videoWriterInput appendSampleBuffer:sampleBuffer] )
NSLog(@"Unable to write to video input");
}
}
-(void) newAudioSample:(CMSampleBufferRef)sampleBuffer{
if( isRecording ){
if( videoWriter.status > AVAssetWriterStatusWriting ) {
NSLog(@"Warning: writer status is %d", videoWriter.status);
if( videoWriter.status == AVAssetWriterStatusFailed )
NSLog(@"Error: %@", videoWriter.error);
return;
}
while (!audioWriterInput.readyForMoreMediaData) {
NSDate *maxDate = [NSDate dateWithTimeIntervalSinceNow:0.1];
[[NSRunLoop currentRunLoop] runUntilDate:maxDate];
}
if( ![audioWriterInput appendSampleBuffer:sampleBuffer] )
NSLog(@"Unable to write to audio input");
}
}
-(void) startVideoRecording {
if( !isRecording ){
NSLog(@"start video recording...");
if( ![self setupWriter] ) {
NSLog(@"Setup Writer Failed") ;
return;
}
isRecording = YES;
recorded = NO;
}
}
-(void) stopVideoRecording {
if( isRecording ) {
isRecording = NO;
btRecord.hidden = NO;
btRecording.hidden = YES;
[timerToRecord invalidate];
timerToRecord = nil;
// [session stopRunning];
[videoWriter finishWritingWithCompletionHandler:^{
if (videoWriter.status != AVAssetWriterStatusFailed && videoWriter.status == AVAssetWriterStatusCompleted) {
videoWriterInput = nil;
audioWriterInput = nil;
videoWriter = nil;
NSLog(@"finishWriting returned succeful");
recorded = YES;
} else {
NSLog(@"finishWriting returned unsucceful") ;
}
}];
NSLog(@"video recording stopped");
[self performSelector:@selector(openPlayer) withObject:nil afterDelay:0.5];
}
}
当我删除此行时:
while (!audioWriterInput.readyForMoreMediaData) {
NSDate *maxDate = [NSDate dateWithTimeIntervalSinceNow:0.1];
[[NSRunLoop currentRunLoop] runUntilDate:maxDate];
}
我收到了这个错误:
* 由于未捕获的异常“NSInternalInconsistencyException”而终止应用程序,原因:“* -[AVAssetWriterInput appendSampleBuffer:] 当 readyForMoreMediaData 为 NO 时,无法附加样本缓冲区。”
在 iphone 5 中我没有使用这个循环。
我在这里阅读了一些示例,但我不明白如何在 iphone 4 中使电影更流畅。
如果有人有使用 AVAssetWriter 为 iphone 3gs、iphone 4、iphone 4s 和 iphone 5 制作电影的建议或完整示例,我将非常感谢。
谢谢