I'm trying to build an app that shows a camera preview, starts recording it (with the audio input) under certain conditions, and finally replays the recorded movie.
I've written one class each for preview/recording/playback, plus a controller that coordinates them.
Each piece seems to work perfectly when used on its own, but I can't get them to work together: when the recorded video is replayed, the sound plays fine, but the image takes about five seconds to appear and then stutters.
Here is my code:
Preview:
- (void)createSession
{
    _session = [[AVCaptureSession alloc] init];

    // Prefer the front camera; fall back to the default video device.
    AVCaptureDevice *device = [AVCaptureDevice deviceWithUniqueID:FRONT_CAMERA_ID];
    if (!device) device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];

    NSError *error = nil;
    _cVideoInput = [[AVCaptureDeviceInput deviceInputWithDevice:device error:&error] retain];
    if (!error) [_session addInput:_cVideoInput];

    device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
    error = nil;
    _cAudioInput = [[AVCaptureDeviceInput deviceInputWithDevice:device error:&error] retain];
    if (!error) [_session addInput:_cAudioInput];

    _cameraLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:_session];
    _cameraLayer.frame = self.bounds;
    [self.layer addSublayer:_cameraLayer];

    _videoOutput = [[AVCaptureVideoDataOutput alloc] init];
    [_session setSessionPreset:AVCaptureSessionPreset640x480];
    [_videoOutput setVideoSettings:[NSDictionary dictionaryWithContentsOfFile:VIDEO_SETTINGS]];
    _audioOutput = [[AVCaptureAudioDataOutput alloc] init];

    // Both data outputs deliver their sample buffers on the same serial queue.
    dispatch_queue_t queue = dispatch_queue_create(OUTPUT_QUEUE_NAME, NULL);
    [_videoOutput setSampleBufferDelegate:self queue:queue];
    [_session addOutput:_videoOutput];
    [_audioOutput setSampleBufferDelegate:self queue:queue];
    [_session addOutput:_audioOutput];
    dispatch_set_context(queue, self);
    dispatch_set_finalizer_f(queue, queue_finalizer);
    dispatch_release(queue);

    [_session startRunning];
}
- (void)deleteSession
{
    [_session stopRunning];

    [(AVCaptureVideoPreviewLayer *)_cameraLayer setSession:nil];
    [_cameraLayer removeFromSuperlayer];
    [_cameraLayer release];
    _cameraLayer = nil;

    [_audioOutput setSampleBufferDelegate:nil queue:NULL];
    [_videoOutput setSampleBufferDelegate:nil queue:NULL];
    [_audioOutput release];
    _audioOutput = nil;
    [_videoOutput release];
    _videoOutput = nil;

    [_cAudioInput release];
    _cAudioInput = nil;
    [_cVideoInput release];
    _cVideoInput = nil;

    NSArray *inputs = [_session inputs];
    for (AVCaptureInput *input in inputs)
        [_session removeInput:input];

    NSArray *outputs = [_session outputs];
    for (AVCaptureOutput *output in outputs)
        [_session removeOutput:output];

    [_session release];
    _session = nil;
}
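Both data outputs share the delegate set above; the callback itself only forwards buffers, so I left it out. Simplified, it looks like this (in the real app the routing goes through the controller, and _recorder stands in for the recording object):

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    // Route each buffer to the recording class based on which output produced it.
    if (captureOutput == _videoOutput)
        [_recorder RecordingVideoWithBuffer:sampleBuffer];
    else if (captureOutput == _audioOutput)
        [_recorder RecordingAudioWithBuffer:sampleBuffer];
}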
Recording:
- (void)createWriter
{
    NSString *file = [self file];
    if ([[NSFileManager defaultManager] fileExistsAtPath:file])
        [[NSFileManager defaultManager] removeItemAtPath:file error:NULL];

    NSError *error = nil;
    _writer = [[AVAssetWriter assetWriterWithURL:[NSURL fileURLWithPath:file] fileType:AVFileTypeQuickTimeMovie error:&error] retain];
    if (error)
    {
        [_writer release];
        _writer = nil;
        NSLog(@"%@", error);
        return;
    }

    // Audio track: mono AAC at 44.1 kHz / 64 kbps.
    AudioChannelLayout acl;
    bzero(&acl, sizeof(acl));
    acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
    NSDictionary *settings = [NSDictionary dictionaryWithObjectsAndKeys:
                              [NSNumber numberWithInt:kAudioFormatMPEG4AAC], AVFormatIDKey,
                              [NSNumber numberWithFloat:44100.0], AVSampleRateKey,
                              [NSNumber numberWithInt:1], AVNumberOfChannelsKey,
                              [NSNumber numberWithInt:64000], AVEncoderBitRateKey,
                              [NSData dataWithBytes:&acl length:sizeof(acl)], AVChannelLayoutKey,
                              nil];
    _wAudioInput = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:settings] retain];
    [_writer addInput:_wAudioInput];

    // Video track: H.264 at 640x480, matching the session preset.
    settings = [NSDictionary dictionaryWithObjectsAndKeys:
                AVVideoCodecH264, AVVideoCodecKey,
                [NSNumber numberWithInt:640], AVVideoWidthKey,
                [NSNumber numberWithInt:480], AVVideoHeightKey,
                nil];
    _wVideoInput = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:settings] retain];
    [_writer addInput:_wVideoInput];
}
- (void)deleteWriter
{
    [_wVideoInput release];
    _wVideoInput = nil;
    [_wAudioInput release];
    _wAudioInput = nil;
    [_writer release];
    _writer = nil;
}
- (void)RecordingAudioWithBuffer:(CMSampleBufferRef)sampleBuffer
{
    if (![self canRecordBuffer:sampleBuffer])
        return;
    if ([_wAudioInput isReadyForMoreMediaData])
        [_wAudioInput appendSampleBuffer:sampleBuffer];
}

- (void)RecordingVideoWithBuffer:(CMSampleBufferRef)sampleBuffer
{
    if (![self canRecordBuffer:sampleBuffer])
        return;
    if ([_wVideoInput isReadyForMoreMediaData])
        [_wVideoInput appendSampleBuffer:sampleBuffer];
}
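canRecordBuffer: isn't shown above; roughly, it checks a _recording flag set by the voice trigger and starts the writer session at the timestamp of the first accepted buffer, along these lines (a simplified sketch, not the exact code):

- (BOOL)canRecordBuffer:(CMSampleBufferRef)sampleBuffer
{
    // _recording is the flag flipped by the voice trigger (simplified).
    if (!_recording || !_writer)
        return NO;
    // On the first accepted buffer, start writing and anchor the session
    // to that buffer's presentation timestamp.
    if (_writer.status == AVAssetWriterStatusUnknown)
    {
        [_writer startWriting];
        [_writer startSessionAtSourceTime:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)];
    }
    return _writer.status == AVAssetWriterStatusWriting;
}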
Playback:
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
{
    AVPlayerItem *item = (AVPlayerItem *)object;
    [item removeObserver:self forKeyPath:@"status"];

    switch (item.status)
    {
        case AVPlayerItemStatusReadyToPlay:
            [_player seekToTime:kCMTimeZero];
            [_player play];
            [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(finishPlaying:) name:AVPlayerItemDidPlayToEndTimeNotification object:item];
            break;
        case AVPlayerItemStatusUnknown:
        case AVPlayerItemStatusFailed:
            break;
        default:
            break;
    }
}
- (void)finishPlaying:(NSNotification *)notification
{
    [_player pause];

    [_playerLayer removeFromSuperlayer];
    [_playerLayer release];
    _playerLayer = nil;

    [_player release];
    _player = nil;

    [[NSNotificationCenter defaultCenter] removeObserver:self name:AVPlayerItemDidPlayToEndTimeNotification object:nil];
}
- (void)play:(NSString *)path
{
    _player = [[AVPlayer playerWithURL:[NSURL fileURLWithPath:path]] retain];

    _playerLayer = [[AVPlayerLayer playerLayerWithPlayer:_player] retain];
    _playerLayer.transform = CATransform3DScale(CATransform3DMakeRotation(M_PI_2, 0, 0, 1), 1, -1, 1);
    _playerLayer.frame = self.bounds;
    [self.layer addSublayer:_playerLayer];

    [_player.currentItem addObserver:self forKeyPath:@"status" options:0 context:NULL];
}
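For completeness, this is roughly what happens when the voice trigger stops the recording, before the controller calls play: with the finished file. It's a simplified sketch; _delegate and recorderDidFinishWithFile: are placeholders for my actual controller callback:

- (void)finishRecording
{
    _recording = NO;

    // Close both tracks, then the file itself. finishWriting is the
    // synchronous variant that returns a BOOL.
    [_wAudioInput markAsFinished];
    [_wVideoInput markAsFinished];
    if ([_writer finishWriting])
        [_delegate recorderDidFinishWithFile:[self file]]; // controller then calls play:
    else
        NSLog(@"%@", _writer.error);
}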