
I'm developing an iOS application that records video using the back camera. I've got the preview layer working fine, but as soon as I tap the record button, the preview freezes.

Here is my code. Please help me fix this problem.

Pg5VideoViewController.h

@interface Pg5VideoViewController : UIViewController <AVCaptureVideoDataOutputSampleBufferDelegate,AVCaptureFileOutputRecordingDelegate> {
    BOOL WeAreRecording;
    IBOutlet UIView *videoViewBg;
    AVCaptureSession *_captureSession;
    UIImageView *_imageView;
    CALayer *_customLayer;
    AVCaptureVideoPreviewLayer *_prevLayer;
    UIColor *pickedColor;
    AVCaptureMovieFileOutput *movieFileOutput;
    IBOutlet UIView *theColor;
}
@property (nonatomic,retain) IBOutlet UIView *theColor;
@property (nonatomic,retain) UIColor *pickedColor;
@property (nonatomic,retain) IBOutlet UIView *videoViewBg;
@property (nonatomic, retain) AVCaptureSession *captureSession;
@property (nonatomic, retain) UIImageView *imageView;
@property (nonatomic, retain) CALayer *customLayer;
@property (nonatomic, retain) AVCaptureVideoPreviewLayer *prevLayer;
@property (nonatomic, retain) AVCaptureMovieFileOutput *movieFileOutput;
-(void)initCapture;

-(UIColor *) colorOfPoint:(CGPoint)point;
-(IBAction)takeVideo:(id)sender;
@end

Pg5VideoViewController.m:

@implementation Pg5VideoViewController

@synthesize videoViewBg;
@synthesize captureSession = _captureSession;
@synthesize imageView = _imageView;
@synthesize customLayer = _customLayer;
@synthesize prevLayer = _prevLayer;
@synthesize pickedColor = _pickedColor;
@synthesize theColor = _theColor;
@synthesize movieFileOutput = _movieFileOutput;

#pragma mark -
#pragma mark Initialization
- (id)init {
    self = [super init];
    if (self) {
        self.imageView = nil;
        self.prevLayer = nil;
        self.customLayer = nil;
    }
    return self;
}

- (void)initCapture {
    AVCaptureDeviceInput *captureInput = [AVCaptureDeviceInput 
                                          deviceInputWithDevice:[AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo] 
                                          error:nil];
    movieFileOutput = [[AVCaptureVideoDataOutput alloc] init];

    dispatch_queue_t queue;
    queue = dispatch_queue_create("cameraQueue", NULL);
    [movieFileOutput setSampleBufferDelegate:self queue:queue];
    dispatch_release(queue);

    NSString* key = (NSString*)kCVPixelBufferPixelFormatTypeKey; 
    NSNumber* value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA]; 
    NSDictionary* videoSettings = [NSDictionary dictionaryWithObject:value forKey:key]; 
    [movieFileOutput setVideoSettings:videoSettings]; 

    self.captureSession = [[AVCaptureSession alloc] init];

    [self.captureSession addInput:captureInput];
    [self.captureSession addOutput:movieFileOutput];

    [self.captureSession setSessionPreset:AVCaptureSessionPresetMedium];

    self.customLayer = [CALayer layer];
    self.customLayer.frame = CGRectMake(42, 40, 940, 558);

    //self.customLayer.transform = CATransform3DRotate(CATransform3DIdentity, M_PI/2.0f, 0, 0, 1);
    //self.customLayer.contentsGravity = kCAGravityResizeAspectFill;
    [self.view.layer addSublayer:self.customLayer];

    [self.captureSession startRunning];

}

#pragma mark -
#pragma mark AVCaptureSession delegate
- (void)captureOutput:(AVCaptureOutput *)captureOutput 
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer 
       fromConnection:(AVCaptureConnection *)connection 
{

    NSAutoreleasePool * pool = [[NSAutoreleasePool alloc] init];

    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); 

    CVPixelBufferLockBaseAddress(imageBuffer,0); 

    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer); 
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer); 
    size_t width = CVPixelBufferGetWidth(imageBuffer); 
    size_t height = CVPixelBufferGetHeight(imageBuffer);  

    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB(); 
    CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef newImage = CGBitmapContextCreateImage(newContext); 

    CGContextRelease(newContext); 
    CGColorSpaceRelease(colorSpace);

    [self.customLayer performSelectorOnMainThread:@selector(setContents:) withObject: (id) newImage waitUntilDone:YES];

    UIImage *image= [UIImage imageWithCGImage:newImage scale:1.0 orientation:UIImageOrientationRight];

    CGImageRelease(newImage);

    [self.imageView performSelectorOnMainThread:@selector(setImage:) withObject:image waitUntilDone:YES];

    CVPixelBufferUnlockBaseAddress(imageBuffer,0);

    [pool drain];
} 

- (void)captureOutput:(AVCaptureFileOutput *)captureOutput 
didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL 
      fromConnections:(NSArray *)connections 
                error:(NSError *)error
{

    NSLog(@"didFinishRecordingToOutputFileAtURL - enter");

    BOOL RecordedSuccessfully = YES;
    if ([error code] != noErr)
    {
        id value = [[error userInfo] objectForKey:AVErrorRecordingSuccessfullyFinishedKey];
        if (value)
        {
            RecordedSuccessfully = [value boolValue];
        }
    }
    if (RecordedSuccessfully)
    {
        NSLog(@"didFinishRecordingToOutputFileAtURL - success");
        ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
        if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputFileURL])
        {
            [library writeVideoAtPathToSavedPhotosAlbum:outputFileURL
                                        completionBlock:^(NSURL *assetURL, NSError *error)
             {
                 if (error)
                 {

                 }
             }];
        }

        [library release];      

    }
}



- (void)viewDidAppear:(BOOL)animated {

}

- (IBAction)takeVideo:(id)sender {
    AVCaptureMovieFileOutput *movieFileOutput1 = [[AVCaptureMovieFileOutput alloc] init];

    if(!WeAreRecording) {
        NSLog(@"START RECORDING");
        WeAreRecording = YES;
        self.videoViewBg.backgroundColor = [UIColor redColor];

        NSDateFormatter *formatter;
        NSString *dateString;
        formatter = [[NSDateFormatter alloc]init];
        [formatter setDateFormat:@"dd-MM-yyyy HH:mm:ss"];
        dateString = [formatter stringFromDate:[NSDate date]];
        [formatter release];

        NSLog(@"The dateString is : %@",dateString);

        NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
        NSString *documentsDirectoryPath = [paths objectAtIndex:0];
        NSString *movieFileName = [NSString stringWithFormat: @"%@.mp4",dateString];
        NSString *filePath = [documentsDirectoryPath stringByAppendingPathComponent:movieFileName];

        NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:filePath];
        [self.captureSession stopRunning];
        [self.captureSession beginConfiguration];
//      [self.captureSession removeOutput:movieFileOutput];

        if([self.captureSession canAddOutput:movieFileOutput1])
        {
            [self.captureSession addOutput:movieFileOutput1];
        }
        else
        {
            NSLog(@"Couldn't add still output");
        }

        [movieFileOutput1 startRecordingToOutputFileURL:outputURL recordingDelegate:self];
        [self.captureSession commitConfiguration];
        [self.captureSession startRunning];
        [outputURL release];        
    } else {
        NSLog(@"STOP RECORDING");
        WeAreRecording = NO;
        self.videoViewBg.backgroundColor = [UIColor whiteColor];
        [movieFileOutput1 stopRecording];
        [self.captureSession removeOutput:movieFileOutput1];
    }
}

-(void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event {
    UITouch *touch = [[event allTouches] anyObject];
    CGPoint loc = [touch locationInView:self.view];
    self.pickedColor = [self colorOfPoint:loc];
    self.theColor.backgroundColor = self.pickedColor;
}

-(UIColor *) colorOfPoint:(CGPoint)point {
    unsigned char pixel[4] = {0};
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pixel, 1, 1, 8, 4, colorSpace, kCGImageAlphaPremultipliedLast);

    CGContextTranslateCTM(context, -point.x, -point.y);

    [self.view.layer renderInContext:context];

    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);
    UIColor *color = [UIColor colorWithRed:pixel[0]/255.0 green:pixel[1]/255.0 blue:pixel[2]/255.0 alpha:pixel[3]/255.0];

    return color;
}


// Implement viewDidLoad to do additional setup after loading the view, typically from a nib.
- (void)viewDidLoad {
    [super viewDidLoad];
    [self initCapture];
    WeAreRecording = NO;
    self.videoViewBg.layer.cornerRadius = 55;

}
// Override to allow orientations other than the default portrait orientation.
- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation {
    if(interfaceOrientation == UIInterfaceOrientationLandscapeRight) {
        return YES;
    }
    return NO;
}

- (void)didReceiveMemoryWarning {
    // Releases the view if it doesn't have a superview.
    [super didReceiveMemoryWarning];

    // Release any cached data, images, etc. that aren't in use.
}

- (void)viewDidUnload {
    [super viewDidUnload];
    self.imageView = nil;
    self.customLayer = nil;
    self.prevLayer = nil;
    [self.captureSession stopRunning];

    // Release any retained subviews of the main view.
    // e.g. self.myOutlet = nil;
}


- (void)dealloc {
    [movieFileOutput release];
    [self.captureSession release];
    [super dealloc];
}

@end

Please help.


1 Answer


The problem here is not trivial. AVFoundation simply can't handle AVCaptureMovieFileOutput and AVCaptureVideoDataOutput at the same time. That means you can't display a preview (which requires AVCaptureVideoDataOutput) while recording video (which requires AVCaptureMovieFileOutput). This is stupid, but that's life.
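
To make the limitation concrete, here is a minimal sketch (variable names assumed) of the configuration that runs into it. Both addOutput: calls may appear to succeed, but the data output's sample buffer callbacks stall once recording starts:

AVCaptureSession *session = [[AVCaptureSession alloc] init];
AVCaptureDevice *camera = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:camera error:nil];
[session addInput:input];

// One output to feed preview frames, one to record a file -- this is the
// combination that AVFoundation won't service at the same time.
AVCaptureVideoDataOutput *dataOutput = [[AVCaptureVideoDataOutput alloc] init];
AVCaptureMovieFileOutput *fileOutput = [[AVCaptureMovieFileOutput alloc] init];
[session addOutput:dataOutput];
[session addOutput:fileOutput];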

The only way I know to do this is to use AVCaptureVideoDataOutput alone and, inside captureOutput:didOutputSampleBuffer:fromConnection:, write the frames to the video file manually. The following code snippets should help.

Properties:

@property (strong, nonatomic) AVAssetWriter* recordingAssetWriter;
@property (strong, nonatomic) AVAssetWriterInput* recordingAssetWriterInput;
@property (strong, nonatomic) AVAssetWriterInputPixelBufferAdaptor* recordingPixelBufferAdaptor;

Initializing the video file (when you start recording):

// Init AVAssetWriter
NSError* error = nil;
self.recordingAssetWriter = [[AVAssetWriter alloc] initWithURL:<the video file URL> fileType:AVFileTypeMPEG4 error:&error];

// Init AVAssetWriterInput & AVAssetWriterInputPixelBufferAdaptor
NSDictionary* settings = @{AVVideoWidthKey: @(480), AVVideoHeightKey: @(640), AVVideoCodecKey: AVVideoCodecH264};

self.recordingAssetWriterInput = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo outputSettings:settings];
self.recordingAssetWriterInput.expectsMediaDataInRealTime = YES;
self.recordingPixelBufferAdaptor = [[AVAssetWriterInputPixelBufferAdaptor alloc] initWithAssetWriterInput:self.recordingAssetWriterInput sourcePixelBufferAttributes:@{(NSString*)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA)}];

// Add Input
[self.recordingAssetWriter addInput:self.recordingAssetWriterInput];

// Start ...
_recording = YES;
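
As a concrete (hypothetical) example of <the video file URL> above, you could build a file URL in the Documents directory, much like the question already does:

// Hypothetical construction of <the video file URL>; the file name is arbitrary.
NSString *documentsDir = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
NSString *filePath = [documentsDir stringByAppendingPathComponent:@"recording.mp4"];

// AVAssetWriter fails to start if the target file already exists, so clear it first.
[[NSFileManager defaultManager] removeItemAtPath:filePath error:NULL];

NSURL *videoFileURL = [NSURL fileURLWithPath:filePath];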

Writing frames to the video file:

// Inside the captureOutput:didOutputSampleBuffer:fromConnection: delegate method
// _recording is the flag to see if we're recording
if (_recording) {
    CMTime sampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);

    // The pixel buffer comes straight from the sample buffer
    CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);

    if (self.recordingAssetWriter.status != AVAssetWriterStatusWriting) {
        [self.recordingAssetWriter startWriting];
        [self.recordingAssetWriter startSessionAtSourceTime:sampleTime];
    }

    if (self.recordingAssetWriterInput.readyForMoreMediaData) {
        [self.recordingPixelBufferAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:sampleTime];
    }
}
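
If readyForMoreMediaData is NO, the frame is simply dropped. Since expectsMediaDataInRealTime is set to YES above, the input is tuned for live capture, so an occasional dropped frame is expected rather than fatal.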

Finalizing the video file when you've finished recording:

[self.recordingAssetWriterInput markAsFinished];
[self.recordingAssetWriter finishWritingWithCompletionHandler:^{
    // finishWritingWithCompletionHandler: is asynchronous, so only release
    // the writer objects in here, once writing has actually completed
    self.recordingAssetWriter = nil;
    self.recordingAssetWriterInput = nil;
    self.recordingPixelBufferAdaptor = nil;
}];
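
Putting it together, a stop action might look like the following sketch (stopRecording is a hypothetical method name; ideally run it on the same queue as the sample buffer delegate so no frame is appended after markAsFinished):

// Hypothetical stop action tying the snippets above together.
- (void)stopRecording {
    // Stop appending frames in the capture callback before finishing the file.
    _recording = NO;

    [self.recordingAssetWriterInput markAsFinished];
    [self.recordingAssetWriter finishWritingWithCompletionHandler:^{
        self.recordingAssetWriter = nil;
        self.recordingAssetWriterInput = nil;
        self.recordingPixelBufferAdaptor = nil;
    }];
}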

Note that I've omitted error checking for clarity.
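
If you do want a basic check, the writer exposes its state through its status and error properties, for example:

// Minimal error check: inspect the writer after startWriting,
// or inside the finishWritingWithCompletionHandler: block.
if (self.recordingAssetWriter.status == AVAssetWriterStatusFailed) {
    NSLog(@"Asset writing failed: %@", self.recordingAssetWriter.error);
}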

Answered 2013-06-25 at 03:47.