我正在尝试让视频与 AVFoundation 一起使用。目前我只有一个代码,它应该从相机获取输入并将其显示在屏幕上的 UIImage 上。但是,我只是得到一个白屏。
-(CIContext*)context{
if(!_context){
_context = [CIContext contextWithOptions:nil];
}
return _context;
}
-(void)captureOutput:(AVCaptureOutput *)captureOutput didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection{
CVPixelBufferRef pb = CMSampleBufferGetImageBuffer(sampleBuffer);
CIImage *ciImage = [CIImage imageWithCVPixelBuffer:pb];
CGImageRef ref = [self.context createCGImage:ciImage fromRect:ciImage.extent];
self.imageView.image = [UIImage imageWithCGImage:ref scale:1.0 orientation:UIImageOrientationRight];
CGImageRelease(ref);
}
- (void)viewDidLoad
{
[super viewDidLoad];
// Do any additional setup after loading the view, typically from a nib.
self.session = [[AVCaptureSession alloc] init];
//the resolution of the capturing session
self.session.sessionPreset = AVCaptureSessionPreset352x288;
self.videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
self.videoInput = [AVCaptureDeviceInput deviceInputWithDevice:
self.videoDevice error:nil];
self.frameOutput = [[AVCaptureVideoDataOutput alloc] init];
self.frameOutput.videoSettings = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey];
[self.session addInput:self.videoInput];
[self.session addOutput:self.frameOutput];
[self.frameOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
[self.session startRunning];
}
有人可以帮忙吗?