I have some code that sets up a capture session from the camera to process the frames with OpenCV, and then sets the image property of a UIImageView with a UIImage generated from each frame. When the app starts, the image view's image is nil and no frames show up until I push another view controller onto the stack and then pop it off. Then the image stays the same until I do it again. NSLog statements show that the callback is called at approximately the correct frame rate. Any ideas why it doesn't show up? I reduced the frame rate all the way down to 2 frames a second. Is it not processing fast enough?
Here's the code:
- (void)setupCaptureSession {
    NSError *error = nil;

    // Create the session
    AVCaptureSession *session = [[AVCaptureSession alloc] init];

    // Configure the session to produce lower resolution video frames, if your
    // processing algorithm can cope. We'll specify low quality for the
    // chosen device.
    session.sessionPreset = AVCaptureSessionPresetLow;

    // Find a suitable AVCaptureDevice
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];

    // Create a device input with the device and add it to the session.
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device
                                                                        error:&error];
    if (!input) {
        // Handle the error appropriately instead of adding a nil input.
        NSLog(@"Failed to create device input: %@", error);
        return;
    }
    [session addInput:input];

    // Create a VideoDataOutput and add it to the session
    AVCaptureVideoDataOutput *output = [[[AVCaptureVideoDataOutput alloc] init] autorelease];
    output.alwaysDiscardsLateVideoFrames = YES;
    [session addOutput:output];

    // Configure the output: deliver sample buffers to a serial dispatch queue
    dispatch_queue_t queue = dispatch_queue_create("myQueue", NULL);
    [output setSampleBufferDelegate:self queue:queue];
    dispatch_release(queue);

    // Specify the pixel format
    output.videoSettings =
        [NSDictionary dictionaryWithObject:
                          [NSNumber numberWithInt:kCVPixelFormatType_32BGRA]
                                    forKey:(id)kCVPixelBufferPixelFormatTypeKey];

    // Cap the frame rate: a minFrameDuration of 1/1 second means at most
    // one frame per second.
    output.minFrameDuration = CMTimeMake(1, 1);

    // Start the session running to start the flow of data
    [session startRunning];

    // Assign session to an ivar.
    [self setSession:session];
}
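For reference, the session is kept alive through a property on the same object (a sketch of my declaration; the exact memory semantics are an assumption, but this is pre-ARC code, hence retain):

// Assumed property backing the -setSession: call above (pre-ARC).
@property (nonatomic, retain) AVCaptureSession *session;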
// Create a UIImage from sample buffer data
- (UIImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer {
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);

    // Lock the base address of the pixel buffer
    CVPixelBufferLockBaseAddress(imageBuffer, 0);

    // Get the number of bytes per row for the pixel buffer
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    // Get the pixel buffer width and height
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);

    // Create a device-dependent RGB color space
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    if (!colorSpace) {
        NSLog(@"CGColorSpaceCreateDeviceRGB failure");
        // Unlock before bailing out so the buffer isn't left locked.
        CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
        return nil;
    }

    // Get the base address of the pixel buffer
    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
    // Get the data size for contiguous planes of the pixel buffer
    size_t bufferSize = CVPixelBufferGetDataSize(imageBuffer);

    // Create a Quartz direct-access data provider that uses data we supply
    CGDataProviderRef provider = CGDataProviderCreateWithData(NULL, baseAddress,
                                                              bufferSize, NULL);
    // Create a bitmap image from data supplied by the data provider
    CGImageRef cgImage = CGImageCreate(width, height, 8, 32, bytesPerRow,
                                       colorSpace,
                                       kCGImageAlphaNoneSkipFirst | kCGBitmapByteOrder32Little,
                                       provider, NULL, true, kCGRenderingIntentDefault);
    CGDataProviderRelease(provider);
    CGColorSpaceRelease(colorSpace);

    // Create and return an image object representing the specified Quartz image
    UIImage *image = [UIImage imageWithCGImage:cgImage];
    CGImageRelease(cgImage);

    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

    return image;
}
// Delegate routine that is called when a sample buffer was written
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection {
    // Create a UIImage from the sample buffer data
    UIImage *image = [self imageFromSampleBuffer:sampleBuffer];
    [self.delegate cameraCaptureGotFrame:image];
}