I am using an AVCaptureSession to capture frames from the camera via the setSampleBufferDelegate method of the AVCaptureVideoDataOutput class. The delegate method is shown below; as you can see, I convert each frame to a UIImage and display it in a UIImageView. I want to save each UIImage to disk and store its URL in a new managed object, but I don't know how to obtain a managedObjectContext correctly, because each delegate callback arrives on a serial dispatch queue and may run on a thread other than the main thread. Can anyone suggest a solution that combines Core Data with dispatch queues, so that I can build a collection of images stored on disk, each corresponding to a managed object?
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];

    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);

    /* Lock the image buffer */
    CVPixelBufferLockBaseAddress(imageBuffer, 0);

    /* Get information about the image */
    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);

    /* Create a CGImageRef from the CVImageBufferRef */
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef newImage = CGBitmapContextCreateImage(newContext);

    /* Release the context and color space */
    CGContextRelease(newContext);
    CGColorSpaceRelease(colorSpace);

    /* Display the result in the image view. The image has to be rotated so the video is
       shown with the correct orientation, and since we are not on the main thread the
       UIImageView has to be updated on the main thread. */
    UIImage *image = [UIImage imageWithCGImage:newImage scale:1.0 orientation:UIImageOrientationRight];

    /* Release the CGImageRef */
    CGImageRelease(newImage);

    [self.imageView performSelectorOnMainThread:@selector(setImage:) withObject:image waitUntilDone:YES];

    /* Unlock the image buffer */
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

    [pool drain];
}
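
To make the question concrete, here is a minimal sketch of what I have in mind (the "Frame" entity, its "imagePath" attribute, and a self.managedObjectContext created on and used only from the main thread are all hypothetical names of mine). I write the JPEG to disk on the capture queue and then dispatch the Core Data work back to the main queue, but I'm not sure this is the right pattern:

- (void)saveFrameImage:(UIImage *)image {
    // Write the JPEG on the current (capture) queue; only touch Core Data on the main thread.
    NSData *jpegData = UIImageJPEGRepresentation(image, 0.8);
    NSString *fileName = [NSString stringWithFormat:@"%@.jpg", [[NSProcessInfo processInfo] globallyUniqueString]];
    NSString *docsDir = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) lastObject];
    NSString *path = [docsDir stringByAppendingPathComponent:fileName];
    if (![jpegData writeToFile:path atomically:YES]) {
        return;
    }

    dispatch_async(dispatch_get_main_queue(), ^{
        // Create a managed object pointing at the file we just wrote.
        NSManagedObject *frame = [NSEntityDescription insertNewObjectForEntityForName:@"Frame"
                                                               inManagedObjectContext:self.managedObjectContext];
        [frame setValue:path forKey:@"imagePath"];

        NSError *error = nil;
        if (![self.managedObjectContext save:&error]) {
            NSLog(@"Core Data save failed: %@", error);
        }
    });
}

Is dispatching to the main queue like this acceptable at 30 fps, or is there a better way to give each queue its own context?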