I ran into a similar problem when trying to capture a screenshot containing a GLKView and a UIImagePickerController. Sometimes I would get a black screen, sometimes complaints about an invalid context (with code similar to yours). I couldn't find a solution, so I implemented an AVFoundation camera instead and never looked back. Here is some quick source code to help you out.
CameraViewController.h
// Frameworks
#import <CoreVideo/CoreVideo.h>
#import <CoreMedia/CoreMedia.h>
#import <AVFoundation/AVFoundation.h>
#import <UIKit/UIKit.h>
@interface CameraViewController : UIViewController <AVCaptureVideoDataOutputSampleBufferDelegate>
// Camera
@property (strong, nonatomic) AVCaptureSession* captureSession;
@property (strong, nonatomic) AVCaptureVideoPreviewLayer* previewLayer;
@property (strong, nonatomic) UIImage* cameraImage;
@end
CameraViewController.m
#import "CameraViewController.h"
@implementation CameraViewController
- (void)viewDidLoad
{
    [super viewDidLoad];
    [self setupCamera];
}
- (void)setupCamera
{
    // Default back camera as the video input (error handling omitted for brevity)
    AVCaptureDeviceInput* input = [AVCaptureDeviceInput deviceInputWithDevice:[AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo] error:nil];

    // Deliver BGRA frames to the delegate on a serial background queue
    AVCaptureVideoDataOutput* output = [[AVCaptureVideoDataOutput alloc] init];
    output.alwaysDiscardsLateVideoFrames = YES;

    dispatch_queue_t queue;
    queue = dispatch_queue_create("cameraQueue", NULL);
    [output setSampleBufferDelegate:self queue:queue];

    NSString* key = (NSString *) kCVPixelBufferPixelFormatTypeKey;
    NSNumber* value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
    NSDictionary* videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
    [output setVideoSettings:videoSettings];

    self.captureSession = [[AVCaptureSession alloc] init];
    [self.captureSession addInput:input];
    [self.captureSession addOutput:output];
    [self.captureSession setSessionPreset:AVCaptureSessionPresetPhoto];

    self.previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];
    self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;

    // CHECK FOR YOUR APP
    self.previewLayer.frame = CGRectMake(0, 0, self.view.frame.size.height, self.view.frame.size.width);
    self.previewLayer.orientation = AVCaptureVideoOrientationLandscapeRight;
    // CHECK FOR YOUR APP

    [self.view.layer insertSublayer:self.previewLayer atIndex:0];
    [self.captureSession startRunning];
}
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    // Called on cameraQueue for every frame; keep the latest frame around as a UIImage
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer, 0);

    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);

    // Wrap the BGRA pixel data in a bitmap context and copy it out as a CGImage
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef newImage = CGBitmapContextCreateImage(newContext);
    CGContextRelease(newContext);
    CGColorSpaceRelease(colorSpace);

    self.cameraImage = [UIImage imageWithCGImage:newImage];

    CGImageRelease(newImage);
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
}
// Call whenever you need a snapshot
- (UIImage *)snapshot
{
    NSLog(@"SNAPSHOT");
    return self.cameraImage;
}
@end
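For completeness, this is roughly how I would use it from a hosting view controller: embed the camera controller as a child and ask it for a snapshot when needed. A minimal sketch only; the HostViewController class and its cameraVC property are placeholders I made up, and keep in mind that cameraImage is written on the background camera queue, so snapshot simply returns the most recent frame.

// Hypothetical host controller that embeds the camera (names are assumptions)
#import "CameraViewController.h"

@interface HostViewController : UIViewController
@property (strong, nonatomic) CameraViewController* cameraVC;
@end

@implementation HostViewController

- (void)viewDidLoad
{
    [super viewDidLoad];

    // Embed the camera controller as a child so its preview fills this view
    self.cameraVC = [[CameraViewController alloc] init];
    self.cameraVC.view.frame = self.view.bounds;
    [self addChildViewController:self.cameraVC];
    [self.view addSubview:self.cameraVC.view];
    [self.cameraVC didMoveToParentViewController:self];
}

- (void)takeScreenshot
{
    // The most recent frame; cameraImage is updated on the background camera queue
    UIImage* cameraFrame = [self.cameraVC snapshot];
    NSLog(@"Got frame: %@", NSStringFromCGSize(cameraFrame.size));
}

@end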
This code captures the input image at the size of the selected preset (photo in this case: 852x640), so if you want to capture it together with the view, I'd suggest one of the following options:
- Scale, crop and transform the image after capturing it (see the crop sketch after this list). Pros: the camera keeps running smoothly. Cons: more code.
- Add a UIImageView instead of the previewLayer, and update its image in the captureOutput delegate. Pros: what you see is what you get. Cons: it may make your camera run slower.
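For the first option, the crop step could look something like the sketch below. The helper name and the target size are assumptions; the math just mirrors the AVLayerVideoGravityResizeAspectFill behaviour of the preview layer (center-crop to the preview's aspect ratio).

// Hypothetical helper: crop a captured frame to the aspect ratio of the preview,
// mimicking AVLayerVideoGravityResizeAspectFill (helper name is an assumption)
- (UIImage *)image:(UIImage *)image croppedToAspectOfSize:(CGSize)targetSize
{
    CGFloat imageAspect = image.size.width / image.size.height;
    CGFloat targetAspect = targetSize.width / targetSize.height;

    CGRect cropRect;
    if (imageAspect > targetAspect) {
        // Frame is wider than the preview: trim the left/right edges
        CGFloat croppedWidth = image.size.height * targetAspect;
        cropRect = CGRectMake((image.size.width - croppedWidth) / 2.0, 0.0, croppedWidth, image.size.height);
    } else {
        // Frame is taller than the preview: trim the top/bottom edges
        CGFloat croppedHeight = image.size.width / targetAspect;
        cropRect = CGRectMake(0.0, (image.size.height - croppedHeight) / 2.0, image.size.width, croppedHeight);
    }

    CGImageRef croppedRef = CGImageCreateWithImageInRect(image.CGImage, cropRect);
    UIImage* cropped = [UIImage imageWithCGImage:croppedRef scale:image.scale orientation:image.imageOrientation];
    CGImageRelease(croppedRef);
    return cropped;
}

For the second option you would instead dispatch to the main queue inside captureOutput and assign self.cameraImage to your UIImageView there, since UIKit must be touched on the main thread.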
In both cases you'll need to merge the resulting capture with your other images after taking the screenshot (not as hard as it sounds).
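A minimal sketch of that merge step, assuming you already have the camera frame and an overlay image of the rest of your UI (the helper name is made up):

// Hypothetical helper: composite the camera frame with an overlay (e.g. your UI)
- (UIImage *)mergeImage:(UIImage *)background withOverlay:(UIImage *)overlay
{
    UIGraphicsBeginImageContextWithOptions(background.size, NO, background.scale);

    // Draw the camera frame first, then the overlay on top at the same size
    [background drawInRect:CGRectMake(0.0, 0.0, background.size.width, background.size.height)];
    [overlay drawInRect:CGRectMake(0.0, 0.0, background.size.width, background.size.height)];

    UIImage* merged = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return merged;
}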
AVFoundation and its associated frameworks can be quite intimidating, so this is a very lean implementation that does what you need. If you want to dig into the details, take a look at the following samples:
Hope this helps!