我想像 CubeDog 应用那样实时捕获带有叠加层的图像——下面的代码是按照此处 http://www.musicalgeometry.com/?p=1681 的教程完成的。
我知道如何在预览层中叠加图像并捕获图像。我查看了 Apple 的示例代码:如果检测到人脸,它会把带红色方框的图像保存到相机胶卷中。
编辑:
我想将后置摄像头的照片保存为 1920 X 1080,前置摄像头保存为 1280 X 960。下面的代码可以实时保存叠加层和图像,但是叠加层的位置有偏差(对齐不准),我不知道为什么,有人可以帮忙吗?
干杯
这是预览层
这是在捕获之后
/// Designated initializer.
/// Creates the AVCaptureSession and selects the high-quality preset so that
/// still captures come out at the device's best resolution.
- (id)init {
    self = [super init];
    if (self != nil) {
        AVCaptureSession *session = [[AVCaptureSession alloc] init];
        self.captureSession = session;
        self.captureSession.sessionPreset = AVCaptureSessionPresetHigh;
    }
    NSLog(@"init called");
    return self;
}
/// Captures a still frame from the camera and composites `overlay` on top of
/// it, then stores the result via -setStillImage: and posts
/// kImageCapturedSuccessfully.
///
/// @param overlay     The image to draw on top of the captured photo.
/// @param overlayRect The overlay's frame in the preview layer's coordinate
///                    space (screen points); it is rescaled to the captured
///                    image's pixel size before drawing.
- (void)takePictureWithOverlay:(UIImage *)overlay andRect:(CGRect)overlayRect
{
    // Lock the still-image connection to portrait and apply the current
    // digital zoom so the capture matches what the preview layer shows.
    AVCaptureConnection *stillImageConnection =
        [self.stillImageOutput connectionWithMediaType:AVMediaTypeVideo];
    [stillImageConnection setVideoOrientation:AVCaptureVideoOrientationPortrait];
    [stillImageConnection setVideoScaleAndCropFactor:self.effectiveScale];

    [self.stillImageOutput captureStillImageAsynchronouslyFromConnection:stillImageConnection
                                                       completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
        if (error) {
            [self displayErrorOnMainQueue:error withMessage:@"Take picture failed"];
        } else {
            // Trivial simple JPEG case.
            NSData *jpegData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
            UIImage *image = [[UIImage alloc] initWithData:jpegData];
            if (image != nil) {
                CGSize imageSize = image.size;
                CGSize overlaySize = overlay.size;

                // BUG FIX: the overlay rect was previously scaled against a
                // hard-coded 320x568 point size (iPhone 5 only), which made
                // the overlay misaligned on every other screen. Scale against
                // the actual screen size the preview layer occupies instead.
                // NOTE(review): if the preview layer does not fill the whole
                // screen, or uses AspectFill gravity with a cropped feed,
                // use the preview layer's bounds here instead — TODO confirm.
                CGSize screenSize = [[UIScreen mainScreen] bounds].size;
                CGFloat xScaleFactor = imageSize.width / screenSize.width;
                CGFloat yScaleFactor = imageSize.height / screenSize.height;

                // Scale 1.0 keeps the context in the photo's native pixel
                // size (same as the old UIGraphicsBeginImageContext).
                UIGraphicsBeginImageContextWithOptions(imageSize, NO, 1.0);
                [image drawInRect:CGRectMake(0, 0, imageSize.width, imageSize.height)];
                [overlay drawInRect:CGRectMake(overlayRect.origin.x * xScaleFactor,
                                               overlayRect.origin.y * yScaleFactor,
                                               overlaySize.width * xScaleFactor,
                                               overlaySize.height * yScaleFactor)];
                UIImage *combinedImage = UIGraphicsGetImageFromCurrentImageContext();
                UIGraphicsEndImageContext();

                [self setStillImage:combinedImage];
            }
        }
        [[NSNotificationCenter defaultCenter] postNotificationName:kImageCapturedSuccessfully object:nil];
    }];
}