I am using the captureOutput:didOutputSampleBuffer:fromConnection: delegate method of AVCaptureVideoDataOutput. When testing on the iPad, the image buffer is always 360x480, which seems strange; I would have expected it to match the iPad's screen size.
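For reference, the capture side is set up essentially like this (a simplified sketch; session, device, and videoOutput are placeholder names for what my project actually uses, and I am not setting an explicit sessionPreset):

AVCaptureSession *session = [[AVCaptureSession alloc] init];
// No sessionPreset is set explicitly here, so the session default applies.

AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
if (input) {
    [session addInput:input];
}

AVCaptureVideoDataOutput *videoOutput = [[AVCaptureVideoDataOutput alloc] init];
// Ask for 32BGRA so the CGBitmapContextCreate flags in the delegate match the pixel layout.
videoOutput.videoSettings = @{ (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA) };
[videoOutput setSampleBufferDelegate:self queue:dispatch_queue_create("videoQueue", DISPATCH_QUEUE_SERIAL)];
[session addOutput:videoOutput];
[session startRunning];

The delegate method itself: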
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    @autoreleasepool {
        CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);

        /* Lock the image buffer and read its geometry. */
        CVPixelBufferLockBaseAddress(imageBuffer, 0);
        uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
        size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
        size_t width  = CVPixelBufferGetWidth(imageBuffer);
        size_t height = CVPixelBufferGetHeight(imageBuffer);

        /* Create a CGImageRef from the CVImageBufferRef (assumes 32BGRA pixels). */
        CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
        CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace,
                                                        kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
        CGImageRef newImage = CGBitmapContextCreateImage(newContext);
        NSLog(@"image size: h %zu, w %zu", height, width);

        /* Unlock the image buffer now that the CGImage has been created. */
        CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

        /* The buffer is landscape-oriented, so the touch point's x and y are swapped. */
        CGRect zoom = CGRectMake(self.touchPoint.y, self.touchPoint.x, 120, 120);
        CGImageRef newImage2 = CGImageCreateWithImageInRect(newImage, zoom);

        CGContextRelease(newContext);
        CGColorSpaceRelease(colorSpace);

        /* CGImageCreateWithImageInRect returns NULL if the rect misses the image entirely. */
        if (newImage2) {
            UIImage *zoomedImage = [[UIImage alloc] initWithCGImage:newImage2 scale:1.0 orientation:UIImageOrientationUp];
            [self.zoomedView.layer performSelectorOnMainThread:@selector(setContents:)
                                                    withObject:(__bridge id)zoomedImage.CGImage
                                                 waitUntilDone:YES];
            CGImageRelease(newImage2);
        }
        CGImageRelease(newImage);
    }
}
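One detail I had to handle while testing: CGImageCreateWithImageInRect clips the rect to the image bounds (and returns NULL only if they don't intersect at all), so near the edges the crop can come out smaller than 120x120. To keep it a constant size I clamp the origin first with a small helper (my own addition, not part of the delegate API):

/* Keep a fixed-size crop rect fully inside the image bounds (hypothetical helper). */
static CGRect clampCropRect(CGRect crop, size_t width, size_t height) {
    crop.origin.x = MAX(0, MIN(crop.origin.x, (CGFloat)width  - crop.size.width));
    crop.origin.y = MAX(0, MIN(crop.origin.y, (CGFloat)height - crop.size.height));
    return crop;
}

With that in place, the zoom rect becomes clampCropRect(CGRectMake(self.touchPoint.y, self.touchPoint.x, 120, 120), width, height).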
Is there a reason why the image buffer would be so small, even on an iPad?