I am using a screen recorder to capture the screen. It works perfectly when an ordinary view fills the iPhone screen. But when an AVCaptureVideoPreviewLayer is shown with overlay buttons, the saved screen-capture video shows the overlay buttons without the AVCaptureVideoPreviewLayer content. I added the overlay following this tutorial. How can I fix this?
1 Answer
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    @autoreleasepool {
        if ([connection isVideoOrientationSupported])
            [connection setVideoOrientation:[self cameraOrientation]];
        CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);

        /* Lock the image buffer */
        CVPixelBufferLockBaseAddress(imageBuffer, 0);

        /* Get information about the image */
        uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
        size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
        size_t width = CVPixelBufferGetWidth(imageBuffer);
        size_t height = CVPixelBufferGetHeight(imageBuffer);

        /* Create a CGImageRef from the CVImageBufferRef (assumes 32BGRA frames) */
        CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
        CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace,
                                                        kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
        CGImageRef newImage = CGBitmapContextCreateImage(newContext);

        /* Unlock the buffer and release the context and color space */
        CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
        CGContextRelease(newContext);
        CGColorSpaceRelease(colorSpace);

        /* Note: `image` (the orientation-corrected copy) is created but never used; only `image1` is kept */
        UIImage *image = [UIImage imageWithCGImage:newImage scale:1.0 orientation:UIImageOrientationRight];
        image1 = [UIImage imageWithCGImage:newImage];

        /* Release the CGImageRef */
        CGImageRelease(newImage);

        dispatch_sync(dispatch_get_main_queue(), ^{
            [self.imageView setImage:image1];
        });
    }
}
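For the bitmap context above to interpret the frame bytes correctly, the capture output has to be delivering 32BGRA frames. That setup is not shown in the question; a minimal sketch, assuming an AVCaptureSession ivar named session (an assumed name):

    // Sketch (assumed setup, not shown in the question): configure the video
    // data output to deliver 32BGRA frames to the delegate above.
    AVCaptureVideoDataOutput *videoOutput = [[AVCaptureVideoDataOutput alloc] init];
    videoOutput.videoSettings = @{ (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA) };
    dispatch_queue_t frameQueue = dispatch_queue_create("camera.frames", DISPATCH_QUEUE_SERIAL);
    [videoOutput setSampleBufferDelegate:self queue:frameQueue];
    if ([session canAddOutput:videoOutput]) {   // `session` is an assumed AVCaptureSession ivar
        [session addOutput:videoOutput];
    }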
writeSample: is run repeatedly using an NSTimer.
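The timer setup itself is not shown; presumably something along these lines (the 30 fps interval and the point where firstFrameWallClockTime is captured are assumptions):

    // Sketch (assumed, not shown in the question): schedule writeSample: at ~30 fps.
    firstFrameWallClockTime = CFAbsoluteTimeGetCurrent();  // assumed to be set when recording starts
    [NSTimer scheduledTimerWithTimeInterval:1.0 / 30.0     // frame interval is an assumption
                                     target:self
                                   selector:@selector(writeSample:)
                                   userInfo:nil
                                    repeats:YES];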
- (void)writeSample:(NSTimer *)_timer {
    if (assetWriterInput.readyForMoreMediaData) {
        @autoreleasepool {
            CVReturn cvErr = kCVReturnSuccess;

            // Get a screenshot image by rendering the view's layer.
            // Note: renderInContext: does not capture AVCaptureVideoPreviewLayer content.
            UIGraphicsBeginImageContext(baseViewOne.frame.size);
            [[baseViewOne layer] renderInContext:UIGraphicsGetCurrentContext()];
            screenshota = UIGraphicsGetImageFromCurrentImageContext();
            UIGraphicsEndImageContext();

            CGImageRef image = (CGImageRef)[screenshota CGImage];

            // Prepare a pixel buffer backed by the screenshot's raw bytes
            CVPixelBufferRef pixelBuffer = NULL;
            CFDataRef imageData = CGDataProviderCopyData(CGImageGetDataProvider(image));
            cvErr = CVPixelBufferCreateWithBytes(kCFAllocatorDefault,
                                                 baseViewOne.frame.size.width, baseViewOne.frame.size.height,
                                                 kCVPixelFormatType_32BGRA,
                                                 (void *)CFDataGetBytePtr(imageData),
                                                 CGImageGetBytesPerRow(image),
                                                 NULL, NULL, NULL,
                                                 &pixelBuffer);

            // Calculate the presentation time from the wall clock
            CMTime presentationTime;
            CFAbsoluteTime thisFrameWallClockTime = CFAbsoluteTimeGetCurrent();
            elapsedTime = thisFrameWallClockTime - (firstFrameWallClockTime + pausedFrameTime);
            presentationTime = CMTimeMake(elapsedTime * TIME_SCALE, TIME_SCALE);

            BOOL appended = [assetWriterPixelBufferAdaptor appendPixelBuffer:pixelBuffer
                                                        withPresentationTime:presentationTime];

            // Release the buffer and its backing data whether or not the append
            // succeeded (the original code leaked both on the failure path)
            CVPixelBufferRelease(pixelBuffer);
            CFRelease(imageData);

            if (!appended) {
                [self stopRecording];
            }
        }
    }
}
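writeSample: also depends on assetWriterInput and assetWriterPixelBufferAdaptor being configured before recording starts. A minimal sketch of that plumbing, assuming assetWriter and outputURL ivars (both assumed names) and H.264 output sized to baseViewOne:

    // Sketch (assumed setup): the asset writer plumbing writeSample: relies on.
    // `assetWriter` and `outputURL` are assumed names; codec and size are assumptions.
    NSError *error = nil;
    assetWriter = [[AVAssetWriter alloc] initWithURL:outputURL
                                            fileType:AVFileTypeQuickTimeMovie
                                               error:&error];
    NSDictionary *videoSettings = @{ AVVideoCodecKey  : AVVideoCodecH264,
                                     AVVideoWidthKey  : @(baseViewOne.frame.size.width),
                                     AVVideoHeightKey : @(baseViewOne.frame.size.height) };
    assetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                          outputSettings:videoSettings];
    assetWriterInput.expectsMediaDataInRealTime = YES;
    assetWriterPixelBufferAdaptor =
        [[AVAssetWriterInputPixelBufferAdaptor alloc]
                 initWithAssetWriterInput:assetWriterInput
              sourcePixelBufferAttributes:@{ (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA) }];
    [assetWriter addInput:assetWriterInput];
    [assetWriter startWriting];
    // Starting the session at time zero matches the wall-clock-relative
    // presentation times computed in writeSample: above.
    [assetWriter startSessionAtSourceTime:kCMTimeZero];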
answered 2013-12-06T04:13:22.700