I'm capturing video of the screen, but it crashes on this line:
CFDataGetBytes(image, CFRangeMake(0, CFDataGetLength(image)), destPixels);
Note: it works if the pixel buffer is contiguous and has the same bytesPerRow as the input data.
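That note points at the likely fix: when the destination CVPixelBuffer is padded (its bytesPerRow is larger than width * bytesPerPixel), a single bulk CFDataGetBytes writes past the end of each row and can overrun the buffer. Copying row by row avoids that. Here is a minimal sketch under that assumption; the `image` and `pixelBuffer` names and the packed 32-bit BGRA source stride are illustrative, not taken from the original code:

// Sketch: copy packed source rows into a possibly padded destination.
// Assumes 4 bytes per pixel and a source the same size as the buffer;
// `image` (CFDataRef) and `pixelBuffer` (CVPixelBufferRef) are hypothetical names.
CVPixelBufferLockBaseAddress(pixelBuffer, 0);
uint8_t *destPixels = (uint8_t *)CVPixelBufferGetBaseAddress(pixelBuffer);
size_t destStride = CVPixelBufferGetBytesPerRow(pixelBuffer);
size_t height     = CVPixelBufferGetHeight(pixelBuffer);
size_t srcStride  = CVPixelBufferGetWidth(pixelBuffer) * 4; // packed BGRA rows
if (destStride == srcStride) {
    // Contiguous with matching stride: the one-shot copy is safe.
    CFDataGetBytes(image, CFRangeMake(0, CFDataGetLength(image)), destPixels);
} else {
    // Strides differ: copy one row at a time so destination padding is skipped.
    const uint8_t *srcPixels = CFDataGetBytePtr(image);
    for (size_t row = 0; row < height; row++) {
        memcpy(destPixels + row * destStride, srcPixels + row * srcStride, srcStride);
    }
}
CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);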
Beyond that, here is the code I use to grab frames from the camera; maybe it will help. After grabbing the data, I put it on a queue for further processing. I had to remove some of the code since it's not relevant to you, so what you see here should be usable.
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
#import <libkern/OSAtomic.h> // OSAtomicDecrement32

- (void)captureOutput:(AVCaptureVideoDataOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    @autoreleasepool {
        CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        //NSLog(@"PE: value=%lld timeScale=%d flags=%x", prStamp.value, prStamp.timescale, prStamp.flags);

        /* Lock the image buffer */
        CVPixelBufferLockBaseAddress(imageBuffer, 0);

        NSRange captureRange; // which rows to grab; computed by code elided from this excerpt

        /* Get information about the image */
        uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
        size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
        size_t width = CVPixelBufferGetWidth(imageBuffer);

        // Note: Apple's sample code glosses over this - the camera delivers BGRA, and
        // kCGImageAlphaNoneSkipFirst | kCGBitmapByteOrder32Little matches that layout,
        // since 32-bit little-endian ARGB reads as B,G,R,A in memory.
        CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB(); // declared in elided code; created here so the snippet compiles
        CGContextRef newContext = CGBitmapContextCreate(NULL, width, captureRange.length, 8, bytesPerRow, colorSpace, kCGImageAlphaNoneSkipFirst | kCGBitmapByteOrder32Little);
        CGColorSpaceRelease(colorSpace);
        assert(newContext);

        // Copy just the rows we want straight into the context's backing store.
        uint8_t *newPtr = (uint8_t *)CGBitmapContextGetData(newContext);
        size_t offset = captureRange.location * bytesPerRow;
        memcpy(newPtr, baseAddress + offset, captureRange.length * bytesPerRow);

        CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

        CMTime prStamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer); // when it was taken?
        //CMTime deStamp = CMSampleBufferGetDecodeTimeStamp(sampleBuffer); // now?

        // Package everything the worker needs; the context is handed over
        // non-retained and released by the block below.
        NSDictionary *dict = [NSDictionary dictionaryWithObjectsAndKeys:
            [NSValue valueWithBytes:&saveState objCType:@encode(saveImages)], kState,
            [NSValue valueWithNonretainedObject:(__bridge id)newContext], kImageContext,
            [NSValue valueWithBytes:&prStamp objCType:@encode(CMTime)], kPresTime,
            nil];

        dispatch_async(imageQueue, ^
            {
                // could be on any thread now
                OSAtomicDecrement32(&queueDepth); // the matching increment is in the elided enqueue code

                if(!isCancelled) {
                    saveImages state; [(NSValue *)[dict objectForKey:kState] getValue:&state];
                    CGContextRef context; [(NSValue *)[dict objectForKey:kImageContext] getValue:&context];
                    CMTime stamp; [(NSValue *)[dict objectForKey:kPresTime] getValue:&stamp];

                    CGImageRef newImageRef = CGBitmapContextCreateImage(context);
                    CGContextRelease(context);

                    UIImageOrientation orient = state == saveOne ? UIImageOrientationLeft : UIImageOrientationUp;
                    UIImage *image = [UIImage imageWithCGImage:newImageRef scale:1.0 orientation:orient];
                    CGImageRelease(newImageRef);

                    NSData *data = UIImagePNGRepresentation(image);
                    //NSLog(@"STATE:[%d]: value=%lld timeScale=%d flags=%x", state, stamp.value, stamp.timescale, stamp.flags);
                    {
                        NSString *name = [NSString stringWithFormat:@"%d.png", num];
                        NSString *path = [[wlAppDelegate snippetsDirectory] stringByAppendingPathComponent:name];
                        BOOL ret = [data writeToFile:path atomically:NO];
                        //NSLog(@"WROTE %d err=%d w/time %f path:%@", num, ret, (double)stamp.value/(double)stamp.timescale, path);
                        if(!ret) {
                            ++errors;
                        } else {
                            dispatch_async(dispatch_get_main_queue(), ^
                                {
                                    if(num) [delegate progress:(CGFloat)num/(CGFloat)(MORE_THAN_ONE_REV * SNAPS_PER_SEC) file:path];
                                });
                        }
                        ++num;
                    }
                } else NSLog(@"CANCELLED");
            });
    }
}
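The method references several things from the code I removed: imageQueue, queueDepth, isCancelled, saveState and its saveImages type, and the kState/kImageContext/kPresTime keys, plus `delegate` and `wlAppDelegate`, which live elsewhere. Purely as a hypothetical sketch of what those declarations might look like (only the names come from the method above; every detail here is an assumption, not the original code):

// Hypothetical declarations for the elided state.
typedef enum { saveNone, saveOne } saveImages; // saveOne is referenced above; other values unknown
static saveImages saveState;

static NSString * const kState        = @"state";
static NSString * const kImageContext = @"imageContext";
static NSString * const kPresTime     = @"presTime";

static dispatch_queue_t imageQueue;  // e.g. dispatch_queue_create("imageQueue", DISPATCH_QUEUE_SERIAL)
static volatile int32_t queueDepth;  // incremented (OSAtomicIncrement32) wherever frames are enqueued
static volatile BOOL    isCancelled; // set when the capture is stopped
static int32_t          num, errors; // frame counter and failed-write counter

Note the design choice: the capture callback only locks the pixel buffer long enough to memcpy the rows it wants into a bitmap context, and all the expensive work (CGBitmapContextCreateImage, UIImagePNGRepresentation, the file write) happens later on imageQueue, so the camera is never stalled.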