Strange problem. I grab frames from a video file (.mov) and write them into another file with AVAssetWriter, without any explicit processing. Effectively I just copy each frame from one memory buffer to another and then flush it through the PixelbufferAdaptor. Then I take the resulting file, delete the original, put the generated file in its place, and run the same pass again. The odd thing is that the file size keeps growing with every pass! Can somebody explain why?
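For context, the loop below assumes a reader/writer pipeline roughly like the following. This is only a minimal sketch that matches the variable names in the snippet (asr, asset_reader_output, adaptor); the URLs (srcURL, dstURL), the pixel format, and the output settings are assumptions for illustration, not the actual project code.

    // Minimal sketch of the assumed setup; srcURL/dstURL, the pixel format and
    // the compression settings are assumptions, not the project's real values.
    AVURLAsset *asset = [AVURLAsset URLAssetWithURL:srcURL options:nil];
    AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];

    NSError *error = nil;
    AVAssetReader *asr = [[AVAssetReader alloc] initWithAsset:asset error:&error];
    NSDictionary *readerSettings =
        [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA]
                                    forKey:(id)kCVPixelBufferPixelFormatTypeKey];
    AVAssetReaderTrackOutput *asset_reader_output =
        [[AVAssetReaderTrackOutput alloc] initWithTrack:videoTrack outputSettings:readerSettings];
    [asr addOutput:asset_reader_output];

    AVAssetWriter *videoWriter =
        [[AVAssetWriter alloc] initWithURL:dstURL fileType:AVFileTypeQuickTimeMovie error:&error];
    NSDictionary *writerSettings =
        [NSDictionary dictionaryWithObjectsAndKeys:
            AVVideoCodecH264, AVVideoCodecKey,
            [NSNumber numberWithInt:(int)videoTrack.naturalSize.width], AVVideoWidthKey,
            [NSNumber numberWithInt:(int)videoTrack.naturalSize.height], AVVideoHeightKey,
            nil];
    AVAssetWriterInput *writerInput =
        [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:writerSettings];
    AVAssetWriterInputPixelBufferAdaptor *adaptor =
        [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                                          sourcePixelBufferAttributes:nil];
    [videoWriter addInput:writerInput];

    [asr startReading];
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];

The per-frame loop is the part I actually have, and it looks like this: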
if (adaptor.assetWriterInput.readyForMoreMediaData == YES) {
    CVImageBufferRef cvimgRef = nil;
    CMTime lastTime = CMTimeMake(fcounter++, 30);
    CMTime presentTime = CMTimeAdd(lastTime, frameTime);
    CMSampleBufferRef framebuffer = nil;
    CGImageRef frameImg = nil;

    if ([asr status] == AVAssetReaderStatusReading) {
        framebuffer = [asset_reader_output copyNextSampleBuffer];
        frameImg = [self imageFromSampleBuffer:framebuffer withColorSpace:rgbColorSpace];
    }

    if (frameImg && screenshot) {
        //CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(framebuffer);
        CVReturn stat = CVPixelBufferLockBaseAddress(screenshot, 0);
        pxdata = CVPixelBufferGetBaseAddress(screenshot);
        bufferSize = CVPixelBufferGetDataSize(screenshot);
        // Get the number of bytes per row for the pixel buffer.
        bytesPerRow = CVPixelBufferGetBytesPerRow(screenshot);
        // Get the pixel buffer width and height.
        width = CVPixelBufferGetWidth(screenshot);
        height = CVPixelBufferGetHeight(screenshot);

        // Create a Quartz direct-access data provider that uses data we supply.
        CGDataProviderRef dataProvider = CGDataProviderCreateWithData(NULL, pxdata, bufferSize, NULL);
        CGImageAlphaInfo ai = CGImageGetAlphaInfo(frameImg);
        size_t bpx = CGImageGetBitsPerPixel(frameImg);
        CGColorSpaceRef fclr = CGImageGetColorSpace(frameImg);

        // Create a bitmap image from data supplied by the data provider.
        CGImageRef cgImage = CGImageCreate(width, height, 8, 32, bytesPerRow, rgbColorSpace,
                                           kCGImageAlphaNoneSkipLast | kCGBitmapByteOrder32Big,
                                           dataProvider, NULL, true, kCGRenderingIntentDefault);
        CGDataProviderRelease(dataProvider);

        // Draw the decoded frame into the pixel buffer that gets appended to the writer.
        stat = CVPixelBufferLockBaseAddress(finalPixelBuffer, 0);
        pxdata = CVPixelBufferGetBaseAddress(finalPixelBuffer);
        bytesPerRow = CVPixelBufferGetBytesPerRow(finalPixelBuffer);
        CGContextRef context = CGBitmapContextCreate(pxdata, imgsize.width, imgsize.height, 8,
                                                     bytesPerRow, rgbColorSpace,
                                                     kCGImageAlphaNoneSkipLast);
        CGContextDrawImage(context,
                           CGRectMake(0, 0, CGImageGetWidth(frameImg), CGImageGetHeight(frameImg)),
                           frameImg);

        //CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
        //CGImageRef myMaskedImage;
        const CGFloat myMaskingColors[6] = { 0, 0, 0, 1, 0, 0 };
        CGImageRef myColorMaskedImage = CGImageCreateWithMaskingColors(cgImage, myMaskingColors);
        //CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(myColorMaskedImage), CGImageGetHeight(myColorMaskedImage)), myColorMaskedImage);

        [adaptor appendPixelBuffer:finalPixelBuffer withPresentationTime:presentTime];
    }
}
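The snippet stops right after the append call. The finish-and-replace step described above, written against the hypothetical videoWriter/srcURL/dstURL names from the setup sketch, would look roughly like this:

    // Sketch of the "finish writing, then swap the generated file in for the
    // original" step from the question; videoWriter, srcURL and dstURL are the
    // hypothetical names used in the setup sketch above.
    [adaptor.assetWriterInput markAsFinished];
    [videoWriter finishWritingWithCompletionHandler:^{
        NSFileManager *fm = [NSFileManager defaultManager];
        [fm removeItemAtURL:srcURL error:NULL];            // delete the original
        [fm moveItemAtURL:dstURL toURL:srcURL error:NULL]; // replace it with the generated file
    }];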