I'm trying to use AVAssetWriter to write CGImages to a file so that I can build a video from still images.
I've gotten this working in three different ways on the simulator, but every one of them fails on an iPhone 4 running iOS 4.3.
All of the failures come down to pixel buffers.
My first method was to simply create the pixel buffers as needed, without using a pool. That works, but it is too memory-intensive to run on the device.
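To show what I mean, here is a stripped-down sketch of that first approach (not my exact code; the helper name and the hard-coded 480x320 size are just for illustration):

#import <Foundation/Foundation.h>
#import <CoreVideo/CoreVideo.h>

// Method 1 (sketch): allocate a standalone 32ARGB pixel buffer for every frame,
// with no pool involved. The caller draws into it and releases it afterwards.
static CVPixelBufferRef CreateStandaloneBuffer(void)
{
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], (NSString *)kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], (NSString *)kCVPixelBufferCGBitmapContextCompatibilityKey,
                             nil];
    CVPixelBufferRef buffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, 480, 320,
                                          kCVPixelFormatType_32ARGB,
                                          (CFDictionaryRef)options, &buffer);
    if (status != kCVReturnSuccess) return NULL;
    return buffer; // caller releases with CVPixelBufferRelease
}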
My second method was to use the recommended AVAssetWriterInputPixelBufferAdaptor and pull pixel buffers from the adaptor's pixelBufferPool with CVPixelBufferPoolCreatePixelBuffer.
That also works on the simulator, but fails on the device because the adaptor's pixel buffer pool is never allocated, and I get no error messages.
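For reference, the call that fails to produce a buffer on the device is essentially this (adaptor is the AVAssetWriterInputPixelBufferAdaptor created in the setup code further down):

// Method 2 (sketch): pull a buffer from the adaptor's own pool.
// On the device, adaptor.pixelBufferPool itself comes back NULL.
CVPixelBufferRef poolBuffer = NULL;
CVReturn result = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault,
                                                     adaptor.pixelBufferPool,
                                                     &poolBuffer);
if (result != kCVReturnSuccess || poolBuffer == NULL) {
    NSLog(@"Could not get a pixel buffer from the adaptor's pool (status %d)", (int)result);
}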
Lastly, I attempted to create my own pixel buffer pool with CVPixelBufferPoolCreate. That also works in the simulator; on the device, everything works fine until I try to append the pixel buffer with appendPixelBuffer, which fails every time.
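Condensed, that third approach boils down to creating the pool once up front and then drawing every frame into a buffer pulled from it (the full version is in the setup code below; variable names here are just for illustration):

// Method 3 (sketch): create a private pool with fixed 480x320 32ARGB attributes.
NSDictionary *poolAttributes = [NSDictionary dictionaryWithObjectsAndKeys:
                                [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], (NSString *)kCVPixelBufferPixelFormatTypeKey,
                                [NSNumber numberWithInt:480], (NSString *)kCVPixelBufferWidthKey,
                                [NSNumber numberWithInt:320], (NSString *)kCVPixelBufferHeightKey,
                                nil];
CVPixelBufferPoolRef myPool = NULL;
CVReturn poolStatus = CVPixelBufferPoolCreate(kCFAllocatorDefault, NULL,
                                              (CFDictionaryRef)poolAttributes, &myPool);
if (poolStatus != kCVReturnSuccess) NSLog(@"CVPixelBufferPoolCreate failed: %d", (int)poolStatus);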
I've found very little information about this online. I've based my code on the examples I could find, but I've had no luck for days now. If anyone has successfully done this with AVAssetWriter, please take a look and let me know if you see anything out of place.
Note: you will see commented-out blocks from my earlier attempts.
First, the setup:
- (BOOL)openVideoFile:(NSString *)path withSize:(CGSize)imageSize {
    size = CGSizeMake(480.0, 320.0); //imageSize;

    NSError *error = nil;
    videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:path]
                                            fileType:AVFileTypeQuickTimeMovie
                                               error:&error];
    if (error != nil)
        return NO;

    NSDictionary *videoCleanApertureSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                                [NSNumber numberWithDouble:size.width], AVVideoCleanApertureWidthKey,
                                                [NSNumber numberWithDouble:size.height], AVVideoCleanApertureHeightKey,
                                                [NSNumber numberWithInt:10], AVVideoCleanApertureHorizontalOffsetKey,
                                                [NSNumber numberWithInt:10], AVVideoCleanApertureVerticalOffsetKey,
                                                nil];

    NSDictionary *videoAspectRatioSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                              [NSNumber numberWithInt:1], AVVideoPixelAspectRatioHorizontalSpacingKey,
                                              [NSNumber numberWithInt:1], AVVideoPixelAspectRatioVerticalSpacingKey,
                                              nil];

    NSDictionary *codecSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   //[NSNumber numberWithInt:960000], AVVideoAverageBitRateKey,
                                   //[NSNumber numberWithInt:1], AVVideoMaxKeyFrameIntervalKey,
                                   videoCleanApertureSettings, AVVideoCleanApertureKey,
                                   videoAspectRatioSettings, AVVideoPixelAspectRatioKey,
                                   //AVVideoProfileLevelH264Main31, AVVideoProfileLevelKey,
                                   nil];

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   codecSettings, AVVideoCompressionPropertiesKey,
                                   [NSNumber numberWithDouble:size.width], AVVideoWidthKey,
                                   [NSNumber numberWithDouble:size.height], AVVideoHeightKey,
                                   nil];

    writerInput = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                      outputSettings:videoSettings] retain];

    NSMutableDictionary *bufferAttributes = [[NSMutableDictionary alloc] init];
    [bufferAttributes setObject:[NSNumber numberWithInt:kCVPixelFormatType_32ARGB]
                         forKey:(NSString *)kCVPixelBufferPixelFormatTypeKey];
    [bufferAttributes setObject:[NSNumber numberWithInt:480]
                         forKey:(NSString *)kCVPixelBufferWidthKey];
    [bufferAttributes setObject:[NSNumber numberWithInt:320]
                         forKey:(NSString *)kCVPixelBufferHeightKey];

    //NSDictionary *bufferAttributes = [NSDictionary dictionaryWithObjectsAndKeys: [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];
    //[bufferAttributes setObject:[NSNumber numberWithInt:640]
    //                     forKey:(NSString *)kCVPixelBufferWidthKey];
    //[bufferAttributes setObject:[NSNumber numberWithInt:480]
    //                     forKey:(NSString *)kCVPixelBufferHeightKey];

    adaptor = [[AVAssetWriterInputPixelBufferAdaptor
                assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                           sourcePixelBufferAttributes:nil] retain];
    //CVPixelBufferPoolCreate(kCFAllocatorSystemDefault, NULL, (CFDictionaryRef)bufferAttributes, &pixelBufferPool);

    // Create buffer pool
    NSMutableDictionary *attributes = [NSMutableDictionary dictionary];
    int width = 480;
    int height = 320;
    [attributes setObject:[NSNumber numberWithInt:kCVPixelFormatType_32ARGB] forKey:(NSString *)kCVPixelBufferPixelFormatTypeKey];
    [attributes setObject:[NSNumber numberWithInt:width] forKey:(NSString *)kCVPixelBufferWidthKey];
    [attributes setObject:[NSNumber numberWithInt:height] forKey:(NSString *)kCVPixelBufferHeightKey];
    CVReturn theError = CVPixelBufferPoolCreate(kCFAllocatorDefault, NULL, (CFDictionaryRef)attributes, &pixelBufferPool);

    NSParameterAssert(writerInput);
    NSParameterAssert([videoWriter canAddInput:writerInput]);
    [videoWriter addInput:writerInput];
    writerInput.expectsMediaDataInRealTime = YES;

    // Start a session:
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    buffer = NULL;
    lastTime = kCMTimeZero;
    presentTime = kCMTimeZero;
    return YES;
}
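(Not shown above, but for completeness: when all the frames have been appended I close the writer roughly like this. The -closeVideoFile name is just for illustration; the problem happens long before this point.)

- (void)closeVideoFile
{
    // Mark the input finished, end the session at the last frame time, then finalize the file.
    [writerInput markAsFinished];
    [videoWriter endSessionAtSourceTime:lastTime];
    if (![videoWriter finishWriting]) {
        NSLog(@"finishWriting failed: %@", videoWriter.error);
    }
    [writerInput release];
    [videoWriter release];
}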
Next, the two methods that create the pixel buffer and append it to the writer:
- (void)writeImageToMovie:(CGImageRef)image
{
    if ([writerInput isReadyForMoreMediaData])
    {
        //CMTime frameTime = CMTimeMake(1, 20);
        //CMTime lastTime = CMTimeMake(i, 20); // i is from 0 to 24 of the loop above
        //CMTime presentTime = CMTimeAdd(lastTime, frameTime);

        buffer = [self pixelBufferFromCGImage:image];
        BOOL success = [adaptor appendPixelBuffer:buffer withPresentationTime:presentTime];
        if (!success) NSLog(@"Failed to appendPixelBuffer");
        CVPixelBufferRelease(buffer);

        presentTime = CMTimeAdd(lastTime, CMTimeMake(5, 1000));
        lastTime = presentTime;
    }
    else
    {
        NSLog(@"error - writerInput not ready");
    }
}
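One diagnostic that might be worth adding right after the failed append, to see whether the writer itself reports anything (videoWriter and success refer to the code above):

// Check the writer's status and error when appendPixelBuffer returns NO.
if (!success) {
    NSLog(@"appendPixelBuffer failed - writer status: %d, error: %@",
          (int)videoWriter.status, videoWriter.error);
}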
- (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image
{
    CVPixelBufferRef pxbuffer = NULL;
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                             nil];

    if (pixelBufferPool == NULL) NSLog(@"pixelBufferPool is null!");
    CVReturn status = CVPixelBufferPoolCreatePixelBuffer(NULL, pixelBufferPool, &pxbuffer);
    /*if (pxbuffer == NULL) {
        CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width,
                                              size.height, kCVPixelFormatType_32ARGB, (CFDictionaryRef)options,
                                              &pxbuffer);
    }*/
    //NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    //NSParameterAssert(pxdata != NULL);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, size.width,
                                                 size.height, 8, 4 * size.width, rgbColorSpace,
                                                 kCGImageAlphaNoneSkipFirst);
    //NSParameterAssert(context);

    CGContextConcatCTM(context, CGAffineTransformMakeRotation(0));
    CGContextDrawImage(context, CGRectMake(90, 10, CGImageGetWidth(image),
                                           CGImageGetHeight(image)), image);

    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);
    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

    return pxbuffer;
}
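For context, the methods above get driven from a loop roughly like the following (simplified; imageArray and moviePath are placeholder names for my array of CGImageRefs and output path):

// Feed each CGImage to the writer in order; the timing is handled inside
// writeImageToMovie: via presentTime/lastTime.
[self openVideoFile:moviePath withSize:CGSizeMake(480.0, 320.0)];
for (NSUInteger i = 0; i < [imageArray count]; i++) {
    CGImageRef frame = (CGImageRef)[imageArray objectAtIndex:i];
    [self writeImageToMovie:frame];
}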