I'm trying to create a movie from a set of images. It works fine with HD images ({720, 1280}) and lower resolutions, but when I try to create the movie from full-HD images ({1080, 1920}) the video comes out scrambled. Here is a link showing what it looks like: http://www.youtube.com/watch?v=BfYldb8e_18. Any idea what I might be doing wrong? Here is the method I use to create the movie:
- (void)createMovieWithOptions:(NSDictionary *)options
{
    @autoreleasepool {
        NSString *path = [options valueForKey:@"path"];
        CGSize size = [(NSValue *)[options valueForKey:@"size"] CGSizeValue];
        NSArray *imageArray = [options valueForKey:@"pictures"];
        NSInteger recordingFPS = [[options valueForKey:@"fps"] integerValue];
        BOOL success = YES;
        NSError *error = nil;

        AVAssetWriter *assetWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:path]
                                                                fileType:AVFileTypeQuickTimeMovie
                                                                   error:&error];
        NSParameterAssert(assetWriter);

        NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                       AVVideoCodecH264, AVVideoCodecKey,
                                       [NSNumber numberWithFloat:size.width], AVVideoWidthKey,
                                       [NSNumber numberWithFloat:size.height], AVVideoHeightKey,
                                       nil];
        AVAssetWriterInput *videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                                                   outputSettings:videoSettings];

        // Configure settings for the pixel buffer adaptor.
        NSDictionary *bufferAttributes = [NSDictionary dictionaryWithObjectsAndKeys:
                                          [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey,
                                          nil];
        AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
                                                                                                                          sourcePixelBufferAttributes:bufferAttributes];
        NSParameterAssert(videoWriterInput);
        NSParameterAssert([assetWriter canAddInput:videoWriterInput]);
        videoWriterInput.expectsMediaDataInRealTime = NO;
        [assetWriter addInput:videoWriterInput];

        // Start a session:
        [assetWriter startWriting];
        [assetWriter startSessionAtSourceTime:kCMTimeZero];

        CVPixelBufferRef buffer = NULL;
        // Convert each UIImage to a CGImage and append it as a frame.
        int frameCount = 0;
        float progress = 0;
        float progressFromFrames = _progressView.progress; // only used when creating an iFlipbook movie

        for (UIImage *img in imageArray)
        {
            if ([[NSThread currentThread] isCancelled])
            {
                [NSThread exit];
            }

            [condCreateMovie lock];
            if (isCreateMoviePaused)
            {
                [condCreateMovie wait];
            }

            uint64_t totalFreeSpace = [Utils getFreeDiskspace];
            if (((totalFreeSpace / 1024ll) / 1024ll) < 50)
            {
                success = NO;
                break;
            }

            // @autoreleasepool {
            NSLog(@"size:%@", NSStringFromCGSize(img.size));
            buffer = [[MovieWritter sharedMovieWritter] pixelBufferFromCGImage:[img CGImage] andSize:size];

            BOOL append_ok = NO;
            int j = 0;
            while (!append_ok && j < 60)
            {
                if (adaptor.assetWriterInput.readyForMoreMediaData)
                {
                    CMTime frameTime = CMTimeMake(frameCount, recordingFPS);
                    append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
                    CVPixelBufferRelease(buffer);
                    [NSThread sleepForTimeInterval:0.1];

                    if (isCreatingiFlipBookFromImported)
                        progress = (float)frameCount / (float)[imageArray count] / 2.0 + progressFromFrames;
                    else
                        progress = (float)frameCount / (float)[imageArray count];
                    [[NSNotificationCenter defaultCenter] postNotificationName:@"movieCreationProgress"
                                                                        object:[NSNumber numberWithFloat:progress]];
                }
                else
                {
                    [NSThread sleepForTimeInterval:0.5];
                }
                j++;
            }
            if (!append_ok)
            {
                NSLog(@"error appending image %d times %d\n", frameCount, j);
            }
            frameCount++;
            [condCreateMovie unlock];
        }

        // Finish the session:
        [videoWriterInput markAsFinished];
        [assetWriter finishWriting];

        NSDictionary *dict = [NSDictionary dictionaryWithObjectsAndKeys:
                              [NSNumber numberWithBool:success], @"success",
                              path, @"path", nil];
        [[NSNotificationCenter defaultCenter] postNotificationName:@"movieCreationFinished" object:dict];
    }
}
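In case it helps, this is roughly how the options dictionary is built and how the method is started on its own thread (a sketch only; moviePath, images, and the 30 fps value are placeholders, but the keys match what the method reads):

// Illustrative only -- mirrors the keys the method reads ("path", "size", "pictures", "fps");
// moviePath and images stand in for my real values.
NSDictionary *movieOptions = [NSDictionary dictionaryWithObjectsAndKeys:
                              moviePath, @"path",
                              [NSValue valueWithCGSize:CGSizeMake(1080, 1920)], @"size",
                              images, @"pictures",
                              [NSNumber numberWithInteger:30], @"fps",
                              nil];
[NSThread detachNewThreadSelector:@selector(createMovieWithOptions:)
                         toTarget:self
                       withObject:movieOptions];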
*Edit*: Here is the code for [[MovieWritter sharedMovieWritter] pixelBufferFromCGImage:]
- (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image andSize:(CGSize)size
{
    @autoreleasepool {
        NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                                 [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                                 [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                                 nil];
        CVPixelBufferRef pxbuffer = NULL;
        CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width,
                                              size.height, kCVPixelFormatType_32ARGB,
                                              (__bridge CFDictionaryRef)options,
                                              &pxbuffer);
        NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);

        CVPixelBufferLockBaseAddress(pxbuffer, 0);
        void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
        NSParameterAssert(pxdata != NULL);

        CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
        CGContextRef context = CGBitmapContextCreate(pxdata, size.width,
                                                     size.height, 8, 4 * size.width, rgbColorSpace,
                                                     kCGImageAlphaNoneSkipFirst);
        NSParameterAssert(context);

        CGContextConcatCTM(context, CGAffineTransformMakeRotation(0));
        CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
                                               CGImageGetHeight(image)), image);

        CGColorSpaceRelease(rgbColorSpace);
        CGContextRelease(context);
        CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
        return pxbuffer;
    }
}
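For what it's worth, here is a small diagnostic I could drop in right after CVPixelBufferLockBaseAddress while debugging, to compare the buffer geometry CVPixelBufferCreate actually returns with the 4 * size.width row length the bitmap context is told to use (a sketch only, not part of the production code):

// Diagnostic only: log the pixel buffer's real layout next to the rowBytes
// value passed to CGBitmapContextCreate.
size_t bufferWidth  = CVPixelBufferGetWidth(pxbuffer);
size_t bufferHeight = CVPixelBufferGetHeight(pxbuffer);
size_t bytesPerRow  = CVPixelBufferGetBytesPerRow(pxbuffer);
NSLog(@"pixel buffer: %zux%zu, bytesPerRow=%zu, context rowBytes=%zu",
      bufferWidth, bufferHeight, bytesPerRow, (size_t)(4 * size.width));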