I am creating a video from an array of images. As soon as the array holds more than about 100 images, the app gets a memory warning and then crashes. The project uses ARC. Can anyone help me figure out how to handle this? Please see below the code I use to write the MP4 video.
NSError *error = nil;
NSString *path = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents/video.mov"];
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:path]
                                                       fileType:AVFileTypeQuickTimeMovie
                                                          error:&error];

NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                               AVVideoCodecH264, AVVideoCodecKey,
                               [NSNumber numberWithInt:1024], AVVideoWidthKey,
                               [NSNumber numberWithInt:768], AVVideoHeightKey,
                               nil];

AVAssetWriterInput *videoStream = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                                     outputSettings:videoSettings];
AVAssetWriterInputPixelBufferAdaptor *adaptor =
    [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoStream
                                                                     sourcePixelBufferAttributes:nil];
[videoWriter addInput:videoStream];
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:CMTimeMake(0, 3.2)];

CVPixelBufferRef buffer = NULL;
int frameCount = 2;

for (int i = 0; i < [imagesArray count]; i++) {
    @try {
        // Convert the image at this index into a CVPixelBufferRef.
        buffer = [self pixelBufferFromCGImage:[[imagesArray objectAtIndex:i] CGImage]
                                      andSize:CGSizeMake(1024, 768)];
        BOOL append_ok = NO;
        if (adaptor.assetWriterInput.readyForMoreMediaData) {
            CMTime frameTime = CMTimeMake(frameCount, (int32_t)03.1);
            append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
            if (buffer)
                [NSThread sleepForTimeInterval:0.02];
        } else {
            [NSThread sleepForTimeInterval:0.1];
        }
        frameCount++;
    }
    @catch (NSException *exception) {
        NSLog(@"exception---writevideo %@", exception.description);
    }
}

// Only the last buffer is released here, after the whole loop has finished.
if (buffer) {
    CVPixelBufferRelease(buffer);
}

[videoStream markAsFinished];
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
    [videoWriter finishWriting];
});
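For reference, here is a minimal sketch of how the same appending loop could look if every pixel buffer were released inside its own iteration and each frame's work wrapped in an @autoreleasepool. It assumes the same imagesArray, adaptor, and the pixelBufferFromCGImage:andSize: helper shown below; the integer timescale of 30 is chosen only for illustration.

// Hypothetical variant of the loop: release every buffer inside the iteration
// and drain autoreleased objects per frame. Not the code currently in use.
int sketchFrameCount = 0;
for (NSUInteger i = 0; i < [imagesArray count]; i++) {
    @autoreleasepool {
        CVPixelBufferRef frameBuffer =
            [self pixelBufferFromCGImage:[[imagesArray objectAtIndex:i] CGImage]
                                 andSize:CGSizeMake(1024, 768)];
        // Wait until the writer input can accept more data.
        while (!adaptor.assetWriterInput.readyForMoreMediaData) {
            [NSThread sleepForTimeInterval:0.05];
        }
        if (frameBuffer) {
            CMTime frameTime = CMTimeMake(sketchFrameCount, 30); // illustrative timescale
            [adaptor appendPixelBuffer:frameBuffer withPresentationTime:frameTime];
            CVPixelBufferRelease(frameBuffer); // release this frame's buffer immediately
        }
        sketchFrameCount++;
    }
}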
The pixelBufferFromCGImage:andSize: helper referenced above:

- (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image andSize:(CGSize)size
{
    @try {
        NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                                 [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                                 [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                                 nil];
        CVPixelBufferRef pxbuffer = NULL;
        CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width, size.height,
                                              kCVPixelFormatType_32ARGB,
                                              (__bridge CFDictionaryRef)options, &pxbuffer);
        NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);

        CVPixelBufferLockBaseAddress(pxbuffer, 0);
        void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
        NSParameterAssert(pxdata != NULL);

        // Draw the CGImage into the pixel buffer's memory through a bitmap context.
        CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
        CGContextRef context = CGBitmapContextCreate(pxdata, size.width, size.height, 8,
                                                     4 * size.width, rgbColorSpace,
                                                     kCGImageAlphaNoneSkipFirst);
        NSParameterAssert(context);
        CGContextConcatCTM(context, CGAffineTransformMakeRotation(0));
        CGContextDrawImage(context, CGRectMake(0, 0, 1024, 768), image);
        CGColorSpaceRelease(rgbColorSpace);
        CGContextRelease(context);

        CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

        // The caller owns the returned buffer and is responsible for releasing it.
        return pxbuffer;
    }
    @catch (NSException *exception) {
        NSLog(@"exception---pixelBufferFromCGImage %@", exception.description);
    }
    return NULL;
}
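An alternative sketch for the buffer-creation side, assuming the adaptor were created with sourcePixelBufferAttributes (for example kCVPixelBufferPixelFormatTypeKey set to kCVPixelFormatType_32ARGB plus width and height) so that adaptor.pixelBufferPool becomes available once writing has started; pool-backed buffers are recycled rather than allocated fresh for every frame. The method name pixelBufferFromCGImage:usingPool:andSize: is hypothetical.

// Hypothetical helper that draws into a buffer taken from the adaptor's pool.
// adaptor.pixelBufferPool is only non-NULL after startWriting /
// startSessionAtSourceTime: and when sourcePixelBufferAttributes were supplied.
- (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image
                                 usingPool:(CVPixelBufferPoolRef)pool
                                   andSize:(CGSize)size
{
    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pool, &pxbuffer);
    if (status != kCVReturnSuccess || pxbuffer == NULL) {
        return NULL;
    }

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(CVPixelBufferGetBaseAddress(pxbuffer),
                                                 size.width, size.height, 8,
                                                 CVPixelBufferGetBytesPerRow(pxbuffer),
                                                 rgbColorSpace, kCGImageAlphaNoneSkipFirst);
    CGContextDrawImage(context, CGRectMake(0, 0, size.width, size.height), image);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);
    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

    // The caller still owns the returned buffer and must CVPixelBufferRelease it.
    return pxbuffer;
}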
Thanks in advance. Please let me know where I am going wrong and how I can handle this so that it works.