I am recording short video clips (around one second each, with both the front and back cameras, so possibly in different orientations) and then trying to merge them using AVAssetExportSession. I basically build a composition and a video composition with the appropriate transforms and with the audio and video tracks.
The problem is that on iOS 5 the export fails if you have more than 4 video clips, and on iOS 6 the limit seems to be 16 clips.
This seems really puzzling to me. Is AVAssetExportSession doing something weird, or is there some undocumented limit on the number of clips that can be passed to it? Here are some excerpts from my code:
-(void)exportVideo
{
    AVMutableComposition *composition = video.composition;
    AVMutableVideoComposition *videoComposition = video.videoComposition;
    NSString *presetName = AVAssetExportPresetMediumQuality;

    AVAssetExportSession *_assetExport = [[AVAssetExportSession alloc] initWithAsset:composition presetName:presetName];
    self.exportSession = _assetExport;

    videoComposition.renderSize = CGSizeMake(640, 480);
    _assetExport.videoComposition = videoComposition;

    NSString *exportPath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"export.mov"];
    NSURL *exportUrl = [NSURL fileURLWithPath:exportPath];

    // Delete the previously exported file if it exists
    if ([[NSFileManager defaultManager] fileExistsAtPath:exportPath])
        [[NSFileManager defaultManager] removeItemAtPath:exportPath error:nil];

    _assetExport.outputFileType = AVFileTypeQuickTimeMovie;
    _assetExport.outputURL = exportUrl;
    _assetExport.shouldOptimizeForNetworkUse = YES;

    [_assetExport exportAsynchronouslyWithCompletionHandler:^{
        switch (_assetExport.status)
        {
            case AVAssetExportSessionStatusCompleted:
                NSLog(@"Completed exporting!");
                break;
            case AVAssetExportSessionStatusFailed:
                NSLog(@"Failed: %@", _assetExport.error.description);
                break;
            case AVAssetExportSessionStatusCancelled:
                NSLog(@"Canceled: %@", _assetExport.error);
                break;
            default:
                break;
        }
    }];
}
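As an aside, the failure branch above only logs error.description. In case it helps anyone reproduce this, here is a slightly expanded variant of that case that I could use instead (a sketch, assuming the NSError carries a lower-level media error under NSUnderlyingErrorKey, which it may not always do):

case AVAssetExportSessionStatusFailed: {
    NSError *error = _assetExport.error;
    // Log the domain and code, plus any underlying media-level error
    NSLog(@"Failed: %@ (domain %@, code %ld)", error.localizedDescription, error.domain, (long)error.code);
    NSError *underlying = [error.userInfo objectForKey:NSUnderlyingErrorKey];
    if (underlying != nil) {
        NSLog(@"Underlying error: %@", underlying);
    }
    break;
}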
Here is how the compositions are made:
-(void)setVideoAndExport
{
    video = nil;
    video = [[VideoComposition alloc] initVideoTracks];

    CMTime localTimeline = kCMTimeZero;

    // Build the composition from all of the video files
    for (NSURL *url in outputFileUrlArray) {
        AVAsset *asset = [[AVURLAsset alloc] initWithURL:url options:nil];
        [video setVideo:url at:localTimeline];
        localTimeline = CMTimeAdd(localTimeline, asset.duration); // Advance the timeline
    }

    [self exportVideo];
}
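One more detail: the timeline is accumulated from asset.duration, which I understand can be an approximate value for some containers unless precise timing is requested when the asset is created. A small sketch of that change (just a guess at a contributing factor, not a confirmed fix):

NSDictionary *options = [NSDictionary dictionaryWithObject:[NSNumber numberWithBool:YES]
                                                    forKey:AVURLAssetPreferPreciseDurationAndTimingKey];
AVAsset *asset = [[AVURLAsset alloc] initWithURL:url options:options];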
And here is the main part of the VideoComposition class:
-(id)initVideoTracks
{
    if ((self = [super init]))
    {
        composition = [[AVMutableComposition alloc] init];
        [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
        mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
        instructions = [[NSMutableArray alloc] init];
        videoComposition = [AVMutableVideoComposition videoComposition];
    }
    return self;
}
-(void)setVideo:(NSURL *)url at:(CMTime)to
{
    asset = [[AVURLAsset alloc] initWithURL:url options:nil];
    AVAssetTrack *assetTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];

    // Add a new pair of composition tracks (video + audio) for this clip
    AVMutableCompositionTrack *compositionTrackVideo = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    [compositionTrackVideo insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:assetTrack atTime:to error:nil];

    AVMutableCompositionTrack *compositionTrackAudio = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    [compositionTrackAudio insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:[[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:to error:nil];

    // Extend the single instruction to cover the timeline built so far
    mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeAdd(to, asset.duration));

    // Apply the clip's preferred transform and hide it once its segment ends
    AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionTrackVideo];
    [layerInstruction setTransform:assetTrack.preferredTransform atTime:kCMTimeZero];
    [layerInstruction setOpacity:0.0 atTime:CMTimeAdd(to, asset.duration)];

    [instructions addObject:layerInstruction];
    mainInstruction.layerInstructions = instructions;
    videoComposition.instructions = [NSArray arrayWithObject:mainInstruction];
    videoComposition.frameDuration = CMTimeMake(1, 30);
}
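Note that both insertTimeRange: calls above pass error:nil, so a per-clip insertion failure would go unnoticed. A minimal sketch of the same video insert with the error surfaced (the audio insert could be handled the same way):

NSError *insertError = nil;
BOOL inserted = [compositionTrackVideo insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration)
                                               ofTrack:assetTrack
                                                atTime:to
                                                 error:&insertError];
if (!inserted) {
    NSLog(@"Inserting clip %@ failed: %@", url, insertError);
}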