我的目标是组合一组从相机录制的剪辑,并以特定的首选尺寸导出它们。当然,导出前需要旋转视频方向。
我的做法是把一组视频剪辑(存储在下面的 avAssets 中)组合成一个 AVMutableComposition。组合和导出本身都没有问题。但是,我在 AVMutableVideoComposition 上设置的旋转变换没有生效。如果我用同一个变换去设置视频轨道的 preferredTransform 属性,旋转就能生效。而且无论哪种情况,videoComposition 的 renderSize 都没有被应用——看起来导出器完全忽略了 videoComposition。有人知道可能是什么原因吗?
我确实有一个 AVCaptureSession 正在运行,但我在导出之前将其关闭,这没有任何区别。我对 iOS 编程相当陌生,所以我可能缺少一些基本的东西。:)
我的代码:
// Composites the recorded clips (held in the avAssets ivar) into a single
// AVMutableComposition, rotates the video 90° via an
// AVMutableVideoComposition, and exports the result asynchronously to a
// temporary .mov file.
//
// NOTE(review): AVAssetExportSession silently ignores `videoComposition`
// (both the per-layer transform and renderSize) when the session preset is
// AVAssetExportPresetPassthrough. If gVideoExportQuality is the passthrough
// preset, that fully explains "the exporter ignores the videoComposition" —
// use a re-encoding preset (e.g. AVAssetExportPresetMediumQuality) instead.
- (void)finalRecord {
    NSError *error = nil;
    AVMutableComposition *composition = [AVMutableComposition composition];
    AVMutableCompositionTrack *compositionVideoTrack =
        [composition addMutableTrackWithMediaType:AVMediaTypeVideo
                                 preferredTrackID:kCMPersistentTrackID_Invalid];
    NSLog(@"Video track id is %d", [compositionVideoTrack trackID]);
    AVMutableCompositionTrack *compositionAudioTrack =
        [composition addMutableTrackWithMediaType:AVMediaTypeAudio
                                 preferredTrackID:kCMPersistentTrackID_Invalid];

    // avAssets holds the video clips to be composited (ivar, populated by the
    // recording path — assumed non-empty here).
    NSUInteger pieces = [avAssets count];
    for (NSUInteger i = 0; i < pieces; i++) {
        AVURLAsset *sourceAsset = avAssets[i];
        // firstObject is nil-safe, unlike objectAtIndex:0 which throws on an
        // asset with no track of the requested type.
        AVAssetTrack *sourceVideoTrack =
            [[sourceAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];
        AVAssetTrack *sourceAudioTrack =
            [[sourceAsset tracksWithMediaType:AVMediaTypeAudio] firstObject];
        [timeRanges addObject:[NSValue valueWithCMTimeRange:
                                  CMTimeRangeMake(kCMTimeZero, sourceAsset.duration)]];
        [videoTracks addObject:sourceVideoTrack];
        [audioTracks addObject:sourceAudioTrack];
    }

    // Check the BOOL return value, not the error pointer — the original code
    // discarded both, so a failed insert produced an empty/odd composition
    // with no diagnostic.
    if (![compositionVideoTrack insertTimeRanges:timeRanges
                                        ofTracks:videoTracks
                                          atTime:kCMTimeZero
                                           error:&error]) {
        NSLog(@"Failed to insert video time ranges: %@", error);
        return;
    }
    if (![compositionAudioTrack insertTimeRanges:timeRanges
                                        ofTracks:audioTracks
                                          atTime:kCMTimeZero
                                           error:&error]) {
        NSLog(@"Failed to insert audio time ranges: %@", error);
        return;
    }

    // A bare 90° rotation spins the frame about the origin (top-left), which
    // pushes all content to negative x — i.e. outside the render rect, so the
    // export looks like the transform "didn't work". Concatenate a translation
    // by the source height so the rotated frame lands back at the origin.
    AVAssetTrack *firstVideoTrack = [videoTracks firstObject];
    CGSize naturalSize = firstVideoTrack.naturalSize;
    CGAffineTransform transform =
        CGAffineTransformConcat(CGAffineTransformMakeRotation(M_PI_2),
                                CGAffineTransformMakeTranslation(naturalSize.height, 0.0));

    AVMutableVideoCompositionInstruction *vtemp =
        [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    vtemp.timeRange = CMTimeRangeMake(kCMTimeZero, [composition duration]);
    NSLog(@"\nInstruction vtemp's time range is %f %f",
          CMTimeGetSeconds(vtemp.timeRange.start),
          CMTimeGetSeconds(vtemp.timeRange.duration));

    // Build the layer instruction against the composition's video track
    // directly. composition.tracks[0] is not guaranteed to be the video track,
    // so the instruction could silently target the audio track and be ignored.
    AVMutableVideoCompositionLayerInstruction *vLayerInstruction =
        [AVMutableVideoCompositionLayerInstruction
            videoCompositionLayerInstructionWithAssetTrack:compositionVideoTrack];
    [vLayerInstruction setTransform:transform atTime:kCMTimeZero];
    vtemp.layerInstructions = @[ vLayerInstruction ];

    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
    videoComposition.renderSize = CGSizeMake(320.0, 240.0);
    videoComposition.frameDuration = CMTimeMake(1, 30);  // 30 fps
    videoComposition.instructions = @[ vtemp ];

    AVAssetExportSession *exporter =
        [[AVAssetExportSession alloc] initWithAsset:composition
                                         presetName:gVideoExportQuality];
    NSParameterAssert(exporter != nil);
    exporter.videoComposition = videoComposition;
    exporter.outputFileType = AVFileTypeQuickTimeMovie;

    // stringByAppendingPathComponent: handles the presence/absence of a
    // trailing slash on NSTemporaryDirectory(), unlike raw %@%@ concatenation.
    NSString *rootName = [[self captureManager] tempFileRoot];
    NSString *outputPath = [NSTemporaryDirectory() stringByAppendingPathComponent:
                               [NSString stringWithFormat:@"%@.mov", rootName]];
    exporter.outputURL = [NSURL fileURLWithPath:outputPath];

    [exporter exportAsynchronouslyWithCompletionHandler:^{
        switch ([exporter status]) {
            case AVAssetExportSessionStatusFailed:
                NSLog(@"Export failed: %@", [exporter error]);
                break;
            case AVAssetExportSessionStatusCancelled:
                NSLog(@"Export canceled");
                break;
            case AVAssetExportSessionStatusCompleted:
                NSLog(@"Export successfully");
                [self exportFile:exporter.outputURL];
                [self.delegate recordingEndedWithFile:exporter.outputURL];
                isExporting = FALSE;
                // Resume the capture session that was stopped before export.
                [[[self captureManager] session] startRunning];
                break;
            default:
                break;
        }
        if (exporter.status != AVAssetExportSessionStatusCompleted) {
            NSLog(@"Retry export");
        }
    }];
}