I'm trying to build a video mashup app where the user sets up a timeline with a constant main video track, plus a second track acting as a b-roll track that occasionally overlays the main track to show some related content. I have the first track working: the clips on the timeline are arranged in the composition using AVMutableVideoCompositionInstructions, but I can't figure out how to work in an independently controlled b-roll track. I've been struggling with this for days! Below is the code that builds the first track's content; right now it's set up to fade to black between clips. Can any AVFoundation gurus give me a hint?
CMTime nextClipStartTime = kCMTimeZero;
NSInteger i;
CMTime transitionDuration = CMTimeMakeWithSeconds(1, 30);

// Track 0 carries the main clips; the second video/audio pair is the one I want
// to use for the b-roll; bedMusicTrack holds the music bed.
AVMutableCompositionTrack *compositionVideoTrack[2];
AVMutableCompositionTrack *compositionAudioTrack[2];
compositionVideoTrack[0] = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
compositionAudioTrack[0] = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
compositionAudioTrack[1] = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
compositionVideoTrack[1] = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *bedMusicTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];

i = 0;

// Audio mix parameters, keyed to the two clip audio tracks by trackID.
NSMutableArray *allAudioParams = [NSMutableArray array];
AVMutableAudioMixInputParameters *audioInputParams[2];
audioInputParams[0] = [AVMutableAudioMixInputParameters audioMixInputParameters];
audioInputParams[1] = [AVMutableAudioMixInputParameters audioMixInputParameters];
[audioInputParams[0] setTrackID:compositionAudioTrack[0].trackID];
[audioInputParams[1] setTrackID:compositionAudioTrack[1].trackID];

float lastVol = 0;
NSMutableArray *instructions = [NSMutableArray array];
// Lay the main clips end to end on track 0, stepping the clip audio around each
// cut and building fade-to-black video instructions between clips.
for (ClipInfo *info in videoLine.items) {
    AVAsset *asset = [AVAsset assetWithURL:info.url];
    CMTimeRange timeRangeInAsset = CMTimeRangeMake(info.inTime, info.duration);

    AVAssetTrack *clipVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    [compositionVideoTrack[0] insertTimeRange:timeRangeInAsset ofTrack:clipVideoTrack atTime:nextClipStartTime error:nil];
    AVAssetTrack *clipAudioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
    [compositionAudioTrack[0] insertTimeRange:timeRangeInAsset ofTrack:clipAudioTrack atTime:nextClipStartTime error:nil];

    // Hold the previous clip's volume until just before the cut, then switch
    // to this clip's volume at the cut.
    if (i != 0) {
        [audioInputParams[0] setVolume:lastVol atTime:CMTimeSubtract(nextClipStartTime, CMTimeMakeWithSeconds(1, 30))];
    }
    [audioInputParams[0] setVolume:info.volume atTime:nextClipStartTime];
    lastVol = info.volume;

    // The pass-through range for this clip, excluding any fade-in/out time.
    CMTime clipStartTime = (i == 0) ? nextClipStartTime : CMTimeAdd(nextClipStartTime, transitionDuration);
    CMTime clipDuration = (i == 0 || i == (videoLine.items.count - 1)) ? CMTimeSubtract(timeRangeInAsset.duration, transitionDuration) : CMTimeSubtract(timeRangeInAsset.duration, CMTimeMultiply(transitionDuration, 2));
    if ([videoLine.items count] == 1) {
        clipDuration = timeRangeInAsset.duration;
    }

    if (i != 0) {
        // Fade in from black at the start of this clip.
        AVMutableVideoCompositionInstruction *inInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
        inInstruction.timeRange = CMTimeRangeMake(nextClipStartTime, transitionDuration);
        AVMutableVideoCompositionLayerInstruction *fadeIn = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTrack[0]];
        [fadeIn setOpacityRampFromStartOpacity:0 toEndOpacity:1 timeRange:CMTimeRangeMake(nextClipStartTime, transitionDuration)];
        inInstruction.layerInstructions = [NSArray arrayWithObject:fadeIn];
        [instructions addObject:inInstruction];
    }

    // Pass-through for the body of the clip.
    AVMutableVideoCompositionInstruction *passThroughInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    passThroughInstruction.timeRange = CMTimeRangeMake(clipStartTime, clipDuration);
    AVMutableVideoCompositionLayerInstruction *passThroughLayer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTrack[0]];
    passThroughInstruction.layerInstructions = [NSArray arrayWithObject:passThroughLayer];
    [instructions addObject:passThroughInstruction];

    if (i < (videoLine.items.count - 1)) {
        // Fade out to black at the end of this clip.
        AVMutableVideoCompositionInstruction *outInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
        outInstruction.timeRange = CMTimeRangeMake(CMTimeAdd(clipStartTime, clipDuration), transitionDuration);
        AVMutableVideoCompositionLayerInstruction *fadeOut = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTrack[0]];
        [fadeOut setOpacityRampFromStartOpacity:1.0 toEndOpacity:0 timeRange:CMTimeRangeMake(CMTimeAdd(clipStartTime, clipDuration), transitionDuration)];
        outInstruction.layerInstructions = [NSArray arrayWithObject:fadeOut];
        [instructions addObject:outInstruction];
    }

    nextClipStartTime = CMTimeAdd(nextClipStartTime, timeRangeInAsset.duration);
    if (i == ([videoLine.items count] - 1)) {
        // Keep the last clip's volume through to the end of the timeline.
        [audioInputParams[0] setVolume:info.volume atTime:nextClipStartTime];
    }
    i++;
}
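In case it helps to show what I mean by an independently controlled b-roll track, this is roughly the direction I've been attempting (a sketch only, not working code: brollLine and its startTime property are placeholders for a second timeline collection shaped like videoLine). The idea is to drop each b-roll clip onto compositionVideoTrack[1] at its own position on the composition timeline and, for that range, use an instruction that layers track 1 over track 0 (layerInstructions are in z-order, topmost first):

// Sketch: b-roll clips on the second video track, shown over the main track.
// brollLine / startTime are assumed properties, mirroring videoLine's ClipInfo.
for (ClipInfo *broll in brollLine.items) {
    AVAsset *brollAsset = [AVAsset assetWithURL:broll.url];
    AVAssetTrack *brollVideoTrack = [[brollAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    [compositionVideoTrack[1] insertTimeRange:CMTimeRangeMake(broll.inTime, broll.duration)
                                      ofTrack:brollVideoTrack
                                       atTime:broll.startTime
                                        error:nil];

    // During the b-roll's range, put track 1 on top of track 0.
    AVMutableVideoCompositionInstruction *overlayInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    overlayInstruction.timeRange = CMTimeRangeMake(broll.startTime, broll.duration);
    AVMutableVideoCompositionLayerInstruction *brollLayer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTrack[1]];
    AVMutableVideoCompositionLayerInstruction *mainLayer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTrack[0]];
    overlayInstruction.layerInstructions = [NSArray arrayWithObjects:brollLayer, mainLayer, nil];
    [instructions addObject:overlayInstruction];
}

Where I get stuck is reconciling these overlay ranges with the fade/pass-through instructions built in the loop above: as far as I understand, a video composition's instruction time ranges can't overlap, so it seems I'd have to split every pass-through instruction wherever a b-roll clip starts or ends. Is that really the only way, or is there a cleaner approach to keeping the b-roll track independently controlled?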