I am capturing video in my iOS app using AVCaptureConnection. After that, I add some images to the video as CALayers. Everything works, but after adding the images I get a black frame at the very end of the resulting video. No actual audio/video frames are affected. For the audio, I extract it, change its pitch, and then add it back using AVMutableComposition. Here is the code I am using (a simplified sketch of my CALayer setup follows the first block). Please help me figure out what I am doing wrong, or what else I need to add.
// Mutable composition with one video and one audio track
cmp = [AVMutableComposition composition];
AVMutableCompositionTrack *videoComposition = [cmp addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *audioComposition = [cmp addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];

AVAssetTrack *sourceVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVAssetTrack *sourceAudioTrack = [[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];

// Insert the source video and the pitch-shifted audio for the full duration of the video asset
[videoComposition insertTimeRange:CMTimeRangeMake(kCMTimeZero, [asset duration]) ofTrack:sourceVideoTrack atTime:kCMTimeZero error:nil];
[audioComposition insertTimeRange:CMTimeRangeMake(kCMTimeZero, [asset duration]) ofTrack:sourceAudioTrack atTime:kCMTimeZero error:nil];

// Video composition that renders the CALayer overlay via the Core Animation tool
animComp = [AVMutableVideoComposition videoComposition];
animComp.renderSize = CGSizeMake(320, 320);
animComp.frameDuration = CMTimeMake(1, 30);
animComp.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
// Build an audio mix for the audio tracks of the composition
NSArray *tracksToDuck = [cmp tracksWithMediaType:AVMediaTypeAudio];
NSMutableArray *trackMixArray = [NSMutableArray array];
for (NSInteger i = 0; i < [tracksToDuck count]; i++) {
    AVMutableAudioMixInputParameters *trackMix = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:[tracksToDuck objectAtIndex:i]];
    [trackMix setVolume:5 atTime:kCMTimeZero];
    [trackMixArray addObject:trackMix];
}
audioMix = [AVMutableAudioMix audioMix];
audioMix.inputParameters = trackMixArray;
// One instruction covering the full duration of the source asset
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [asset duration]);
AVMutableVideoCompositionLayerInstruction *layerVideoInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoComposition];
[layerVideoInstruction setOpacity:1.0 atTime:kCMTimeZero];
instruction.layerInstructions = [NSArray arrayWithObject:layerVideoInstruction];
animComp.instructions = [NSArray arrayWithObject:instruction];

[self exportMovie:self];
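For context, the videoLayer and parentLayer passed to the animation tool are built roughly like this. This is a simplified sketch: overlayImage stands in for the images I add, and the frames are just placeholders.

// Simplified layer setup; overlayImage and the frames are placeholders
CALayer *parentLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, 320, 320);

CALayer *videoLayer = [CALayer layer];
videoLayer.frame = parentLayer.bounds;
[parentLayer addSublayer:videoLayer];

// Image overlay added on top of the video layer
CALayer *imageLayer = [CALayer layer];
imageLayer.contents = (id)overlayImage.CGImage;
imageLayer.frame = CGRectMake(20, 20, 100, 100);
[parentLayer addSublayer:imageLayer];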
Here is the method I use to export the video:
- (IBAction)exportMovie:(id)sender {
    //successCheck = NO;
    NSArray *docPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *tempPath = [docPaths objectAtIndex:0];
    //NSLog(@"Temp Path: %@", tempPath);
    NSString *fileName = [NSString stringWithFormat:@"%@/Final.MP4", tempPath];

    // Remove any previous export at the same path
    NSFileManager *fileManager = [NSFileManager defaultManager];
    if ([fileManager fileExistsAtPath:fileName]) {
        NSError *ferror = nil;
        [fileManager removeItemAtPath:fileName error:&ferror];
    }

    NSURL *exportURL = [NSURL fileURLWithPath:fileName];
    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:cmp presetName:AVAssetExportPresetMediumQuality];
    exporter.outputURL = exportURL;
    exporter.videoComposition = animComp;
    //exporter.audioMix = audioMix;
    exporter.outputFileType = AVFileTypeQuickTimeMovie;

    [exporter exportAsynchronouslyWithCompletionHandler:^(void) {
        switch (exporter.status) {
            case AVAssetExportSessionStatusFailed: {
                NSLog(@"Fail");
                break;
            }
            case AVAssetExportSessionStatusCompleted: {
                NSLog(@"Success video");
                break;
            }
            default:
                break;
        }
    }];
    NSLog(@"outside");
}
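In case it matters, asset and audioAsset referenced above are loaded roughly like this; the URLs are placeholders and the pitch-shifting step itself is not shown.

// Placeholder URLs: videoURL points to the captured movie, audioURL to the pitch-shifted audio file
asset = [AVURLAsset URLAssetWithURL:videoURL options:nil];
audioAsset = [AVURLAsset URLAssetWithURL:audioURL options:nil];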