我正在将叠加层(overlay)添加到视频中,并在 Facebook 和 WhatsApp 上分享。但是视频的质量(特别是在 WhatsApp 上)变得很差,而没有叠加层的其他视频质量是最好的。下面是用 overlay 编辑视频的代码:
// Builds a composition that draws `overlay.png` on top of the recorded video
// (`outputFileURL`) and exports the result to `exportUrl`.
//
// NOTE(review): the quality drop observed after sharing is caused by the
// receiving app (WhatsApp/Facebook) re-encoding the upload; this export
// already uses AVAssetExportPresetHighestQuality, which is the best an
// export-session preset can do. To control the encoded bitrate explicitly,
// AVAssetWriter with AVVideoCompressionPropertiesKey settings would be
// required — presets expose no bitrate knob, and Passthrough cannot be used
// together with a videoComposition.
AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:outputFileURL options:nil];
AVMutableComposition *mixComposition = [AVMutableComposition composition];

AVMutableCompositionTrack *compositionVideoTrack =
    [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *compositionAudioTrack =
    [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                preferredTrackID:kCMPersistentTrackID_Invalid];

// -firstObject is nil-safe; -objectAtIndex:0 would crash on an asset that
// has no audio track (e.g. a muted recording).
AVAssetTrack *clipVideoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];
AVAssetTrack *sourceAudioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] firstObject];

// Surface edit failures instead of passing error:nil — a silently empty
// composition would otherwise export as a black/blank video.
NSError *editError = nil;
if (![compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                                    ofTrack:clipVideoTrack
                                     atTime:kCMTimeZero
                                      error:&editError]) {
    NSLog(@"failed to insert video track: %@", editError);
}
if (sourceAudioTrack) {
    if (![compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                                        ofTrack:sourceAudioTrack
                                         atTime:kCMTimeZero
                                          error:&editError]) {
        NSLog(@"failed to insert audio track: %@", editError);
    }
}

[compositionVideoTrack setPreferredTransform:clipVideoTrack.preferredTransform];

AVMutableVideoCompositionInstruction *instruction =
    [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mixComposition duration]);

AVAssetTrack *videoTrack = [[mixComposition tracksWithMediaType:AVMediaTypeVideo] firstObject];
AVMutableVideoCompositionLayerInstruction *layerInstruction =
    [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];

// Detect portrait capture from the track's preferred transform. Exact 0/±1
// comparisons are safe here because camera transforms are exact 90°/180°
// rotation matrices. (The UIImageOrientation value the original computed was
// never read, so only the portrait flag is kept.)
CGAffineTransform videoTransform = clipVideoTrack.preferredTransform;
BOOL isVideoAssetPortrait =
    (videoTransform.a == 0 && videoTransform.d == 0 &&
     ((videoTransform.b == 1.0 && videoTransform.c == -1.0) ||     // rotated right
      (videoTransform.b == -1.0 && videoTransform.c == 1.0)));     // rotated left

// Apply the capture rotation inside the video composition, and keep the
// track visible for its full duration.
[layerInstruction setTransform:clipVideoTrack.preferredTransform atTime:kCMTimeZero];
[layerInstruction setOpacity:0.0 atTime:videoAsset.duration];

// Portrait footage reports a landscape naturalSize plus a 90° transform, so
// swap the dimensions to get the size as displayed.
CGSize naturalSize;
if (isVideoAssetPortrait) {
    naturalSize = CGSizeMake(clipVideoTrack.naturalSize.height, clipVideoTrack.naturalSize.width);
} else {
    naturalSize = clipVideoTrack.naturalSize;
}

// Overlay layer, sized in render-pixel coordinates so the watermark keeps a
// sensible proportion across resolutions.
UIImage *overlayImage = [UIImage imageNamed:@"overlay.png"];
CALayer *overlayLayer = [CALayer layer];
if (naturalSize.width > 359) {
    overlayLayer.frame = CGRectMake((naturalSize.width - 340) / 2, 0, 340, 90);
} else if (naturalSize.width > 200) {
    overlayLayer.frame = CGRectMake(10, 0, naturalSize.width - 20, 65);
} else {
    overlayLayer.frame = CGRectMake(15, 0, naturalSize.width - 30, 60);
}
overlayLayer.contents = (id)overlayImage.CGImage;
overlayLayer.backgroundColor = [UIColor clearColor].CGColor;
overlayLayer.masksToBounds = YES;

// Layer tree: parent ⊃ (video layer under overlay layer), all in render size.
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, naturalSize.width, naturalSize.height);
videoLayer.frame = CGRectMake(0, 0, naturalSize.width, naturalSize.height);
[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:overlayLayer];

NSLog(@"videoSize ++++: %@", NSStringFromCGSize(naturalSize));

AVMutableVideoComposition *videoComp = [AVMutableVideoComposition videoComposition];
// Render at the source's displayed size so no scaling (and therefore no
// extra quality loss) happens during composition.
videoComp.renderSize = naturalSize;
videoComp.frameDuration = CMTimeMake(1, 30); // 30 fps
videoComp.animationTool =
    [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer
                                                                                                 inLayer:parentLayer];
instruction.layerInstructions = @[ layerInstruction ];
// Assign instructions once, after the layer instruction is attached (the
// original assigned this property twice).
videoComp.instructions = @[ instruction ];

// NOTE(review): `assetExport` and `exportUrl` are declared outside this
// method (likely an ivar/property) — confirm ownership and the .mov file
// extension of `exportUrl` match AVFileTypeQuickTimeMovie.
assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                               presetName:AVAssetExportPresetHighestQuality];
assetExport.outputURL = exportUrl;
assetExport.outputFileType = AVFileTypeQuickTimeMovie;
assetExport.shouldOptimizeForNetworkUse = YES;
assetExport.videoComposition = videoComp;

[assetExport exportAsynchronouslyWithCompletionHandler:^{
    switch (assetExport.status) {
        case AVAssetExportSessionStatusFailed: {
            NSLog(@"Fail");
            NSLog(@"asset export fail error : %@", assetExport.error);
            dispatch_async(dispatch_get_main_queue(), ^{
                // TODO: surface the failure to the UI.
            });
            break;
        }
        case AVAssetExportSessionStatusCompleted: {
            NSLog(@"Success");
            dispatch_async(dispatch_get_main_queue(), ^{
                // TODO: hand the exported file off for sharing.
            });
            break;
        }
        default:
            break;
    }
}];
}
我究竟做错了什么?任何想法?