I'm capturing video with AVCaptureSession and saving it to the camera roll, and everything works. The question is: how can I export the video with text written on it, or even place an image on it, during export? Thanks.
2 Answers
Look at AVMutableVideoCompositionLayerInstruction.
You basically need to add layer instructions to your video composition. I don't remember the link, but there is an Apple WWDC video about it.
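For reference, a minimal sketch of what "adding layer instructions" means, assuming `composition` is a hypothetical AVMutableComposition that already holds your captured video:

AVAssetTrack *videoTrack = [[composition tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];

AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, composition.duration);

// One layer instruction per video track; it can also carry transform/opacity ramps.
AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
[layerInstruction setTransform:videoTrack.preferredTransform atTime:kCMTimeZero];
instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];

AVMutableVideoComposition *videoComp = [AVMutableVideoComposition videoComposition];
videoComp.renderSize = videoTrack.naturalSize;
videoComp.frameDuration = CMTimeMake(1, 30); // 30 fps
videoComp.instructions = [NSArray arrayWithObject:instruction];
// Assign videoComp to the AVAssetExportSession's videoComposition before exporting.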
Answered 2013-10-24T08:21:39.667
I've managed to overlay an image and a CATextLayer on the bundled movie and export it, and everything works fine. But how can I make the text appear only at a specific frame (time)? A sketch of one approach follows the code below.
Here is my working code:
- (IBAction)btn1:(id)sender {
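// Load the bundled source movie and copy its video and audio tracks into a mutable composition.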
NSString *filePath = [[NSBundle mainBundle] pathForResource:@"movie" ofType:@"mov"];
//NSURL * urlpath = [NSURL URLWithString:filePath];
NSLog(@"%@",filePath);
AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:filePath] options:nil];
AVMutableComposition* mixComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack *clipVideoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:clipVideoTrack atTime:kCMTimeZero error:nil];
[compositionVideoTrack setPreferredTransform:[[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] preferredTransform]];
AVMutableCompositionTrack *audioTrack1 = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                      preferredTrackID:kCMPersistentTrackID_Invalid];
[audioTrack1 insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                     ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
                      atTime:kCMTimeZero
                       error:nil];
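// Build the Core Animation layer tree: a layer for the rendered video plus an image watermark and a text overlay.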
CGSize videoSize = [clipVideoTrack naturalSize];
UIImage *myImage = [UIImage imageNamed:@"close@2x.png"];
CALayer *aLayer = [CALayer layer];
aLayer.contents = (id)myImage.CGImage;
aLayer.frame = CGRectMake(videoSize.width - 65, videoSize.height - 75, 57, 57 );
aLayer.opacity = 0.65;
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:aLayer];
CATextLayer *titleLayer = [CATextLayer layer];
titleLayer.string = @"Text goes here";
titleLayer.font = (__bridge CFTypeRef)(@"Helvetica");
titleLayer.fontSize = videoSize.height / 6;
//?? titleLayer.shadowOpacity = 0.5;
titleLayer.alignmentMode = kCAAlignmentCenter;
titleLayer.bounds = CGRectMake(0, 0, videoSize.width, videoSize.height / 6); //You may need to adjust this for proper display
[parentLayer addSublayer:titleLayer];
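// Wrap the layer tree in a video composition so the overlays are rendered into the exported frames.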
AVMutableVideoComposition* videoComp = [AVMutableVideoComposition videoComposition];
videoComp.renderSize = videoSize;
videoComp.frameDuration = CMTimeMake(1, 30);
videoComp.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mixComposition duration]);
AVAssetTrack *videoTrack = [[mixComposition tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];
videoComp.instructions = [NSArray arrayWithObject:instruction];
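// Export the composition to the Documents directory as a QuickTime movie, replacing any previous output.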
AVAssetExportSession *assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality]; // or AVAssetExportPresetPassthrough
assetExport.videoComposition = videoComp;
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *VideoName = [NSString stringWithFormat:@"%@/mynewwatermarkedvideo.mp4", documentsDirectory];
//NSString *exportPath = [NSTemporaryDirectory() stringByAppendingPathComponent:VideoName];
NSURL *exportUrl = [NSURL fileURLWithPath:VideoName];
if ([[NSFileManager defaultManager] fileExistsAtPath:VideoName])
{
[[NSFileManager defaultManager] removeItemAtPath:VideoName error:nil];
}
assetExport.outputFileType = AVFileTypeQuickTimeMovie;
assetExport.outputURL = exportUrl;
assetExport.shouldOptimizeForNetworkUse = YES;
//[strRecordedFilename setString: exportPath];
[assetExport exportAsynchronouslyWithCompletionHandler:^(void) {
    //[assetExport release];
    dispatch_async(dispatch_get_main_queue(), ^{
        [self exportDidFinish:assetExport];
    });
}];
}
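// Called on the main queue once the export session finishes; saves the exported file to the photo library.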
-(void)exportDidFinish:(AVAssetExportSession*)session
{
NSURL *exportUrl = session.outputURL;
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
NSLog(@"%@",exportUrl);
if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:exportUrl])
{
[library writeVideoAtPathToSavedPhotosAlbum:exportUrl completionBlock:^(NSURL *assetURL, NSError *error)
{
dispatch_async(dispatch_get_main_queue(), ^{
if (error) {
UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"Error" message:@"Video Saving Failed"
delegate:nil cancelButtonT itle:@"OK" ot herButtonTitl es:nil];
[alert show];
} else {
UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"Video Saved" message:@"Saved To Photo Album"
delegate:self cancelButton Title:@"OK" o therButtonTit les:nil];
[alert show];
}
});
}];
}
NSLog(@"Completed");
UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"AlertView" message:@"Video is edited successfully." delegate:self cancelButtonTitle:@"OK" otherButtonTitles:nil];
[alert show];
}
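As for showing the text only at a specific time: a sketch of one approach (the 2-second and 5-second offsets below are just example values, not from my project) is to animate the CATextLayer's opacity with CABasicAnimations whose beginTime is offset from AVCoreAnimationBeginTimeAtZero. With AVVideoCompositionCoreAnimationTool those begin times are interpreted on the movie's timeline rather than as wall-clock time, so the text appears and disappears at the chosen frames.

// Sketch: make titleLayer visible only from t = 2 s to t = 5 s of the exported movie.
// AVCoreAnimationBeginTimeAtZero must be used instead of 0.0, because a beginTime of 0 means "now" to Core Animation.
titleLayer.opacity = 0.0; // hidden until the fade-in fires

CABasicAnimation *fadeIn = [CABasicAnimation animationWithKeyPath:@"opacity"];
fadeIn.fromValue = [NSNumber numberWithFloat:0.0];
fadeIn.toValue = [NSNumber numberWithFloat:1.0];
fadeIn.beginTime = AVCoreAnimationBeginTimeAtZero + 2.0; // example: appear at 2 seconds
fadeIn.duration = 0.1;
fadeIn.fillMode = kCAFillModeForwards;
fadeIn.removedOnCompletion = NO;
[titleLayer addAnimation:fadeIn forKey:@"fadeIn"];

CABasicAnimation *fadeOut = [CABasicAnimation animationWithKeyPath:@"opacity"];
fadeOut.fromValue = [NSNumber numberWithFloat:1.0];
fadeOut.toValue = [NSNumber numberWithFloat:0.0];
fadeOut.beginTime = AVCoreAnimationBeginTimeAtZero + 5.0; // example: disappear at 5 seconds
fadeOut.duration = 0.1;
fadeOut.fillMode = kCAFillModeForwards;
fadeOut.removedOnCompletion = NO;
[titleLayer addAnimation:fadeOut forKey:@"fadeOut"];

The animations have to be added to titleLayer before the export starts, i.e. in btn1: before the export session is created.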
Answered 2013-10-26T22:06:12.373