I want to build a simple app that creates videos with special effects. The code below takes a video loaded from the gallery and overlays an image and some text at the bottom of the view.

- (IBAction)MergeAndSave:(id)sender{
if (firstAsset != nil) {
    [ActivityView startAnimating];

    AVMutableComposition* mixComposition = [AVMutableComposition composition];


    AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    AVAssetTrack *clipVideoTrack = [[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
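    //compositionVideoTrack is the empty track just added to the composition; clipVideoTrack is the source clip's first video track, which gets copied into it below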

    //If you need audio as well add the Asset Track for audio here

    [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAsset.duration) ofTrack:clipVideoTrack atTime:kCMTimeZero error:nil];

    [compositionVideoTrack setPreferredTransform:[[[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] preferredTransform]];

    CGSize videoSize = clipVideoTrack.naturalSize; //use the track's naturalSize (AVAsset's naturalSize is deprecated)
    CALayer *parentLayer = [CALayer layer];
    CALayer *videoLayer = [CALayer layer];
    parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
    parentLayer.contentsGravity = kCAGravityCenter; //contentsGravity takes a kCAGravity* constant, not a text-alignment constant
    videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);

    UIImage *myImage = [UIImage imageNamed:@"apple.jpg"];
    CALayer *aLayer = [CALayer layer];
    aLayer.contents = (id)myImage.CGImage;
    aLayer.frame = CGRectMake(5, 25, 228, 228); //The layer needs a non-zero frame or it will not appear; adjust size and position to taste
    aLayer.opacity = 0.65; //Feel free to alter the alpha here

    CATextLayer *titleLayer = [CATextLayer layer];
    titleLayer.string = @"HERE";
    titleLayer.fontSize = videoSize.height / 10;
    //?? titleLayer.shadowOpacity = 0.5;
    titleLayer.alignmentMode = kCAAlignmentCenter;
    titleLayer.frame = CGRectMake(5, 25, 228, 228); //You may need to adjust this for proper display

    //animated cloud image

    UIImage *cloudImage = [UIImage imageNamed:@"cloud.png"];

    CALayer *cloud = [CALayer layer];
    cloud.contents = (id)cloudImage.CGImage;
    cloud.bounds = CGRectMake(0, 0, cloudImage.size.width, cloudImage.size.height);
    cloud.position = CGPointMake(videoSize.width / 2,
                                 cloudImage.size.height / 2);

    CGPoint startPt = CGPointMake(videoSize.width + cloud.bounds.size.width / 2,
                                  cloud.position.y);
    CGPoint endPt = CGPointMake(cloud.bounds.size.width / -2,
                                cloud.position.y);

    [parentLayer addSublayer:videoLayer];
    [parentLayer addSublayer:aLayer];
    [parentLayer addSublayer:titleLayer];
    [parentLayer addSublayer:cloud];

    CABasicAnimation *anim = [CABasicAnimation animationWithKeyPath:@"position"];
    anim.timingFunction = [CAMediaTimingFunction functionWithName:kCAMediaTimingFunctionLinear];
    anim.fromValue = [NSValue valueWithCGPoint:startPt];
    anim.toValue = [NSValue valueWithCGPoint:endPt];
    anim.repeatCount = HUGE_VALF;
    anim.duration = 8.0;
    //For animations rendered during export, a beginTime of 0 is replaced with CACurrentMediaTime();
    //use AVCoreAnimationBeginTimeAtZero instead, and keep the animation around after it finishes
    anim.beginTime = AVCoreAnimationBeginTimeAtZero;
    anim.removedOnCompletion = NO;
    [cloud addAnimation:anim forKey:@"position"];

    AVMutableVideoComposition* videoComp = [AVMutableVideoComposition videoComposition]; //videoComposition already returns an initialized instance; do not call -init on it again
    videoComp.renderSize = videoSize;
    videoComp.frameDuration = CMTimeMake(1, 30);
    videoComp.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
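    //During export, each source video frame is rendered into videoLayer, and the full parentLayer tree
    //(video plus the image, text and cloud overlays) is then rasterized to produce the output frame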

    /// instruction
    AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mixComposition duration]);
    AVAssetTrack *videoTrack = [[mixComposition tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    AVMutableVideoCompositionLayerInstruction* layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
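    //Note: with a custom videoComposition the track's preferredTransform is not applied automatically;
    //if rotated (portrait) footage renders sideways, it may need [layerInstruction setTransform:clipVideoTrack.preferredTransform atTime:kCMTimeZero]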
    instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];
    videoComp.instructions = [NSArray arrayWithObject: instruction];

    assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetMediumQuality];//AVAssetExportPresetPassthrough
    assetExport.videoComposition = videoComp;

    NSString* videoName = @"mynewwatermarkedvideo.mov";

    NSString *exportPath = [NSTemporaryDirectory() stringByAppendingPathComponent:videoName];
    NSURL *exportUrl = [NSURL fileURLWithPath:exportPath];

    if ([[NSFileManager defaultManager] fileExistsAtPath:exportPath])
    {
        [[NSFileManager defaultManager] removeItemAtPath:exportPath error:nil];
    }

    assetExport.outputFileType = AVFileTypeQuickTimeMovie;
    assetExport.outputURL = exportUrl;
    assetExport.shouldOptimizeForNetworkUse = YES;

    [assetExport exportAsynchronouslyWithCompletionHandler:
     ^(void) {
         [self exportDidFinish:assetExport];
     }
     ];
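     //The completion handler runs on a background queue, so any UI work in exportDidFinish:
     //(stopping ActivityView, presenting alerts) should be dispatched back to the main queue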

    }

}
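
exportDidFinish: is not included above. For completeness, a minimal sketch of that handler, assuming it only stops the spinner and saves the exported file to the photo library via ALAssetsLibrary (just one possible way to handle the finished movie; it needs #import <AssetsLibrary/AssetsLibrary.h>), could look like this:

- (void)exportDidFinish:(AVAssetExportSession *)session {
    //UI work back on the main queue
    dispatch_async(dispatch_get_main_queue(), ^{
        [ActivityView stopAnimating];
    });

    if (session.status == AVAssetExportSessionStatusCompleted) {
        NSURL *outputURL = session.outputURL;
        ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
        if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputURL]) {
            [library writeVideoAtPathToSavedPhotosAlbum:outputURL
                                        completionBlock:^(NSURL *assetURL, NSError *error) {
                                            if (error) {
                                                NSLog(@"Saving to the photo library failed: %@", error);
                                            }
                                        }];
        }
    } else if (session.status == AVAssetExportSessionStatusFailed) {
        NSLog(@"Export failed: %@", session.error);
    }
}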

Thank you for your help.
