I have been trying, so far without success, to add a watermark to an existing video using the AVFoundation framework. I followed the third answer to this question and tried to rewrite its code in Swift: iPhone Watermark on recorded Video. But it does not work for me.

Every time I run my code, all I get is a black video with the length of the source video that should carry the watermark. My goal is to fade the watermark in after 5 seconds.
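
From what I understand of the documentation, an animation that runs during an export should express its beginTime relative to AVCoreAnimationBeginTimeAtZero (Core Animation replaces a bare beginTime of 0 with CACurrentMediaTime), and it must not be removed on completion, otherwise the watermark becomes invisible again as soon as the animation ends. So I think the fade itself would look roughly like this sketch (overlayLayer and fadeIn are placeholder names, not my real code):

import AVFoundation
import QuartzCore

let overlayLayer = CALayer()          // stands in for the watermark layer

let fadeIn = CABasicAnimation(keyPath: "opacity")
fadeIn.fromValue = 0.0
fadeIn.toValue = 1.0
fadeIn.duration = 1.0
// Start the fade 5 seconds into the video. AVCoreAnimationBeginTimeAtZero marks the
// start of the composition's timeline (a bare 0 would be replaced by CACurrentMediaTime).
fadeIn.beginTime = AVCoreAnimationBeginTimeAtZero + 5.0
fadeIn.fillMode = kCAFillModeForwards
fadeIn.removedOnCompletion = false    // keep the watermark visible after the fade
overlayLayer.addAnimation(fadeIn, forKey: "fadeIn")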

Here is my code:

let composition = AVMutableComposition()
let vidAsset = AVURLAsset(URL: NSURL(fileURLWithPath: moviePath), options: nil)

// GET THE VIDEO TRACK
let vtrack =  vidAsset.tracksWithMediaType(AVMediaTypeVideo)
let videoTrack:AVAssetTrack = vtrack[0]
let vid_timerange = CMTimeRangeMake(kCMTimeZero, vidAsset.duration)

do {
    let compositionvideoTrack:AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())
    try compositionvideoTrack.insertTimeRange(vid_timerange, ofTrack: videoTrack, atTime: kCMTimeZero)

    compositionvideoTrack.preferredTransform = videoTrack.preferredTransform
} catch {
    print(error)
}

let animationImage: UIImage = self.artworkImage

let artWorkOverlayLayer: CALayer = CALayer()
artWorkOverlayLayer.contents = (animationImage.CGImage as! AnyObject)
artWorkOverlayLayer.frame = CGRectMake(0, 0, 512, 512)
artWorkOverlayLayer.opacity = 0
artWorkOverlayLayer.masksToBounds = true

let animation: CABasicAnimation = CABasicAnimation(keyPath: "opacity")
animation.duration = 10
animation.repeatCount = 0
animation.autoreverses = false
animation.fromValue = Int(0.0)
animation.toValue = Int(1.0)
animation.beginTime = 5.0
artWorkOverlayLayer.addAnimation(animation, forKey: "animateOpacity")

let videolayer = CALayer()
videolayer.frame = CGRectMake(0, 0, videoTrack.naturalSize.width, videoTrack.naturalSize.height)

let parentlayer = CALayer()
parentlayer.frame = CGRectMake(0, 0, videoTrack.naturalSize.width, videoTrack.naturalSize.height)
parentlayer.addSublayer(artWorkOverlayLayer)

let layercomposition = AVMutableVideoComposition()
layercomposition.frameDuration = CMTimeMake(1, 30)
layercomposition.renderSize = videoTrack.naturalSize
layercomposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videolayer, inLayer: parentlayer)

let instruction = AVMutableVideoCompositionInstruction()
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, composition.duration)
let videotrack = composition.tracksWithMediaType(AVMediaTypeVideo)[0] as AVAssetTrack
let layerinstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videotrack)
instruction.layerInstructions = NSArray(object: layerinstruction) as! [AVVideoCompositionLayerInstruction]
layercomposition.instructions = NSArray(object: instruction) as! [AVVideoCompositionInstructionProtocol]

// EXPORT
let filePath: NSURL = NSURL.fileURLWithPath(NSTemporaryDirectory().stringByAppendingString("output-tmp.mp4"))

let assetExportSession: AVAssetExportSession! = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality)

assetExportSession.outputFileType = AVFileTypeMPEG4
assetExportSession.outputURL = filePath
assetExportSession.videoComposition = layercomposition

assetExportSession.exportAsynchronouslyWithCompletionHandler({() -> Void in
    print(filePath)
})
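
As far as I can tell from the documentation of AVVideoCompositionCoreAnimationTool, the layer passed as postProcessingAsVideoLayer also has to be added to the parent layer's sublayer tree (below the watermark), because that is the layer the video frames are rendered into. This is only a sketch of my understanding, not code I know to be correct; makeAnimationTool, watermarkImage and videoSize are placeholder names:

import AVFoundation
import UIKit

func makeAnimationTool(watermarkImage: UIImage, videoSize: CGSize) -> AVVideoCompositionCoreAnimationTool {
    // Layer the composited video frames are rendered into.
    let videoLayer = CALayer()
    videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height)

    // Watermark overlay, initially invisible; the fade-in animation is added to this layer.
    let overlayLayer = CALayer()
    overlayLayer.contents = watermarkImage.CGImage
    overlayLayer.frame = CGRectMake(0, 0, 512, 512)
    overlayLayer.opacity = 0.0
    overlayLayer.masksToBounds = true

    // The parent layer contains BOTH layers, with the video below the watermark.
    let parentLayer = CALayer()
    parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height)
    parentLayer.addSublayer(videoLayer)
    parentLayer.addSublayer(overlayLayer)

    return AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, inLayer: parentLayer)
}

In the completion handler it is probably also worth checking the export session's status and error instead of only printing the output path, for example (using the same assetExportSession and filePath as above):

assetExportSession.exportAsynchronouslyWithCompletionHandler({() -> Void in
    if assetExportSession.status == AVAssetExportSessionStatus.Completed {
        print("exported to \(filePath)")
    } else {
        print("export failed (status \(assetExportSession.status.rawValue)): \(assetExportSession.error)")
    }
})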