
I'm trying to add some text on top of a video using AVVideoComposition and then save the video. This is the code I'm using:

I. Create an AVMutableComposition and an AVVideoComposition

var mutableComp =          AVMutableComposition()
var mutableVidComp =       AVMutableVideoComposition()
var compositionSize :      CGSize?

func configureAsset(){

    let options =               [AVURLAssetPreferPreciseDurationAndTimingKey : true]   // this key expects a Bool, not the string "true"
    let videoAsset =             AVURLAsset(url: Bundle.main.url(forResource: "Car", withExtension: "mp4")! , options : options)
    let videoAssetSourceTrack =  videoAsset.tracks(withMediaType: AVMediaTypeVideo).first! as AVAssetTrack

    compositionSize = videoAssetSourceTrack.naturalSize

    let mutableVidTrack =       mutableComp.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)
    let trackRange =            CMTimeRangeMake(kCMTimeZero, videoAsset.duration)

    do {
        try mutableVidTrack.insertTimeRange( trackRange, of: videoAssetSourceTrack, at: kCMTimeZero)

        mutableVidTrack.preferredTransform = videoAssetSourceTrack.preferredTransform

    }catch { print(error) }

    snapshot =       mutableComp   // `snapshot` is a stored property declared elsewhere in the class
    mutableVidComp = AVMutableVideoComposition(propertiesOf: videoAsset)
 }

II. Set up the layers

  func applyVideoEffectsToComposition()   {

    // 1 - Set up the text layer
    let subTitle1Text =            CATextLayer()
    subTitle1Text.font =           "Helvetica-Bold" as CFTypeRef
    subTitle1Text.frame =           CGRect(x: self.view.frame.midX - 60 , y: self.view.frame.midY - 50, width: 120, height: 100)
    subTitle1Text.string =         "Bench"
    subTitle1Text.foregroundColor = UIColor.black.cgColor
    subTitle1Text.alignmentMode =   kCAAlignmentCenter

    // 2 - The usual overlay
    let overlayLayer = CALayer()
    overlayLayer.addSublayer(subTitle1Text)
    overlayLayer.frame = CGRect(x: 0, y: 0, width: compositionSize!.width, height: compositionSize!.height)
    overlayLayer.masksToBounds = true


    // 3 - set up the parent layer
    let parentLayer =   CALayer()
    let videoLayer =    CALayer()
    parentLayer.frame = CGRect(x: 0, y: 0, width: compositionSize!.width, height: compositionSize!.height)
    videoLayer.frame =  CGRect(x: 0, y: 0, width: compositionSize!.width, height: compositionSize!.height)

    parentLayer.addSublayer(videoLayer)
    parentLayer.addSublayer(overlayLayer)

    mutableVidComp.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, in: parentLayer)

 }

III. Save the video with the AVMutableVideoComposition

func saveAsset (){

    func deleteFile(_ filePath:URL) {

        guard FileManager.default.fileExists(atPath: filePath.path) else { return }

        do {
            try FileManager.default.removeItem(atPath: filePath.path)
        } catch {
            fatalError("Unable to delete file: \(error) : \(#function).")
        }
    }


    let documentsDirectory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0] as URL
    let filePath =           documentsDirectory.appendingPathComponent("rendered-audio.mp4")
    deleteFile(filePath)

    if let exportSession = AVAssetExportSession(asset: mutableComp , presetName: AVAssetExportPresetHighestQuality){

        exportSession.videoComposition = mutableVidComp

        //  exportSession.canPerformMultiplePassesOverSourceMediaData = true
        exportSession.outputURL =                   filePath
        exportSession.shouldOptimizeForNetworkUse = true
        exportSession.timeRange =                   CMTimeRangeMake(kCMTimeZero, mutableComp.duration)
        exportSession.outputFileType =              AVFileTypeQuickTimeMovie



        exportSession.exportAsynchronously {
            print("finished: \(filePath) :  \(exportSession.status.rawValue) ")

            if exportSession.status == .failed {   // .failed == rawValue 4

                print("Export failed -> Reason: \(exportSession.error!.localizedDescription))")
                print(exportSession.error!)

            }

        }

    }

}

Then, for a quick test, I call all three methods from viewDidLoad. The problem is that when I run the app, the exported result is the original video without the title.
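
For reference, the quick test is just the three calls in order (a minimal sketch; the method names are the ones from the snippets above):

    override func viewDidLoad() {
        super.viewDidLoad()

        configureAsset()                    // I.   build the composition
        applyVideoEffectsToComposition()    // II.  attach the text and overlay layers
        saveAsset()                         // III. export
    }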

What am I missing here?

Update

I've noticed that giving subTitle1Text a backgroundColor in the second part of the code makes a rectangle of that color, matching the CGRect of subTitle1Text.frame, show up on top of the exported video.

(See image.)
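
Concretely, that observation comes from adding a single line to applyVideoEffectsToComposition (the color here is just an example):

    subTitle1Text.backgroundColor = UIColor.red.cgColor   // the colored rect exports, but the text still doesn't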

When I modify this code to play back with an AVSynchronizedLayer holding the desired layer instead, the text is visible on top of the video. So maybe this is a bug in AVFoundation itself.
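
Roughly, that playback variant looks like this (a sketch, assuming a player for the same composition and the subTitle1Text layer from above):

    let playerItem = AVPlayerItem(asset: mutableComp)
    let player = AVPlayer(playerItem: playerItem)

    // AVSynchronizedLayer ties the sublayers' timing to the player item
    let syncLayer = AVSynchronizedLayer(playerItem: playerItem)
    syncLayer.frame = view.bounds
    syncLayer.addSublayer(subTitle1Text)
    view.layer.addSublayer(syncLayer)

    player.play()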

I guess my only option is to use a customVideoCompositorClass. The problem with that is that rendering the video takes a lot of time. Here is an example that uses AVVideoCompositing.
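
For completeness, wiring up a custom compositor looks roughly like this. This is a minimal pass-through sketch of the AVVideoCompositing protocol; PassthroughCompositor is a hypothetical name, and a real compositor would also draw the text into each frame (which is exactly the slow part):

    class PassthroughCompositor: NSObject, AVVideoCompositing {

        let sourcePixelBufferAttributes: [String : Any]? =
            [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA]

        let requiredPixelBufferAttributesForRenderContext: [String : Any] =
            [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA]

        func renderContextChanged(_ newRenderContext: AVVideoCompositionRenderContext) { }

        func startRequest(_ request: AVAsynchronousVideoCompositionRequest) {
            // A real compositor would composite the text onto the frame here
            if let trackID = request.sourceTrackIDs.first,
               let frame = request.sourceFrame(byTrackID: trackID.int32Value) {
                request.finish(withComposedVideoFrame: frame)
            } else {
                request.finish(with: NSError(domain: "Compositor", code: -1, userInfo: nil))
            }
        }
    }

    // Then, instead of the animationTool:
    // mutableVidComp.customVideoCompositorClass = PassthroughCompositor.self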


1 Answer


Here is the complete working code I use in my project. It shows a CATextLayer at the bottom (0,0), and when the export session completes it swaps the newly written file into the player item. I use a model written in Objective-C to get the video orientation. Please test on a device: AVPlayer will not display the text layer correctly in the simulator.

    let composition = AVMutableComposition.init()

    let videoComposition = AVMutableVideoComposition()
    videoComposition.frameDuration = CMTimeMake(1, 30)
    videoComposition.renderScale  = 1.0

    let compositionCommentaryTrack: AVMutableCompositionTrack? = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)


    let compositionVideoTrack: AVMutableCompositionTrack? = composition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)


    let clipVideoTrack:AVAssetTrack = self.currentAsset.tracks(withMediaType: AVMediaTypeVideo)[0]

    let audioTrack: AVAssetTrack? = self.currentAsset.tracks(withMediaType: AVMediaTypeAudio)[0]

    try? compositionCommentaryTrack?.insertTimeRange(CMTimeRangeMake(kCMTimeZero, self.currentAsset.duration), of: audioTrack!, at: kCMTimeZero)

    try? compositionVideoTrack?.insertTimeRange(CMTimeRangeMake(kCMTimeZero, self.currentAsset.duration), of: clipVideoTrack, at: kCMTimeZero)

    let orientation = VideoModel.videoOrientation(self.currentAsset)
    var isPortrait = false

    switch orientation {
    case .landscapeRight:
        isPortrait = false
    case .landscapeLeft:
        isPortrait = false
    case .portrait:
        isPortrait = true
    case .portraitUpsideDown:
        isPortrait = true
    }

    var naturalSize = clipVideoTrack.naturalSize

    if isPortrait
    {
        naturalSize = CGSize.init(width: naturalSize.height, height: naturalSize.width)
    }

    videoComposition.renderSize = naturalSize

    let scale = CGFloat(1.0)

    var transform = CGAffineTransform.init(scaleX: CGFloat(scale), y: CGFloat(scale))

    switch orientation {
    case .landscapeRight: break
    case .landscapeLeft:
        transform = transform.translatedBy(x: naturalSize.width, y: naturalSize.height)
        transform = transform.rotated(by: .pi)
    case .portrait:
        transform = transform.translatedBy(x: naturalSize.width, y: 0)
        transform = transform.rotated(by: .pi / 2)
    case .portraitUpsideDown:break
    }

    let frontLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: compositionVideoTrack!)
    frontLayerInstruction.setTransform(transform, at: kCMTimeZero)

    let mainInstruction = AVMutableVideoCompositionInstruction()
    mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, composition.duration)
    mainInstruction.layerInstructions = [frontLayerInstruction]
    videoComposition.instructions = [mainInstruction]

    let parentLayer = CALayer.init()
    parentLayer.frame = CGRect.init(x: 0, y: 0, width: naturalSize.width, height: naturalSize.height)

    let videoLayer = CALayer.init()
    videoLayer.frame = parentLayer.frame


    let layer = CATextLayer()
    layer.string = "HELLO ALL"
    layer.foregroundColor = UIColor.white.cgColor
    layer.backgroundColor = UIColor.orange.cgColor
    layer.fontSize = 32
    layer.frame = CGRect.init(x: 0, y: 0, width: 300, height: 100)

    // Map the label's on-screen frame into the video's render-size coordinates
    var rct = layer.frame

    let widthScale = self.playerView.frame.size.width/naturalSize.width

    rct.size.width /= widthScale
    rct.size.height /= widthScale
    rct.origin.x /= widthScale
    rct.origin.y /= widthScale

    layer.frame = rct   // without this assignment the scaled rect is never applied



    parentLayer.addSublayer(videoLayer)
    parentLayer.addSublayer(layer)

    videoComposition.animationTool = AVVideoCompositionCoreAnimationTool.init(postProcessingAsVideoLayer: videoLayer, in: parentLayer)

    let documentsPath = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0]
    let videoPath = documentsPath+"/cropEditVideo.mov"

    let fileManager = FileManager.default

    if fileManager.fileExists(atPath: videoPath)
    {
        try! fileManager.removeItem(atPath: videoPath)
    }

    print("video path \(videoPath)")

    var exportSession = AVAssetExportSession.init(asset: composition, presetName: AVAssetExportPresetHighestQuality)
    exportSession?.videoComposition = videoComposition
    exportSession?.outputFileType = AVFileTypeQuickTimeMovie
    exportSession?.outputURL = URL.init(fileURLWithPath: videoPath)
    var exportProgress: Float = 0
    let queue = DispatchQueue(label: "Export Progress Queue")
    queue.async(execute: {() -> Void in
        while exportSession != nil {
            exportProgress = (exportSession?.progress)!
            print("current progress == \(exportProgress)")
            sleep(1)
        }
    })

    exportSession?.exportAsynchronously(completionHandler: {


        if exportSession?.status == AVAssetExportSessionStatus.failed
        {
            print("Failed \(exportSession?.error)")
            exportSession = nil   // also stop the progress-polling loop on failure
        }else if exportSession?.status == AVAssetExportSessionStatus.completed
        {
            exportSession = nil

            let asset = AVAsset.init(url: URL.init(fileURLWithPath: videoPath))
            DispatchQueue.main.async {
                let item = AVPlayerItem.init(asset: asset)


                self.player.replaceCurrentItem(with: item)

                let assetDuration = CMTimeGetSeconds(composition.duration)
                self.progressSlider.maximumValue = Float(assetDuration)

                self.syncLayer.removeFromSuperlayer()
                self.lblIntro.isHidden = true

                self.player.play()
                //                    let url =  URL.init(fileURLWithPath: videoPath)
                //                    let activityVC = UIActivityViewController(activityItems: [url], applicationActivities: [])
                //                    self.present(activityVC, animated: true, completion: nil)
            }

        }
    })

Below is the code of my VideoModel class:

+ (AVCaptureVideoOrientation)videoOrientation:(AVAsset *)asset   // class method, matching the VideoModel.videoOrientation(_:) call above
{
    AVCaptureVideoOrientation result = 0;
    NSArray *tracks = [asset tracksWithMediaType:AVMediaTypeVideo];
    if([tracks    count] > 0) {
        AVAssetTrack *videoTrack = [tracks objectAtIndex:0];
        CGAffineTransform t = videoTrack.preferredTransform;
        // Portrait
        if(t.a == 0 && t.b == 1.0 && t.c == -1.0 && t.d == 0)
        {
            result = AVCaptureVideoOrientationPortrait;
        }
        // PortraitUpsideDown
        if(t.a == 0 && t.b == -1.0 && t.c == 1.0 && t.d == 0)  {

            result = AVCaptureVideoOrientationPortraitUpsideDown;
        }
        // LandscapeRight
        if(t.a == 1.0 && t.b == 0 && t.c == 0 && t.d == 1.0)
        {
            result = AVCaptureVideoOrientationLandscapeRight;
        }
        // LandscapeLeft
        if(t.a == -1.0 && t.b == 0 && t.c == 0 && t.d == -1.0)
        {
            result = AVCaptureVideoOrientationLandscapeLeft;
        }
    }
    return result;
}
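
If you would rather keep everything in Swift, a rough port of the same transform checks might look like this (the fallback when no video track exists is my assumption; the Objective-C version returns 0 there):

    func videoOrientation(for asset: AVAsset) -> AVCaptureVideoOrientation {
        guard let t = asset.tracks(withMediaType: AVMediaTypeVideo).first?.preferredTransform else {
            return .landscapeRight   // assumed fallback; the Objective-C version returns 0 here
        }
        if t.a ==  0   && t.b ==  1.0 && t.c == -1.0 && t.d ==  0   { return .portrait }
        if t.a ==  0   && t.b == -1.0 && t.c ==  1.0 && t.d ==  0   { return .portraitUpsideDown }
        if t.a == -1.0 && t.b ==  0   && t.c ==  0   && t.d == -1.0 { return .landscapeLeft }
        return .landscapeRight       // identity transform
    }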

Let me know if you need any more help with this.

answered Jul 25, 2017 at 18:37