I'm not quite sure how to phrase this question, so I'd appreciate any feedback on how to improve it. I'm trying to write a function that takes the URL of a (local) video as input and produces a new video with a blurred background, with the original video scaled down and centered on top of it. My problem is that the code works fine, except when I use a video recorded directly with the iPhone camera.
An example of what I'm trying to achieve is shown below (produced by my code):
The input video here is an mp4. I've also gotten the code to work with mov files downloaded online. But when I use a mov file recorded with the iOS camera, I end up with the following result:
(How can I post images in a question so they take up less space?)
Now, the reason I don't know how to ask this question is that there is quite a lot of code involved in the process and I haven't been able to fully narrow the problem down, but I believe it lies in the functions I'll paste below. I'll also post a link to a GitHub repository with a bare-bones version of my project for anyone who's curious or willing to help. I should mention that the code I'm using was originally written by a StackOverflow user named TheTiger for this question: AVFoundation - Add blur background to video. I've refactored parts of it, and with their permission I'm posting this question here.
The link to my GitHub repo is here: GITHUB REPO. My demo is set up with 3 different videos: an mp4 downloaded from the web (works), a mov downloaded from the web (works), and a mov recorded on my phone (doesn't work).
The code I suspect is causing the problem is here:
fileprivate func addAllVideosAtCenterOfBlur(asset: AVURLAsset, blurVideo: AVURLAsset, scale: CGFloat, completion: @escaping BlurredBackgroundManagerCompletion) {
    let mixComposition = AVMutableComposition()
    var instructionLayers: [AVMutableVideoCompositionLayerInstruction] = []
    /// Blurred background track
    let blurVideoTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: kCMPersistentTrackID_Invalid)
    if let videoTrack = blurVideo.tracks(withMediaType: AVMediaType.video).first {
        let timeRange = CMTimeRange(start: .zero, duration: blurVideo.duration)
        try? blurVideoTrack?.insertTimeRange(timeRange, of: videoTrack, at: .zero)
    }
    /// Foreground (original) video track, scaled down and centered
    let timeRange = CMTimeRange(start: .zero, duration: asset.duration)
    let track = mixComposition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: kCMPersistentTrackID_Invalid)
    if let videoTrack = asset.tracks(withMediaType: AVMediaType.video).first {
        try? track?.insertTimeRange(timeRange, of: videoTrack, at: .zero)
        let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track!)
        let properties = scaleAndPositionInAspectFitMode(forTrack: videoTrack, inArea: size, scale: scale)
        let videoOrientation = videoTrack.getVideoOrientation()
        let assetSize = videoTrack.assetSize()
        let preferredTransform = getPreferredTransform(videoOrientation: videoOrientation, assetSize: assetSize, defaultTransform: asset.preferredTransform, properties: properties)
        layerInstruction.setTransform(preferredTransform, at: .zero)
        instructionLayers.append(layerInstruction)
    }
    /// Adding audio
    if let audioTrack = asset.tracks(withMediaType: AVMediaType.audio).first {
        let aTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.audio, preferredTrackID: kCMPersistentTrackID_Invalid)
        try? aTrack?.insertTimeRange(timeRange, of: audioTrack, at: .zero)
    }
    /// Blur layer instruction
    let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: blurVideoTrack!)
    instructionLayers.append(layerInstruction)
    let mainInstruction = AVMutableVideoCompositionInstruction()
    mainInstruction.timeRange = timeRange
    mainInstruction.layerInstructions = instructionLayers
    let mainCompositionInst = AVMutableVideoComposition()
    mainCompositionInst.instructions = [mainInstruction]
    mainCompositionInst.frameDuration = CMTimeMake(value: 1, timescale: 30)
    mainCompositionInst.renderSize = size
    let url = self.videoOutputUrl(filename: "finalBlurred")
    try? FileManager.default.removeItem(at: url)
    performExport(composition: mixComposition, instructions: mainCompositionInst, stage: 2, outputUrl: url) { (error) in
        if let error = error {
            completion(nil, error)
        } else {
            completion(url, nil)
        }
    }
}
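For what it's worth, the camera-recorded file is the only one whose video track reports a non-identity preferredTransform. Here's a quick diagnostic I've been using to compare the sources (inspectTrackGeometry is just a local debug helper of mine, not part of the repo):

import AVFoundation

/// Debug helper: dumps the track geometry that differs between
/// downloaded and camera-recorded files. Not part of the repo.
func inspectTrackGeometry(of asset: AVURLAsset) {
    guard let track = asset.tracks(withMediaType: .video).first else { return }
    let t = track.preferredTransform
    print("naturalSize: \(track.naturalSize)")
    print("preferredTransform: a=\(t.a) b=\(t.b) c=\(t.c) d=\(t.d) tx=\(t.tx) ty=\(t.ty)")
    // The downloaded mp4/mov files report the identity transform here
    // (a=1, b=0, c=0, d=1); the iPhone camera recording does not.
}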
The getPreferredTransform() function is also very relevant:
fileprivate func getPreferredTransform(videoOrientation: UIImage.Orientation, assetSize: CGSize, defaultTransform: CGAffineTransform, properties: Properties) -> CGAffineTransform {
    switch videoOrientation {
    case .down:
        return handleDownOrientation(assetSize: assetSize, defaultTransform: defaultTransform, properties: properties)
    case .left:
        return handleLeftOrientation(assetSize: assetSize, defaultTransform: defaultTransform, properties: properties)
    case .right:
        return handleRightOrientation(properties: properties)
    case .up:
        return handleUpOrientation(assetSize: assetSize, defaultTransform: defaultTransform, properties: properties)
    default:
        return handleOtherCases(assetSize: assetSize, defaultTransform: defaultTransform, properties: properties)
    }
}
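For context, getVideoOrientation() and assetSize() are small extensions on AVAssetTrack (the exact versions are in the repo). Roughly, they look like the sketch below, with the orientation derived from the track's preferredTransform; treat this as an approximation, not the authoritative code:

import AVFoundation
import UIKit

extension AVAssetTrack {
    /// Approximate sketch of the extension used above; the repo has
    /// the authoritative version. Maps preferredTransform onto a
    /// UIImage.Orientation.
    func getVideoOrientation() -> UIImage.Orientation {
        let t = preferredTransform
        switch (t.a, t.b, t.c, t.d) {
        case (0, 1, -1, 0):  return .right // portrait
        case (0, -1, 1, 0):  return .left  // portrait upside down
        case (1, 0, 0, 1):   return .up    // landscape
        case (-1, 0, 0, -1): return .down  // landscape rotated 180°
        default:             return .up
        }
    }

    /// The display size, i.e. naturalSize with the rotation applied.
    func assetSize() -> CGSize {
        let size = naturalSize.applying(preferredTransform)
        return CGSize(width: abs(size.width), height: abs(size.height))
    }
}

The individual orientation handlers that getPreferredTransform() dispatches to: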
fileprivate func handleDownOrientation(assetSize: CGSize, defaultTransform: CGAffineTransform, properties: Properties) -> CGAffineTransform {
    let rotateTransform = CGAffineTransform(rotationAngle: -CGFloat(Double.pi/2.0))
    // Scale
    let scaleTransform = CGAffineTransform(scaleX: properties.scale.width, y: properties.scale.height)
    // Translate
    var ytranslation: CGFloat = assetSize.height
    var xtranslation: CGFloat = 0
    if properties.position.y == 0 {
        xtranslation = -(assetSize.width - ((size.width/size.height) * assetSize.height))/2.0
    }
    else {
        ytranslation = assetSize.height - (assetSize.height - ((size.height/size.width) * assetSize.width))/2.0
    }
    let translationTransform = CGAffineTransform(translationX: xtranslation, y: ytranslation)
    // Final transformation - Concatenation
    let finalTransform = defaultTransform.concatenating(rotateTransform).concatenating(translationTransform).concatenating(scaleTransform)
    return finalTransform
}
fileprivate func handleLeftOrientation(assetSize: CGSize, defaultTransform: CGAffineTransform, properties: Properties) -> CGAffineTransform {
    let rotateTransform = CGAffineTransform(rotationAngle: -CGFloat(Double.pi))
    // Scale
    let scaleTransform = CGAffineTransform(scaleX: properties.scale.width, y: properties.scale.height)
    // Translate
    var ytranslation: CGFloat = assetSize.height
    var xtranslation: CGFloat = assetSize.width
    if properties.position.y == 0 {
        xtranslation = assetSize.width - (assetSize.width - ((size.width/size.height) * assetSize.height))/2.0
    } else {
        ytranslation = assetSize.height - (assetSize.height - ((size.height/size.width) * assetSize.width))/2.0
    }
    let translationTransform = CGAffineTransform(translationX: xtranslation, y: ytranslation)
    // Final transformation - Concatenation
    let finalTransform = defaultTransform.concatenating(rotateTransform).concatenating(translationTransform).concatenating(scaleTransform)
    return finalTransform
}
fileprivate func handleRightOrientation(properties: Properties) -> CGAffineTransform  {
    let scaleTransform = CGAffineTransform(scaleX: properties.scale.width, y: properties.scale.height)
    // Translate
    let translationTransform = CGAffineTransform(translationX: properties.position.x, y: properties.position.y)
    let finalTransform  = scaleTransform.concatenating(translationTransform)
    return finalTransform
}
fileprivate func handleUpOrientation(assetSize: CGSize, defaultTransform: CGAffineTransform, properties: Properties) -> CGAffineTransform {
    return handleOtherCases(assetSize: assetSize, defaultTransform: defaultTransform, properties: properties)
}
fileprivate func handleOtherCases(assetSize: CGSize, defaultTransform: CGAffineTransform, properties: Properties) -> CGAffineTransform {
    let rotateTransform = CGAffineTransform(rotationAngle: CGFloat(Double.pi/2.0))
    let scaleTransform = CGAffineTransform(scaleX: properties.scale.width, y: properties.scale.height)
    var ytranslation: CGFloat = 0
    var xtranslation: CGFloat = assetSize.width
    if properties.position.y == 0 {
        xtranslation = assetSize.width - (assetSize.width - ((size.width/size.height) * assetSize.height))/2.0
    }
    else {
        ytranslation = -(assetSize.height - ((size.height/size.width) * assetSize.width))/2.0
    }
    let translationTransform = CGAffineTransform(translationX: xtranslation, y: ytranslation)
    let finalTransform = defaultTransform.concatenating(rotateTransform).concatenating(translationTransform).concatenating(scaleTransform)
    return finalTransform
}
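One detail worth spelling out about these helpers: concatenating(_:) applies the receiver first and the argument second, so defaultTransform.concatenating(rotateTransform).concatenating(translationTransform).concatenating(scaleTransform) means "apply the asset's own transform, then rotate, then translate, then scale". A tiny self-contained check of that ordering:

import CoreGraphics

// concatenating(_:) applies the receiver first, then the argument:
// (0, 0) is translated to (10, 0) and *then* scaled to (20, 0).
let translate = CGAffineTransform(translationX: 10, y: 0)
let scale = CGAffineTransform(scaleX: 2, y: 2)
let combined = translate.concatenating(scale)
let p = CGPoint(x: 0, y: 0).applying(combined)
// p == (20.0, 0.0); with scale.concatenating(translate) it would be (10.0, 0.0)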


