Here is an updated Swift version of @Jagie's code.
import AVFoundation

extension AVURLAsset
{
    /// Re-exports the asset to a file in the temporary directory, preserving its original orientation.
    func exportVideo(presetName: String = AVAssetExportPresetHighestQuality,
                     outputFileType: AVFileType = .mp4,
                     fileExtension: String = "mp4",
                     then completion: @escaping (URL?) -> Void)
    {
        let filename = url.deletingPathExtension().appendingPathExtension(fileExtension).lastPathComponent
        let outputURL = FileManager.default.temporaryDirectory.appendingPathComponent(filename)

        // Delete the old export, if one already exists.
        do {
            try FileManager.default.removeItem(at: outputURL)
        } catch {
            print(error.localizedDescription)
        }
        // Bail out if the asset has no audio track, but still report back to the caller.
        guard let sourceAudioTrack = tracks(withMediaType: .audio).first else {
            completion(nil)
            return
        }
        let composition = AVMutableComposition()
        let compositionAudioTrack = composition.addMutableTrack(withMediaType: .audio,
                                                                preferredTrackID: kCMPersistentTrackID_Invalid)
        do {
            try compositionAudioTrack?.insertTimeRange(CMTimeRange(start: .zero, duration: duration),
                                                       of: sourceAudioTrack,
                                                       at: .zero)
        } catch {
            print("error: \(error)")
        }
        if let session = AVAssetExportSession(asset: composition, presetName: presetName) {
            session.outputURL = outputURL
            session.outputFileType = outputFileType
            session.timeRange = CMTimeRange(start: .zero, duration: duration)
            session.shouldOptimizeForNetworkUse = true
            session.videoComposition = getVideoComposition(asset: self, composition: composition)
            session.exportAsynchronously {
                switch session.status {
                case .completed:
                    completion(outputURL)
                case .cancelled:
                    debugPrint("Video export cancelled.")
                    completion(nil)
                case .failed:
                    let errorMessage = session.error?.localizedDescription ?? "n/a"
                    debugPrint("Video export failed with error: \(errorMessage)")
                    completion(nil)
                default:
                    break
                }
            }
        } else {
            completion(nil)
        }
    }
    private func getVideoComposition(asset: AVAsset, composition: AVMutableComposition) -> AVMutableVideoComposition
    {
        let isPortrait = isVideoPortrait()

        let compositionVideoTrack = composition.addMutableTrack(withMediaType: .video,
                                                                preferredTrackID: kCMPersistentTrackID_Invalid)!
        let videoTrack = asset.tracks(withMediaType: .video).first!
        do {
            try compositionVideoTrack.insertTimeRange(CMTimeRange(start: .zero, duration: asset.duration),
                                                      of: videoTrack,
                                                      at: .zero)
        } catch {
            print("Error: \(error)")
        }
        // Carry the source track's preferred transform over so the export keeps its orientation.
        let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: compositionVideoTrack)
        layerInstruction.setTransform(videoTrack.preferredTransform, at: .zero)

        let instruction = AVMutableVideoCompositionInstruction()
        instruction.timeRange = CMTimeRange(start: .zero, duration: asset.duration)
        instruction.layerInstructions = [layerInstruction]

        let videoComposition = AVMutableVideoComposition()
        videoComposition.instructions = [instruction]

        // Portrait footage reports a landscape natural size, so swap width and height for rendering.
        var videoSize = videoTrack.naturalSize
        if isPortrait {
            print("video is portrait")
            videoSize = CGSize(width: videoSize.height, height: videoSize.width)
        } else {
            print("video is landscape")
        }
        videoComposition.renderSize = videoSize
        videoComposition.frameDuration = CMTimeMake(value: 1, timescale: 30)
        videoComposition.renderScale = 1.0
        return videoComposition
    }
    func isVideoPortrait() -> Bool
    {
        guard let videoTrack = tracks(withMediaType: .video).first else { return false }

        let t = videoTrack.preferredTransform
        // Portrait (rotated 90°)
        if t.a == 0 && t.b == 1.0 && t.c == -1.0 && t.d == 0 { return true }
        // Portrait upside down (rotated 270°)
        if t.a == 0 && t.b == -1.0 && t.c == 1.0 && t.d == 0 { return true }
        // Landscape right, landscape left, and any other transform count as landscape.
        return false
    }
}
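
For completeness, a call site could look roughly like the sketch below. The input path, the medium-quality preset, and the hop back to the main queue are my own illustrative assumptions, not part of the original answer; AVAssetExportSession may invoke its completion on a background queue, so dispatch to main before updating any UI.

import AVFoundation

// Hypothetical input file; substitute your own local video URL.
let videoURL = URL(fileURLWithPath: "/path/to/input.mov")
let asset = AVURLAsset(url: videoURL)

asset.exportVideo(presetName: AVAssetExportPresetMediumQuality) { exportedURL in
    // The export callback can arrive on a background queue, so hop to main before touching UI.
    DispatchQueue.main.async {
        if let exportedURL = exportedURL {
            print("Exported to \(exportedURL)")
        } else {
            print("Export failed")
        }
    }
}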