7

将 AVVideoComposition 应用到我的 AVPlayerItem 之后,我应用的过滤器确实有效,但视频在 AVPlayerLayer 中发生了旋转。

我知道问题不在于过滤后的帧本身,因为如果我把同一帧显示在 UIImageView 中,它会 100% 正确地呈现。

在我应用 videoComposition 之后,在 AVPlayerLayer 上设置 videoGravity 也无济于事。

视频顺时针旋转 90º 并在图层中拉伸。

本质上:在我通过 AVMutableVideoComposition 对 AVPlayerItem 进行过滤之前,视频在 AVPlayerLayer 中是正确播放的。一旦应用了过滤,视频就会被旋转 -90º,然后被缩放到与过滤前相同的尺寸。这表明播放器没有意识到帧的变换已经是正确的,因此对其重新应用了一次变换。

为什么会发生这种情况,我该如何解决?

这是一些代码:

/// Applies `filter` to the current player item by rebuilding the item's asset
/// inside an AVMutableComposition and copying the source track's
/// preferredTransform across, so orientation metadata is not lost when the
/// videoComposition is attached.
private func filterVideo(with filter: Filter?) {
    guard let player = player,
          let playerItem = player.currentItem,
          // Bail out instead of force-unwrapping: an audio-only asset has no video track.
          let videoAssetTrack = playerItem.asset.tracks(withMediaType: .video).first else { return }

    let composition = AVMutableComposition()
    guard let videoCompositionTrack = composition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid) else { return }

    do {
        try videoCompositionTrack.insertTimeRange(CMTimeRange(start: kCMTimeZero, duration: playerItem.asset.duration), of: videoAssetTrack, at: kCMTimeZero)
    } catch {
        // Insertion failed — leave the item unfiltered rather than attaching
        // a composition with an empty track (the original `try?` hid this case).
        return
    }

    // Key step: carry the source track's orientation transform over to the
    // composition track so the player does not re-rotate the frames.
    videoCompositionTrack.preferredTransform = videoAssetTrack.preferredTransform

    let videoComposition = AVMutableVideoComposition(asset: composition, applyingCIFiltersWithHandler: { request in
        let filteredImage = <...> // filtering elided in the original question
        request.finish(with: filteredImage, context: nil)
    })

    playerItem.videoComposition = videoComposition
}
4

5 回答 5

8

您的 AVVideoComposition 的 renderSize 有问题。您应该在 AVMutableVideoCompositionInstruction 上应用变换(即旋转和平移变换)。

我已经在Objective-c中完成了它并发布了我的代码。您可以将语法转换为Swift

目标-c

//------------------------------------
//      FIXING ORIENTATION
//------------------------------------
// Builds an AVMutableVideoComposition that plays two assets back to back,
// normalising each one's orientation with a per-layer affine transform,
// then exports the merged result. Relies on firstAsset/secondAsset,
// firstTrack/secondTrack and mixComposition being defined by the
// surrounding code (not visible in this snippet).


// One instruction spanning the full merged timeline (first + second asset).
AVMutableVideoCompositionInstruction * MainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeAdd(firstAsset.duration, secondAsset.duration));

// NOTE(review): the "First" layer instruction is built from secondTrack and
// the "Second" one below from firstTrack — verify this cross-pairing against
// the code that created firstTrack/secondTrack; it may be a copy-paste slip.
AVMutableVideoCompositionLayerInstruction *FirstlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:secondTrack]; // second

AVAssetTrack *FirstAssetTrack = [[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
UIImageOrientation FirstAssetOrientation_  = UIImageOrientationUp;
BOOL  isFirstAssetPortrait_  = NO;
// Classify the recording orientation by pattern-matching the track's
// preferredTransform matrix: (b=1,c=-1) and (b=-1,c=1) are 90º rotations,
// i.e. portrait captures; (a=1,d=1) is upright landscape; (a=-1,d=-1) is
// upside-down landscape.
CGAffineTransform firstTransform = FirstAssetTrack.preferredTransform;
if(firstTransform.a == 0 && firstTransform.b == 1.0 && firstTransform.c == -1.0 && firstTransform.d == 0)  {FirstAssetOrientation_= UIImageOrientationRight; isFirstAssetPortrait_ = YES;}
if(firstTransform.a == 0 && firstTransform.b == -1.0 && firstTransform.c == 1.0 && firstTransform.d == 0)  {FirstAssetOrientation_ =  UIImageOrientationLeft; isFirstAssetPortrait_ = YES;}
if(firstTransform.a == 1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == 1.0)   {FirstAssetOrientation_ =  UIImageOrientationUp;}
if(firstTransform.a == -1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == -1.0) {FirstAssetOrientation_ = UIImageOrientationDown;}
// Scale so the track fills the 320pt-wide render area (for portrait, the
// naturalSize width/height are swapped, hence the height-based ratio below).
CGFloat FirstAssetScaleToFitRatio = 320.0/FirstAssetTrack.naturalSize.width;
if(isFirstAssetPortrait_){
    FirstAssetScaleToFitRatio = 320.0/FirstAssetTrack.naturalSize.height;
    CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
    // Apply the capture rotation first, then the scale.
    [FirstlayerInstruction setTransform:CGAffineTransformConcat(FirstAssetTrack.preferredTransform, FirstAssetScaleFactor) atTime:kCMTimeZero];
}else{
    CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
    // Landscape: scale, then push down 160pt to vertically centre the video
    // inside the 320x480 render size.
    [FirstlayerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformConcat(FirstAssetTrack.preferredTransform, FirstAssetScaleFactor),CGAffineTransformMakeTranslation(0, 160)) atTime:kCMTimeZero];
}
// Hide the first layer once the first asset has finished playing.
[FirstlayerInstruction setOpacity:0.0 atTime:firstAsset.duration];



// Same orientation/scale treatment for the second asset.
AVMutableVideoCompositionLayerInstruction *SecondlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:firstTrack];
AVAssetTrack *SecondAssetTrack = [[secondAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
UIImageOrientation SecondAssetOrientation_  = UIImageOrientationUp;
BOOL  isSecondAssetPortrait_  = NO;
CGAffineTransform secondTransform = SecondAssetTrack.preferredTransform;
if(secondTransform.a == 0 && secondTransform.b == 1.0 && secondTransform.c == -1.0 && secondTransform.d == 0)  {SecondAssetOrientation_= UIImageOrientationRight; isSecondAssetPortrait_ = YES;}
if(secondTransform.a == 0 && secondTransform.b == -1.0 && secondTransform.c == 1.0 && secondTransform.d == 0)  {SecondAssetOrientation_ =  UIImageOrientationLeft; isSecondAssetPortrait_ = YES;}
if(secondTransform.a == 1.0 && secondTransform.b == 0 && secondTransform.c == 0 && secondTransform.d == 1.0)   {SecondAssetOrientation_ =  UIImageOrientationUp;}
if(secondTransform.a == -1.0 && secondTransform.b == 0 && secondTransform.c == 0 && secondTransform.d == -1.0) {SecondAssetOrientation_ = UIImageOrientationDown;}
CGFloat SecondAssetScaleToFitRatio = 320.0/SecondAssetTrack.naturalSize.width;
if(isSecondAssetPortrait_){
    SecondAssetScaleToFitRatio = 320.0/SecondAssetTrack.naturalSize.height;
    CGAffineTransform SecondAssetScaleFactor = CGAffineTransformMakeScale(SecondAssetScaleToFitRatio,SecondAssetScaleToFitRatio);
    [SecondlayerInstruction setTransform:CGAffineTransformConcat(SecondAssetTrack.preferredTransform, SecondAssetScaleFactor) atTime:firstAsset.duration];
}else{
    ; // NOTE(review): stray empty statement kept from the original.
    CGAffineTransform SecondAssetScaleFactor = CGAffineTransformMakeScale(SecondAssetScaleToFitRatio,SecondAssetScaleToFitRatio);
    // NOTE(review): the portrait branch above schedules this transform at
    // firstAsset.duration; using secondAsset.duration here looks like a typo
    // — confirm which boundary time is intended.
    [SecondlayerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformConcat(SecondAssetTrack.preferredTransform, SecondAssetScaleFactor),CGAffineTransformMakeTranslation(0, 160)) atTime:secondAsset.duration];
}


// NOTE(review): only SecondlayerInstruction is added here, so the
// FirstlayerInstruction configured above is never used; presumably this
// should be arrayWithObjects:FirstlayerInstruction, SecondlayerInstruction, nil.
MainInstruction.layerInstructions = [NSArray arrayWithObjects:SecondlayerInstruction,nil];;

AVMutableVideoComposition *MainCompositionInst = [AVMutableVideoComposition videoComposition];
MainCompositionInst.instructions = [NSArray arrayWithObject:MainInstruction];
MainCompositionInst.frameDuration = CMTimeMake(1, 30);
// Render size the scale/translate maths above (320-wide, +160 translation)
// is calibrated for.
MainCompositionInst.renderSize = CGSizeMake(320.0, 480.0);


// Now , you have Orientation Fixed Instrucation layer
// add this composition to your video 
// If you want to export Video than you can do like below

// Build a unique output path under the app's Documents directory.
NSString *documentsDirectory = [NSHomeDirectory()
stringByAppendingPathComponent:@"Documents"];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"final_merged_video-%d.mp4",arc4random() % 1000]];


NSURL *url = [NSURL fileURLWithPath:myPathDocs];
// 5 - Create exporter
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition
presetName:AVAssetExportPreset640x480];
exporter.outputURL=url;
// Attach the orientation-fixing composition so the export honours it.
exporter.videoComposition=MainCompositionInst;
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.shouldOptimizeForNetworkUse = YES;
[exporter exportAsynchronouslyWithCompletionHandler:^{
// Completion fires on a background queue; hop to main for UI work.
dispatch_async(dispatch_get_main_queue(), ^
{

[[AppDelegate Getdelegate] hideIndicator];

[self exportDidFinish:exporter];
});
}];

对于Swift 看这个答案点击这里

此外,您还可以尝试通过对其应用旋转变换来旋转视频层。

#define degreeToRadian(x) (M_PI * x / 180.0)

[_playerLayer setAffineTransform:CGAffineTransformMakeRotation(degreeToRadian(degree))]
于 2017-07-12T12:43:09.643 回答
4

如果你要使用 AVMutableComposition,你应该把 AVAssetTrack 的 preferredTransform 赋给 AVMutableCompositionTrack 的 preferredTransform:

// Wrap the source asset in an AVMutableComposition so a filtering
// videoComposition can be attached without losing orientation.
let asset = AVAsset(url: url!)

let composition = AVMutableComposition()
let compositionTrack = composition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)

// First video track of the source asset (force-unwrapped below — assumes
// the asset actually contains video; TODO confirm for audio-only URLs).
let videoTrack = asset.tracks(withMediaType: AVMediaTypeVideo).first

try? compositionTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, asset.duration), of: videoTrack!, at: kCMTimeZero)

// Key step: copy the source track's orientation transform onto the
// composition track so the player doesn't re-apply a rotation.
compositionTrack.preferredTransform = (videoTrack?.preferredTransform)!

let playerItem = AVPlayerItem(asset: composition)
let filter = CIFilter(name: "CIColorInvert")
// Per-frame CI filtering; the handler runs once per rendered frame.
playerItem.videoComposition = AVVideoComposition(asset: composition, applyingCIFiltersWithHandler: { (request: AVAsynchronousCIImageFilteringRequest) in
            filter?.setValue(request.sourceImage, forKey: kCIInputImageKey)
            request.finish(with: (filter?.outputImage)!, context: nil)
        })
 .... the rest of code
于 2017-07-04T19:31:57.043 回答
3

不要假设图像一定会被成功过滤,而是先检查 filteredImage 是否为 nil。如果不为 nil,就调用 request.finish(with: filteredImage, context: nil)。

但如果它是 nil,则必须调用 request.finish(with: someError) 来结束请求。

这是根据文档。

于 2017-07-07T02:32:14.947 回答
2

最后对我有用的是:

/// Installs a CI-filtering AVVideoComposition on the player's current item.
/// When `filter` is nil the source frame is passed through untouched; when
/// filtering fails the request is finished with an error instead of a frame.
private func filterVideo(with filter: Filter?) {
    guard let player = playerLayer?.player, let item = player.currentItem else { return }

    item.videoComposition = AVVideoComposition(asset: item.asset, applyingCIFiltersWithHandler: { request in
        let sourceExtent = request.sourceImage.extent

        guard let filter = filter else {
            // No filter selected — hand the source frame straight back.
            request.finish(with: request.sourceImage.cropping(to: sourceExtent), context: nil)
            return
        }

        guard let filtered = filter.filterImage(request.sourceImage) else {
            printError("Image not filtered")
            request.finish(with: RenderError.couldNotFilter)
            return
        }

        // Crop back to the source extent so the output geometry matches.
        request.finish(with: filtered.cropping(to: sourceExtent), context: nil)
    })
}

这是 Filter 的 filterImage 函数 —— Filter 只是对 CIFilter 的一个很好的小包装:

/// Runs `ciImage` through the wrapped CIFilter.
/// Returns nil when no filter is configured or when the filter produces no
/// CIImage output.
func filterImage(_ ciImage: CIImage) -> CIImage? {
    guard let wrapped = ciFilter else { return nil }
    wrapped.setDefaults()
    wrapped.setValue(ciImage, forKey: kCIInputImageKey)
    return wrapped.value(forKey: kCIOutputImageKey) as? CIImage
}
于 2018-06-11T10:57:40.997 回答
-2

试试下面这对我有用的代码

// Grab the source video track from the AVURLAsset, for example.
let assetV = YourAVASSET.tracks(withMediaType: AVMediaTypeVideo).last

// Grab the composition video track from the AVMutableComposition you already made.
let compositionV = YourCompostion.tracks(withMediaType: AVMediaTypeVideo).last

// Apply the original transform so the orientation metadata is preserved.
// (Fixes the original snippet, which declared `compositionV` but then used
// the undeclared, misspelled name `compostionV` — a compile error.)
if let assetTrack = assetV, let compositionTrack = compositionV {
    compositionTrack.preferredTransform = assetTrack.preferredTransform
}

然后继续导出您的视频...

于 2017-11-08T10:40:30.613 回答