
I currently have an iPhone app that lets users record videos, upload them to a server, and watch other people's videos from within the app. Video orientation was never a problem until I went to build a website to view the different videos (among other things).

I pull the videos from the web service and load them via ajax into videojs, but every one of them is rotated 90 degrees to the left. From what I've read, it sounds like the orientation information can be read on iOS but not by the website. Is there any way to save the video with the new orientation on the iPhone before it is sent to the server?
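For reference, one way to confirm on the device that the rotation lives only in the file's metadata is to read the video track's preferredTransform before uploading; a minimal sketch (recordedURL is a placeholder for wherever the captured movie is stored):

    AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:recordedURL options:nil];
    AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    CGAffineTransform t = videoTrack.preferredTransform;
    // A portrait recording typically stores its frames sideways and expresses
    // the rotation only in this transform; players that ignore the transform
    // (like the web player here) show the raw, un-rotated frames.
    if (t.a == 0 && t.b == 1.0 && t.c == -1.0 && t.d == 0) {
        NSLog(@"Video is portrait; pixels are stored rotated 90 degrees");
    }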


2 Answers

// Change the orientation of the video

// videoorientationViewController.h

#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
@interface videoorientationViewController : UIViewController
@property AVMutableComposition *mutableComposition;
@property AVMutableVideoComposition *mutableVideoComposition;
@property AVMutableAudioMix *mutableAudioMix;
@property AVAssetExportSession *exportSession;
- (void)performWithAsset:(NSURL *)moviename;
@end

// videoorientationViewController.m

// Degree/radian conversion helpers (assumed definitions; the code below
// calls them but they are not declared elsewhere in the answer)
#define degreesToRadians(x) (M_PI * (x) / 180.0)
#define radiansToDegrees(x) ((x) * 180.0 / M_PI)

- (void)performWithAsset:(NSURL *)moviename
{
    self.mutableComposition = nil;
    self.mutableVideoComposition = nil;
    self.mutableAudioMix = nil;

    AVAsset *asset = [[AVURLAsset alloc] initWithURL:moviename options:nil];

    AVMutableVideoCompositionInstruction *instruction = nil;
    AVMutableVideoCompositionLayerInstruction *layerInstruction = nil;
    CGAffineTransform t1;
    CGAffineTransform t2;

    AVAssetTrack *assetVideoTrack = nil;
    AVAssetTrack *assetAudioTrack = nil;
    // Check if the asset contains video and audio tracks
    if ([[asset tracksWithMediaType:AVMediaTypeVideo] count] != 0) {
        assetVideoTrack = [asset tracksWithMediaType:AVMediaTypeVideo][0];
    }
    if ([[asset tracksWithMediaType:AVMediaTypeAudio] count] != 0) {
        assetAudioTrack = [asset tracksWithMediaType:AVMediaTypeAudio][0];
    }

    CMTime insertionPoint = kCMTimeZero;
    NSError *error = nil;


    // Step 1
    // Create a composition with the given asset and insert audio and video tracks into it from the asset
    if (!self.mutableComposition) {

        // Check whether a composition has already been created, i.e, some other tool has already been applied
        // Create a new composition
        self.mutableComposition = [AVMutableComposition composition];

        // Insert the video and audio tracks from AVAsset
        if (assetVideoTrack != nil) {
            AVMutableCompositionTrack *compositionVideoTrack = [self.mutableComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
            [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, [asset duration]) ofTrack:assetVideoTrack atTime:insertionPoint error:&error];
        }
        if (assetAudioTrack != nil) {
            AVMutableCompositionTrack *compositionAudioTrack = [self.mutableComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
            [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, [asset duration]) ofTrack:assetAudioTrack atTime:insertionPoint error:&error];
        }

    }


    // Step 2
    // Translate the composition to compensate the movement caused by rotation (since rotation would cause it to move out of frame)
    t1 = CGAffineTransformMakeTranslation(assetVideoTrack.naturalSize.height, 0.0);
    float width = assetVideoTrack.naturalSize.width;
    float height = assetVideoTrack.naturalSize.height;
    float toDiagonal = sqrt(width * width + height * height);
    float toDiagonalAngle = radiansToDegrees(acosf(width / toDiagonal));
    float toDiagonalAngle2 = 90 - radiansToDegrees(acosf(width / toDiagonal));

    float toDiagonalAngleComple;
    float toDiagonalAngleComple2;
    float finalHeight = 0.0;
    float finalWidth = 0.0;

    float degrees=90;
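    // The branches below compute, for each quadrant of rotation, the bounding
    // box of the rotated frame (finalWidth x finalHeight) from the frame's
    // diagonal, plus a translation t1 that shifts the rotated picture back
    // into view (the rotation pivots on the composition's top-left corner).
    // With degrees fixed at 90 only the first branch runs, so the render size
    // becomes height x width.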

    if (degrees >= 0 && degrees <= 90) {

        toDiagonalAngleComple = toDiagonalAngle + degrees;
        toDiagonalAngleComple2 = toDiagonalAngle2 + degrees;

        finalHeight = ABS(toDiagonal * sinf(degreesToRadians(toDiagonalAngleComple)));
        finalWidth = ABS(toDiagonal * sinf(degreesToRadians(toDiagonalAngleComple2)));

        t1 = CGAffineTransformMakeTranslation(height * sinf(degreesToRadians(degrees)), 0.0);
    }
    else if (degrees > 90 && degrees <= 180) {

        float degrees2 = degrees - 90;

        toDiagonalAngleComple = toDiagonalAngle + degrees2;
        toDiagonalAngleComple2 = toDiagonalAngle2 + degrees2;

        finalHeight = ABS(toDiagonal * sinf(degreesToRadians(toDiagonalAngleComple2)));
        finalWidth = ABS(toDiagonal * sinf(degreesToRadians(toDiagonalAngleComple)));

        t1 = CGAffineTransformMakeTranslation(width * sinf(degreesToRadians(degrees2)) + height * cosf(degreesToRadians(degrees2)), height * sinf(degreesToRadians(degrees2)));
    }
    else if (degrees >= -90 && degrees < 0) {

        float degrees2 = degrees - 90;
        float degreesabs = ABS(degrees);

        toDiagonalAngleComple = toDiagonalAngle + degrees2;
        toDiagonalAngleComple2 = toDiagonalAngle2 + degrees2;

        finalHeight = ABS(toDiagonal * sinf(degreesToRadians(toDiagonalAngleComple2)));
        finalWidth = ABS(toDiagonal * sinf(degreesToRadians(toDiagonalAngleComple)));

        t1 = CGAffineTransformMakeTranslation(0, width * sinf(degreesToRadians(degreesabs)));
    }
    else if (degrees >= -180 && degrees < -90) {

        float degreesabs = ABS(degrees);
        float degreesplus = degreesabs - 90;

        toDiagonalAngleComple = toDiagonalAngle + degrees;
        toDiagonalAngleComple2 = toDiagonalAngle2 + degrees;

        finalHeight = ABS(toDiagonal * sinf(degreesToRadians(toDiagonalAngleComple)));
        finalWidth = ABS(toDiagonal * sinf(degreesToRadians(toDiagonalAngleComple2)));

        t1 = CGAffineTransformMakeTranslation(width * sinf(degreesToRadians(degreesplus)), height * sinf(degreesToRadians(degreesplus)) + width * cosf(degreesToRadians(degreesplus)));
    }


    // Rotate transformation
    t2 = CGAffineTransformRotate(t1, degreesToRadians(degrees));


    // Step 3
    // Set the appropriate render sizes and rotational transforms
    if (!self.mutableVideoComposition) {

        // Create a new video composition
        self.mutableVideoComposition = [AVMutableVideoComposition videoComposition];
        // self.mutableVideoComposition.renderSize = CGSizeMake(assetVideoTrack.naturalSize.height,assetVideoTrack.naturalSize.width);
        self.mutableVideoComposition.renderSize = CGSizeMake(finalWidth,finalHeight);

        self.mutableVideoComposition.frameDuration = CMTimeMake(1,30);

        // The rotate transform is set on a layer instruction
        instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
        instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [self.mutableComposition duration]);
        layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:(self.mutableComposition.tracks)[0]];
        [layerInstruction setTransform:t2 atTime:kCMTimeZero];

    } else {

        self.mutableVideoComposition.renderSize = CGSizeMake(self.mutableVideoComposition.renderSize.height, self.mutableVideoComposition.renderSize.width);

        // Extract the existing layer instruction on the mutableVideoComposition
        instruction = (self.mutableVideoComposition.instructions)[0];
        layerInstruction = (instruction.layerInstructions)[0];

        // Check if a transform already exists on this layer instruction, this is done to add the current transform on top of previous edits
        CGAffineTransform existingTransform;

        if (![layerInstruction getTransformRampForTime:[self.mutableComposition duration] startTransform:&existingTransform endTransform:NULL timeRange:NULL]) {
            [layerInstruction setTransform:t2 atTime:kCMTimeZero];
        } else {
            // Note: the point of origin for rotation is the upper left corner of the composition, t3 is to compensate for origin
            CGAffineTransform t3 = CGAffineTransformMakeTranslation(-1*assetVideoTrack.naturalSize.height/2, 0.0);
            CGAffineTransform newTransform = CGAffineTransformConcat(existingTransform, CGAffineTransformConcat(t2, t3));
            [layerInstruction setTransform:newTransform atTime:kCMTimeZero];
        }

    }


    // Step 4
    // Add the transform instructions to the video composition
    instruction.layerInstructions = @[layerInstruction];
    self.mutableVideoComposition.instructions = @[instruction];


    // Step 5
    // Kick off the export of the rotated composition
    [self performWithAssetExport];
}

- (void)performWithAssetExport
{
    // Step 1
    // Create an outputURL to which the exported movie will be saved

    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *outputURL = paths[0];
    NSFileManager *manager = [NSFileManager defaultManager];
    [manager createDirectoryAtPath:outputURL withIntermediateDirectories:YES attributes:nil error:nil];
    outputURL = [outputURL stringByAppendingPathComponent:@"output.mov"];
    // Remove Existing File
    [manager removeItemAtPath:outputURL error:nil];


    // Step 2
    // Create an export session with the composition and write the exported movie to the photo library
    self.exportSession = [[AVAssetExportSession alloc] initWithAsset:[self.mutableComposition copy] presetName:AVAssetExportPreset1280x720];

    self.exportSession.videoComposition = self.mutableVideoComposition;
    self.exportSession.audioMix = self.mutableAudioMix;
    self.exportSession.outputURL = [NSURL fileURLWithPath:outputURL];
    self.exportSession.outputFileType=AVFileTypeQuickTimeMovie;
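    // Optional tweak for the upload-to-web use case: fast start moves the
    // movie header to the front of the file, which helps progressive playback
    // in the browser.
    // self.exportSession.shouldOptimizeForNetworkUse = YES;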


    [self.exportSession exportAsynchronouslyWithCompletionHandler:^(void){
        switch (self.exportSession.status) {
            case AVAssetExportSessionStatusCompleted:
                // Export finished; notify listeners that the rotated movie is ready
                [[NSNotificationCenter defaultCenter] postNotificationName:@"Backhome" object:nil];
                break;
            case AVAssetExportSessionStatusFailed:
                NSLog(@"Failed:%@",self.exportSession.error);
                break;
            case AVAssetExportSessionStatusCancelled:
                NSLog(@"Canceled:%@",self.exportSession.error);
                break;
            default:
                break;
        }
    }];



}
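For reference, a hypothetical caller might look like the following; recordedURL is assumed to be the file URL of the captured movie, and the rotated export ends up at Documents/output.mov, ready to upload:

    videoorientationViewController *rotator = [[videoorientationViewController alloc] init];

    // The export runs asynchronously; the completion handler above posts the
    // "Backhome" notification once the rotated file has been written.
    [[NSNotificationCenter defaultCenter] addObserverForName:@"Backhome"
                                                      object:nil
                                                       queue:[NSOperationQueue mainQueue]
                                                  usingBlock:^(NSNotification *note) {
        NSString *docs = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES)[0];
        NSURL *exportedURL = [NSURL fileURLWithPath:[docs stringByAppendingPathComponent:@"output.mov"]];
        NSLog(@"Rotated movie ready at %@ - upload it from here", exportedURL);
    }];

    // Re-encode the movie with the rotation baked into the pixels
    [rotator performWithAsset:recordedURL];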
Answered 2016-04-20T09:49:37.793

It seems this problem occurs because videojs cannot read the orientation metadata. More information here: http://help.videojs.com/discussions/problems/1508-video-orientation-for-iphone-wrong

Based on the solution implied there, you should make sure you are setting the orientation value with AVFoundation when the video is saved. Information on how to do this is in an earlier Stack Overflow post: How do I set the orientation for a frame-by-frame generated video using AVFoundation?
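The gist of that approach is to record the orientation as a transform at the time the file is written; a minimal sketch, assuming an AVAssetWriter pipeline and a videoSettings dictionary you already have:

    AVAssetWriterInput *writerInput =
        [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                           outputSettings:videoSettings];
    // Store the rotation in the file's metadata; players that honour the
    // transform (QuickTime, iOS) will then display the video upright.
    writerInput.transform = CGAffineTransformMakeRotation(M_PI_2);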

Answered 2012-06-27T18:28:59.307