
I am trying to merge two videos that originally have different orientations.

Video A - portrait - 720x1280 - MOV
Video B - landscape - 640x480 - MP4

I started by resizing and cropping Video A:

- (void)resizeWithStyle:(NSString*)style {
NSString *filePath = [self.lastVideo path];
NSString *newPath = [filePath stringByReplacingOccurrencesOfString:@".mov" withString:@".mp4"];
NSURL *fullPath = [NSURL fileURLWithPath:newPath];
NSURL *path = [NSURL fileURLWithPath:filePath];

NSDictionary *options = @{ AVURLAssetPreferPreciseDurationAndTimingKey : @YES };
AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:self.lastVideo options:options];

NSInteger width = 640;
NSInteger height = 480;

if (([self orientationForTrack:asset] == UIInterfaceOrientationPortrait) || ([self orientationForTrack:asset] == UIInterfaceOrientationPortraitUpsideDown)) {
    width = 480;
    height = 640;
}

NSLog(@"Write Started");

NSError *error = nil;

NSString *styleKey = AVVideoScalingModeResizeAspectFill;
if ([style isEqualToString:@"fit"]) {
    styleKey = AVVideoScalingModeResizeAspect;
}

AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:fullPath fileType:AVFileTypeQuickTimeMovie error:&error];
NSParameterAssert(videoWriter);
AVAsset *avAsset = [[AVURLAsset alloc] initWithURL:path options:nil];
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                               AVVideoCodecH264, AVVideoCodecKey,
                               [NSNumber numberWithInt:width], AVVideoWidthKey,
                               [NSNumber numberWithInt:height], AVVideoHeightKey,
                               styleKey, AVVideoScalingModeKey,
                               nil];

AVAssetWriterInput* videoWriterInput = [AVAssetWriterInput
                                        assetWriterInputWithMediaType:AVMediaTypeVideo
                                        outputSettings:videoSettings];
NSParameterAssert(videoWriterInput);
NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
videoWriterInput.expectsMediaDataInRealTime = YES;
[videoWriter addInput:videoWriterInput];
NSError *aerror = nil;
AVAssetReader *reader = [[AVAssetReader alloc] initWithAsset:avAsset error:&aerror];
AVAssetTrack *videoTrack = [[avAsset tracksWithMediaType:AVMediaTypeVideo]objectAtIndex:0];
videoWriterInput.transform = videoTrack.preferredTransform;
NSDictionary *videoOptions = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey];
AVAssetReaderTrackOutput *asset_reader_output = [[AVAssetReaderTrackOutput alloc] initWithTrack:videoTrack outputSettings:videoOptions];
[reader addOutput:asset_reader_output];
//audio setup

AVAssetWriterInput* audioWriterInput = [AVAssetWriterInput
                                        assetWriterInputWithMediaType:AVMediaTypeAudio
                                        outputSettings:nil];
AVAssetReader *audioReader = [AVAssetReader assetReaderWithAsset:avAsset error:&error];
AVAssetTrack* audioTrack = [[avAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
AVAssetReaderOutput *readerOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:nil];

[audioReader addOutput:readerOutput];
NSParameterAssert(audioWriterInput);
NSParameterAssert([videoWriter canAddInput:audioWriterInput]);
audioWriterInput.expectsMediaDataInRealTime = NO;
[videoWriter addInput:audioWriterInput];
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
[reader startReading];
dispatch_queue_t _processingQueue = dispatch_queue_create("assetAudioWriterQueue", NULL);
[videoWriterInput requestMediaDataWhenReadyOnQueue:_processingQueue usingBlock:
 ^{
     while ([videoWriterInput isReadyForMoreMediaData]) {
         CMSampleBufferRef sampleBuffer;
         if ([reader status] == AVAssetReaderStatusReading &&
             (sampleBuffer = [asset_reader_output copyNextSampleBuffer])) {

             BOOL result = [videoWriterInput appendSampleBuffer:sampleBuffer];
             CFRelease(sampleBuffer);

             if (!result) {
                 [reader cancelReading];
                 break;
             }
         } else {
             [videoWriterInput markAsFinished];

             switch ([reader status]) {
                 case AVAssetReaderStatusReading:
                     // the reader has more for other tracks, even if this one is done
                     break;
                 case AVAssetReaderStatusFailed:
                     [videoWriter cancelWriting];
                     break;
                 case AVAssetReaderStatusCompleted:
                     // your method for when the conversion is done
                     // should call finishWriting on the writer
                     //hook up audio track
                     [audioReader startReading];
                     [videoWriter startSessionAtSourceTime:kCMTimeZero];
                     dispatch_queue_t mediaInputQueue = dispatch_queue_create("mediaInputQueue", NULL);
                     [audioWriterInput requestMediaDataWhenReadyOnQueue:mediaInputQueue usingBlock:^
                      {
                          NSLog(@"Request");
                          NSLog(@"Asset Writer ready :%d",audioWriterInput.readyForMoreMediaData);
                          while (audioWriterInput.readyForMoreMediaData) {
                              CMSampleBufferRef nextBuffer;
                              if ([audioReader status] == AVAssetReaderStatusReading &&
                                  (nextBuffer = [readerOutput copyNextSampleBuffer])) {
                                  NSLog(@"Ready");
                                  if (nextBuffer) {
                                      NSLog(@"NextBuffer");
                                      [audioWriterInput appendSampleBuffer:nextBuffer];
                                  }
                              }else{
                                  [audioWriterInput markAsFinished];
                                  switch ([audioReader status]) {
                                      case AVAssetReaderStatusCompleted:
                                          [videoWriter finishWritingWithCompletionHandler:^{
                                              if (videoWriter.status != AVAssetWriterStatusFailed && videoWriter.status == AVAssetWriterStatusCompleted) {
                                                  NSLog(@"Asset written");
                                                  NSLog(@"New Asset Orienatation: %d", [self orientationForTrack:asset]);
                                                  [self checkFileExists:fullPath];
                                                  [self getVideoProperties:fullPath];
                                                  self.lastVideo = fullPath;
                                                  //[self showDocumentsContents];
                                                  self.libraryVideo = fullPath;
                                              } else {
                                              }
                                          }];
                                          break;
                                  }
                              }
                          }

                      }
                      ];
                     break;
             }

             break;
         }
     }
 }
 ];
NSLog(@"Write Ended");
}

This appears to run well and does exactly what I want, with the ability to either fill or fit into 640x480 in H264 (mp4) format. When played in MPMoviePlayerController, the video now displays in landscape format.
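For reference, the orientationForTrack: helper isn't included above. A minimal sketch of that kind of helper, assuming the standard check against the first video track's preferredTransform (my assumption, not necessarily the exact implementation):

- (UIInterfaceOrientation)orientationForTrack:(AVAsset *)asset {
    // Read the rotation baked into the first video track's preferred transform.
    AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    CGAffineTransform t = videoTrack.preferredTransform;

    if (t.a == 0 && t.b == 1.0 && t.c == -1.0 && t.d == 0) {
        return UIInterfaceOrientationPortrait;            // rotated 90 degrees clockwise
    } else if (t.a == 0 && t.b == -1.0 && t.c == 1.0 && t.d == 0) {
        return UIInterfaceOrientationPortraitUpsideDown;  // rotated 90 degrees counter-clockwise
    } else if (t.a == -1.0 && t.b == 0 && t.c == 0 && t.d == -1.0) {
        return UIInterfaceOrientationLandscapeLeft;       // rotated 180 degrees
    }
    return UIInterfaceOrientationLandscapeRight;          // identity transform
}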

Next, I am trying to join the two videos.

- (void)joinVideo:(id)sender {
if ((self.libraryVideo != nil) && (self.recordVideo != nil)) {
    NSString *libraryPath = [self.libraryVideo path];
    NSString *outputPath = [libraryPath stringByReplacingOccurrencesOfString:@".mp4" withString:@"-joined.mp4"];
    NSURL *outputURL = [NSURL fileURLWithPath:outputPath];
    NSLog(@"Can Merge Video");
    NSMutableArray *audioTracks = [NSMutableArray array];
    NSMutableArray *videoTracks = [NSMutableArray array];
    NSDictionary *options = @{ AVURLAssetPreferPreciseDurationAndTimingKey : @YES };
    AVURLAsset *asset2 = [AVURLAsset URLAssetWithURL:self.recordVideo options:options];
    [videoTracks addObjectsFromArray:[asset2 tracksWithMediaType:AVMediaTypeVideo]];
    [audioTracks addObjectsFromArray:[asset2 tracksWithMediaType:AVMediaTypeAudio]];
    NSLog(@"Asset 2 Orienatation: %d", [self orientationForTrack:asset2]);
    AVURLAsset *asset1 = [AVURLAsset URLAssetWithURL:self.libraryVideo options:options];
    [videoTracks addObjectsFromArray:[asset1 tracksWithMediaType:AVMediaTypeVideo]];
    [audioTracks addObjectsFromArray:[asset1 tracksWithMediaType:AVMediaTypeAudio]];
    NSLog(@"Asset 1 Orienatation: %d", [self orientationForTrack:asset1]);

    AVMutableComposition *composition = [[AVMutableComposition alloc] init];

    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
    videoComposition.frameDuration = CMTimeMake(1,30);
    videoComposition.renderScale = 1.0;

    if ([audioTracks count] > 0) {
        AVMutableCompositionTrack * audioTrackComposition = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];

        [audioTracks enumerateObjectsUsingBlock:^(AVAssetTrack *track, NSUInteger idx, BOOL *stop) {
            [audioTrackComposition insertTimeRange:CMTimeRangeMake(kCMTimeZero, track.timeRange.duration)
                                           ofTrack:track
                                            atTime:kCMTimeZero
                                             error:nil];
        }];
    }

    AVMutableCompositionTrack *videoTrackComposition = [composition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                                preferredTrackID:kCMPersistentTrackID_Invalid];

    [videoTracks enumerateObjectsUsingBlock:^(AVAssetTrack *track, NSUInteger idx, BOOL *stop) {
        [videoTrackComposition insertTimeRange:CMTimeRangeMake(kCMTimeZero, track.timeRange.duration)
                                       ofTrack:track
                                        atTime:kCMTimeZero
                                         error:nil];
    }];

    AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];

    AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrackComposition];

    AVAssetTrack *sourceVideoTrack = [[asset2 tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];

    CGAffineTransform rotationTransform = CGAffineTransformMakeRotation(M_PI/2);
    CGAffineTransform rotateTranslate = CGAffineTransformTranslate(rotationTransform,320,0);

    [videoTrackComposition setPreferredTransform:sourceVideoTrack.preferredTransform];
    [layerInstruction setTransform:rotateTranslate atTime:kCMTimeZero];

    instruction.layerInstructions = [NSArray arrayWithObject: layerInstruction];
    videoComposition.instructions = [NSArray arrayWithObject: instruction];

    AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:composition
                                                                           presetName:AVAssetExportPresetPassthrough];
    exportSession.outputFileType = AVFileTypeMPEG4;
    exportSession.shouldOptimizeForNetworkUse = YES;
    exportSession.outputURL = outputURL;

    [exportSession exportAsynchronouslyWithCompletionHandler:^ {
        switch (exportSession.status) {
            case AVAssetExportSessionStatusFailed: {
                NSLog(@"Join Failed");
                break;
            }
            case AVAssetExportSessionStatusCompleted: {
                NSLog(@"Join Completed");
                [self checkFileExists:outputURL];
                [self getVideoProperties:outputURL];
                self.lastVideo = outputURL;
                break;
            }
            case AVAssetExportSessionStatusCancelled: {
                NSLog(@"Join Cancelled");
                break;
            }
            default:
                break;
        }
    }];
}
}

This successfully joins the videos. However, Video A is now rotated 90 degrees, while Video B looks correct.
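The checkFileExists: and getVideoProperties: helpers aren't included here either. A minimal sketch of the kind of inspection involved, i.e. logging the joined file's video track naturalSize and preferredTransform, which is where the rotation shows up (the helper name below is made up for illustration):

- (void)logVideoTrackInfo:(NSURL *)url {
    // Hypothetical inspection helper: dump the properties that determine
    // how a player will display the track.
    AVURLAsset *asset = [AVURLAsset URLAssetWithURL:url options:nil];
    AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];

    NSLog(@"naturalSize: %@", NSStringFromCGSize(videoTrack.naturalSize));
    NSLog(@"preferredTransform: %@", NSStringFromCGAffineTransform(videoTrack.preferredTransform));
    NSLog(@"duration: %f s", CMTimeGetSeconds(asset.duration));
}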

I have spent a lot of time trying to figure out why Video A is not being treated as a true 640x480 landscape video in this merge. I tried running a separate pass on Video A to force a change to its preferredTransform, but it did not seem to do anything. Setting the preferred transform on the original AVAssetWriterInput does not seem to have any effect either.
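Sketching the kind of separate pass I mean (the exact mechanics here are an assumption: an AVMutableComposition whose video track is given an identity preferredTransform, then exported with a passthrough preset; the method and output naming are illustrative only):

- (void)forceIdentityTransform:(NSURL *)sourceURL {
    AVURLAsset *asset = [AVURLAsset URLAssetWithURL:sourceURL options:nil];
    AVAssetTrack *sourceTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];

    // Re-wrap the video track in a composition so its preferredTransform can be replaced.
    AVMutableComposition *composition = [AVMutableComposition composition];
    AVMutableCompositionTrack *videoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                      preferredTrackID:kCMPersistentTrackID_Invalid];
    [videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration)
                        ofTrack:sourceTrack
                         atTime:kCMTimeZero
                          error:nil];

    // Give the composition track an identity transform in the hope that the
    // export carries it through to the output file's track header.
    videoTrack.preferredTransform = CGAffineTransformIdentity;

    NSString *outputPath = [[sourceURL path] stringByReplacingOccurrencesOfString:@".mp4"
                                                                       withString:@"-identity.mp4"];
    AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:composition
                                                                           presetName:AVAssetExportPresetPassthrough];
    exportSession.outputFileType = AVFileTypeMPEG4;
    exportSession.outputURL = [NSURL fileURLWithPath:outputPath];
    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        NSLog(@"Transform pass status: %d", (int)exportSession.status);
    }];
}

The output of that pass behaves exactly like the input, which is what I mean by it not seeming to do anything.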

Any ideas on an approach that actually works? Everything I have tried ends with Video A rotated 90 degrees and stretched in the merge.

Is there any way to force the orientation on input from the photo library?

Thanks!

