11

我知道如何使用 AVAssetReader 和 AVAssetWriter，并且已经成功地用它们从一部电影中抓取视频轨道并转码到另一部电影中。但是，我也想对音频做同样的事情。是否必须在完成初始转码后再创建一个 AVAssetExportSession，还是在写入过程中有办法在轨道之间切换？我很不愿意再去处理 AVAssetExportSession。

我问是因为，使用拉取式方法——while ([assetWriterInput isReadyForMoreMediaData]) {...}——只假设了一条轨道。它如何才能同时用于多条轨道，即音频和视频轨道？

4

3 回答 3

8

AVAssetWriter 会自动在与其关联的各个 AVAssetWriterInput 之间交错处理请求，以便把不同的轨道合成到输出文件中。只需为你拥有的每条轨道添加一个 AVAssetWriterInput，并对每个 AVAssetWriterInput 调用 requestMediaDataWhenReadyOnQueue:usingBlock:。

这是我用来调用 requestMediaDataWhenReadyOnQueue:usingBlock: 的一个方法。我在一个按输出/输入对数量迭代的循环里调用此方法。（使用单独的方法既有利于代码可读性，也因为与循环不同，每次调用都会为块建立一个独立的栈帧。）

您只需要一个 dispatch_queue_t，并且可以将它复用于所有轨道。请注意，您绝对不应该在您的块中调用 dispatch_async，因为 requestMediaDataWhenReadyOnQueue:usingBlock: 期望块会一直阻塞，直到它为 AVAssetWriterInput 填充了所需数量的数据。在那之前不要返回。

// Feeds the reader-output / writer-input pair at index i.
// Called once per track; AVAssetWriter interleaves the per-input requests
// so every track ends up muxed into the same output file.
//
// Fixes vs. the original: the stray [self retain] inside the block was a
// leak — it had no matching release, and the block already retains self by
// capturing it. A default: arm also covers AVAssetReaderStatusUnknown.
- (void)requestMediaDataForTrack:(int)i {
  AVAssetReaderOutput *output = [[_reader outputs] objectAtIndex:i];
  AVAssetWriterInput *input = [[_writer inputs] objectAtIndex:i];

  [input requestMediaDataWhenReadyOnQueue:_processingQueue usingBlock:
    ^{
      // Per the API contract, keep appending synchronously until the input
      // stops accepting data; do not return (or dispatch_async) early.
      while ([input isReadyForMoreMediaData]) {
        CMSampleBufferRef sampleBuffer;
        if ([_reader status] == AVAssetReaderStatusReading &&
            (sampleBuffer = [output copyNextSampleBuffer])) {

          // copyNextSampleBuffer returns +1 ownership; balance with CFRelease.
          BOOL result = [input appendSampleBuffer:sampleBuffer];
          CFRelease(sampleBuffer);

          if (!result) {
            // Append failed — abort the whole conversion.
            [_reader cancelReading];
            break;
          }
        } else {
          // This track has no more samples (or the reader stopped).
          [input markAsFinished];

          switch ([_reader status]) {
            case AVAssetReaderStatusReading:
              // the reader has more for other tracks, even if this one is done
              break;

            case AVAssetReaderStatusCompleted:
              // your method for when the conversion is done
              // should call finishWriting on the writer
              [self readingCompleted];
              break;

            case AVAssetReaderStatusCancelled:
              [_writer cancelWriting];
              [_delegate converterDidCancel:self];
              break;

            case AVAssetReaderStatusFailed:
              [_writer cancelWriting];
              break;

            default:
              // AVAssetReaderStatusUnknown — nothing to do here.
              break;
          }

          break;
        }
      }
    }
  ];
}
于 2011-04-09T01:20:41.857 回答
1

Have you tried using two AVAssetWriterInputs and pushing the samples through a worker queue? Here is a rough sketch.

// Rough sketch: one serial worker queue shared by both writer inputs.
processing_queue = dispatch_queue_create("com.mydomain.gcdqueue.mediaprocessor", NULL);

// WARNING: in real code do NOT dispatch_async out of the ready-block —
// requestMediaDataWhenReadyOnQueue:usingBlock: expects the block to feed the
// input synchronously until isReadyForMoreMediaData returns NO. The sketch
// below is kept for illustration only; the dispatch_asyc typos are fixed and
// the bare pseudocode placeholders are turned into valid block bodies.
[videoAVAssetWriterInput requestMediaDataWhenReadyOnQueue:myInputSerialQueue usingBlock:^{
    dispatch_async(processing_queue, ^{ /* process video */ });
}];

[audioAVAssetWriterInput requestMediaDataWhenReadyOnQueue:myInputSerialQueue usingBlock:^{
    dispatch_async(processing_queue, ^{ /* process audio */ });
}];
于 2011-03-14T06:36:42.927 回答
0

您可以使用调度组!

查看 MacOSX 的 AVReaderWriter 示例...

我直接从示例 RWDocument.m 中引用:

// Starts the asset reader and writer, then kicks off each sample-buffer
// channel, joined by a dispatch group so the finish/cancel logic runs only
// after every active channel has drained.
//
// Returns NO with *outError set if startReading/startWriting fails up front;
// the asynchronous outcome is reported later through
// -readingAndWritingDidFinishSuccessfully:withError:.
// (Quoted from Apple's AVReaderWriter sample, RWDocument.m.)
- (BOOL)startReadingAndWritingReturningError:(NSError **)outError
{
    BOOL success = YES;
    NSError *localError = nil;

    // Instruct the asset reader and asset writer to get ready to do work
    success = [assetReader startReading];
    if (!success)
        localError = [assetReader error];
    if (success)
    {
        success = [assetWriter startWriting];
        if (!success)
            localError = [assetWriter error];
    }

    if (success)
    {
        // One group entry per channel; the notify block fires when all leave.
        dispatch_group_t dispatchGroup = dispatch_group_create();

        // Start a sample-writing session
        [assetWriter startSessionAtSourceTime:[self timeRange].start];

        // Start reading and writing samples
        if (audioSampleBufferChannel)
        {
            // Only set audio delegate for audio-only assets, else let the video channel drive progress
            id <RWSampleBufferChannelDelegate> delegate = nil;
            if (!videoSampleBufferChannel)
                delegate = self;

            dispatch_group_enter(dispatchGroup);
            [audioSampleBufferChannel startWithDelegate:delegate completionHandler:^{
                dispatch_group_leave(dispatchGroup);
            }];
        }
        if (videoSampleBufferChannel)
        {
            dispatch_group_enter(dispatchGroup);
            [videoSampleBufferChannel startWithDelegate:self completionHandler:^{
                dispatch_group_leave(dispatchGroup);
            }];
        }

        // Set up a callback for when the sample writing is finished
        // (runs on serializationQueue once both channels have completed).
        dispatch_group_notify(dispatchGroup, serializationQueue, ^{
            BOOL finalSuccess = YES;
            NSError *finalError = nil;

            if (cancelled)
            {
                [assetReader cancelReading];
                [assetWriter cancelWriting];
            }
            else
            {
                // Reader failure takes priority; otherwise finalize the file.
                if ([assetReader status] == AVAssetReaderStatusFailed)
                {
                    finalSuccess = NO;
                    finalError = [assetReader error];
                }

                if (finalSuccess)
                {
                    finalSuccess = [assetWriter finishWriting];
                    if (!finalSuccess)
                        finalError = [assetWriter error];
                }
            }

            [self readingAndWritingDidFinishSuccessfully:finalSuccess withError:finalError];
        });

        // Pre-ARC GCD object management; the notify block holds its own ref.
        dispatch_release(dispatchGroup);
    }

    if (outError)
        *outError = localError;

    return success;
}
于 2012-09-16T15:54:59.233 回答