
Before I sit down and read an entire book on CoreAudio, I'd like to know whether it's the best framework for me to learn, or whether AVFoundation can do what I need. I want to be able to download a small portion of an MP3 that lives on a remote server, say 20 seconds of the file, preferably without downloading the whole file first and then trimming it.
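For the download side, my rough idea is an HTTP Range request so only the first chunk of the file ever comes down. Here's a sketch of what I mean, assuming the server honors Range headers; the URL and byte count are placeholders, and a byte range only approximates 20 seconds unless you know the MP3's bitrate (roughly 320 kB covers 20 s at 128 kbps CBR):

NSURL *url = [NSURL URLWithString:@"https://example.com/track.mp3"]; // placeholder URL
NSMutableURLRequest *request = [NSMutableURLRequest requestWithURL:url];
// Ask for the first ~320 kB, roughly 20 s of 128 kbps CBR audio.
[request setValue:@"bytes=0-319999" forHTTPHeaderField:@"Range"];

NSURLSessionDataTask *task = [[NSURLSession sharedSession]
    dataTaskWithRequest:request
      completionHandler:^(NSData *data, NSURLResponse *response, NSError *error) {
          if (error) {
              NSLog(@"Download failed: %@", error);
              return;
          }
          // HTTP 206 means the server honored the range; 200 means it sent the whole file.
          NSInteger status = [(NSHTTPURLResponse *)response statusCode];
          NSLog(@"Status %ld, received %lu bytes", (long)status, (unsigned long)data.length);
          NSString *path = [NSTemporaryDirectory() stringByAppendingPathComponent:@"clip.mp3"];
          [data writeToFile:path atomically:YES];
      }];
[task resume];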

Then I want to layer 2 audio tracks and bounce them out as a single file.

Do I need to dig into CoreAudio, or can AVFoundation do this? Advice would be greatly appreciated.


1 Answer


The download part of the file is up to you, but if you want to mix 2 or more audio files into one, AVFoundation is probably the easiest way: AVAssetExportSession does the exporting and AVMutableAudioMix does the mixing. There is sample code for a simple editor floating around in Apple's documentation, but I can't seem to find it at the moment; if I do, I'll post the link.
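Since you only need audio, here is a stripped-down sketch of the same idea with no video involved: both tracks are inserted into an AVMutableComposition at time zero, and overlapping audio tracks are mixed together on export. Note that AVAssetExportSession can't write MP3, so M4A is the usual output; the input paths are placeholders and error handling is elided:

NSURL *urlA = [NSURL fileURLWithPath:@"/path/to/first.mp3"];  // placeholder paths
NSURL *urlB = [NSURL fileURLWithPath:@"/path/to/second.mp3"];
AVURLAsset *assetA = [AVURLAsset URLAssetWithURL:urlA options:nil];
AVURLAsset *assetB = [AVURLAsset URLAssetWithURL:urlB options:nil];

// One composition track per source; tracks that overlap in time are mixed on export.
AVMutableComposition *composition = [AVMutableComposition composition];
for (AVURLAsset *asset in @[ assetA, assetB ]) {
    AVMutableCompositionTrack *track =
        [composition addMutableTrackWithMediaType:AVMediaTypeAudio
                                 preferredTrackID:kCMPersistentTrackID_Invalid];
    AVAssetTrack *source = [[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
    [track insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration)
                   ofTrack:source
                    atTime:kCMTimeZero
                     error:nil];
}

AVAssetExportSession *export =
    [[AVAssetExportSession alloc] initWithAsset:composition
                                     presetName:AVAssetExportPresetAppleM4A];
export.outputURL = [NSURL fileURLWithPath:
    [NSTemporaryDirectory() stringByAppendingPathComponent:@"mix.m4a"]];
export.outputFileType = AVFileTypeAppleM4A;
[export exportAsynchronouslyWithCompletionHandler:^{
    NSLog(@"Export finished with status %ld", (long)export.status);
}];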

Here is a method that actually does the mixing. Keep in mind that I'm also adding video here; _audioTracks and _videoTracks are mutable arrays containing AVAssets.

-(void)createMix
{

    CGSize videoSize = [[_videoTracks objectAtIndex:0] naturalSize];
    AVMutableComposition *composition = [AVMutableComposition composition];
    AVMutableVideoComposition *videoComposition = nil;
    AVMutableAudioMix *audioMix = [AVMutableAudioMix audioMix]; // create the mix up front so the ducking parameters below get applied

    composition.naturalSize = videoSize;


    AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];



    AVAsset *videoAsset = [_videoTracks objectAtIndex:0];
    CMTimeRange timeRangeInAsset = CMTimeRangeMake(kCMTimeZero, [videoAsset duration]);

    AVAssetTrack *clipVideoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    [compositionVideoTrack insertTimeRange:timeRangeInAsset ofTrack:clipVideoTrack atTime:kCMTimeZero error:nil];

    AVAssetTrack *clipAudioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
    [compositionAudioTrack insertTimeRange:timeRangeInAsset ofTrack:clipAudioTrack atTime:kCMTimeZero error:nil];


    NSMutableArray *trackMixArray = [NSMutableArray array];

    if(_audioTracks && _audioTracks.count>0)
    {
        for(AVAsset *audio in _audioTracks)
        {

            NSInteger i;
            NSArray *tracksToDuck = [audio tracksWithMediaType:AVMediaTypeAudio]; // before we add the commentary

            // Clip commentary duration to composition duration.
            CMTimeRange commentaryTimeRange = CMTimeRangeMake(kCMTimeZero, audio.duration);
            if (CMTIME_COMPARE_INLINE(CMTimeRangeGetEnd(commentaryTimeRange), >, [composition duration]))
                commentaryTimeRange.duration = CMTimeSubtract([composition duration], commentaryTimeRange.start);

            // Add the commentary track.
            AVMutableCompositionTrack *compositionCommentaryTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
            [compositionCommentaryTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, commentaryTimeRange.duration) ofTrack:[[audio tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:commentaryTimeRange.start error:nil];



            // Duck the other tracks to 20% volume while the commentary plays,
            // with half-second ramps on either side. (This snippet appears to
            // come from Apple's editing sample, where the commentary starts
            // mid-movie; here commentaryTimeRange.start is kCMTimeZero, so the
            // fade-down ramp range begins before time zero.)
            CMTime rampDuration = CMTimeMake(1, 2); // half a second
            for (i = 0; i < [tracksToDuck count]; i++) {
                AVMutableAudioMixInputParameters *trackMix = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:[tracksToDuck objectAtIndex:i]];
                [trackMix setVolumeRampFromStartVolume:1.0 toEndVolume:0.2 timeRange:CMTimeRangeMake(CMTimeSubtract(commentaryTimeRange.start, rampDuration), rampDuration)];
                [trackMix setVolumeRampFromStartVolume:0.2 toEndVolume:1.0 timeRange:CMTimeRangeMake(CMTimeRangeGetEnd(commentaryTimeRange), rampDuration)];
                [trackMixArray addObject:trackMix];
            }


        }
    }

    audioMix.inputParameters = trackMixArray; // without this assignment the volume ramps above are never applied

    if (videoComposition) {
        // Every videoComposition needs these properties to be set:
        videoComposition.frameDuration = CMTimeMake(1, 30); // 30 fps
        videoComposition.renderSize = videoSize;
    }


    AVAssetExportSession *session = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPreset1280x720];
    session.videoComposition = videoComposition;
    session.audioMix = audioMix;

    // Pick an output path in the temp directory that doesn't already exist.
    NSUInteger count = 0;
    NSString *filePath;
    do {
        filePath = NSTemporaryDirectory();

        NSString *numberString = count > 0 ? [NSString stringWithFormat:@"-%lu", (unsigned long)count] : @"";
        filePath = [filePath stringByAppendingPathComponent:[NSString stringWithFormat:@"Output%@.mov", numberString]];
        count++;
    } while([[NSFileManager defaultManager] fileExistsAtPath:filePath]);

    session.outputURL = [NSURL fileURLWithPath:filePath];
    session.outputFileType = AVFileTypeQuickTimeMovie; // .mov extension to match the QuickTime file type

    [session exportAsynchronouslyWithCompletionHandler:^
    {
        dispatch_async(dispatch_get_main_queue(), ^{
            NSLog(@"Exported");
            if(session.error)
            {
                NSLog(@"had an error %@", session.error);
            }
            // 'delegate' is assumed to be an ivar whose protocol declares didFinishExportingMovie:
            if(delegate && [delegate respondsToSelector:@selector(didFinishExportingMovie:)])
            {
                [delegate didFinishExportingMovie:filePath];
            }
        });
    }];



}

Hope that helps.

Daniel

answered 2012-08-23T15:43:59.357