
I have an application in which I need to merge an audio file into a video file.

Sometimes my audio file is longer than the video file's duration. I used an AVFoundation mix composition and the two are merged, but the problem is that when the video is shorter, the audio keeps playing to its full duration after the video has finished. What I need is for the audio to stop as soon as the video ends.

Can anyone suggest a solution?


1 Answer


Use the following code to stop your audio; it also fades the audio out over the last five seconds.

- (void)getFadeAudioFile {

    if (![appDelegate.musicFilePath isEqualToString:@"Not set"]) {
        NSURL *url = [[[NSURL alloc]initWithString:appDelegate.musicFilePath]autorelease];

        AVURLAsset* audioAsset = [[[AVURLAsset alloc]initWithURL:url options:nil]autorelease];

        NSString *filePath = [self applicationDocumentsDirectory];
        NSString *outputFilePath = nil;
        outputFilePath = [filePath stringByAppendingPathComponent:@"/mySong.m4a"];
        NSURL    *outputFileUrl = [[[NSURL alloc]initFileURLWithPath:outputFilePath]autorelease];

        NSError *theError = nil;

        if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
            [[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:&theError];

        [self exportAsset:audioAsset toFilePath:outputFileUrl];

    }
}
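
The code above calls an applicationDocumentsDirectory helper that the answer does not show. A minimal sketch of such a helper (only the method name comes from the call above; the body is an assumption) could be:

- (NSString *)applicationDocumentsDirectory {
    // Standard lookup of the app's Documents directory.
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    return [paths count] > 0 ? [paths objectAtIndex:0] : nil;
}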

- (BOOL)exportAsset:(AVAsset *)avAsset toFilePath:(NSURL *)filePath {

    // get the first audio track
    NSArray *tracks = [avAsset tracksWithMediaType:AVMediaTypeAudio];
    if ([tracks count] == 0) return NO;

    AVAssetTrack *track = [tracks objectAtIndex:0];

    // create the export session
    // no need to retain here since the session will be retained by the
    // completion handler since it is referenced there

    AVAssetExportSession *exportSession = [AVAssetExportSession exportSessionWithAsset:avAsset presetName:AVAssetExportPresetAppleM4A];
    if (nil == exportSession) return NO;

    NSLog(@"arrOfImagesForVideo.coun:%d",arrImageDataDict.count);

    int imgCount = arrImageDataDict.count+1;
    int delay = appDelegate.delaySecond;

    int duration = imgCount*delay;

    CMTime stopTime = CMTimeMake(duration, 1);

    // create trim time range - from the start of the asset up to the computed video duration
    CMTime startTime = CMTimeMake(0, 1);
    CMTimeRange exportTimeRange = CMTimeRangeFromTimeToTime(startTime, stopTime);

    // create fade-out time range - the last 5 seconds of the trimmed asset
    NSInteger fadeTime = duration - 5;

    NSLog(@"fade start time:%ld", (long)fadeTime);
    NSLog(@"trimmed duration:%d", duration);

    CMTime startFadeOutTime = CMTimeMake(fadeTime, 1);
    CMTime endFadeOutTime = CMTimeMake(duration, 1);
    CMTimeRange fadeOutTimeRange = CMTimeRangeFromTimeToTime(startFadeOutTime, endFadeOutTime);

    // setup audio mix that ramps the volume from full down to silence
    AVMutableAudioMix *exportAudioMix = [AVMutableAudioMix audioMix];
    AVMutableAudioMixInputParameters *exportAudioMixInputParameters = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:track];

    [exportAudioMixInputParameters setVolumeRampFromStartVolume:1.0 toEndVolume:0.0 timeRange:fadeOutTimeRange];
    exportAudioMix.inputParameters = [NSArray arrayWithObject:exportAudioMixInputParameters];

    // configure export session  output with all our parameters
    exportSession.outputURL = filePath; // output path
    exportSession.outputFileType = AVFileTypeAppleM4A;          // output file type
    exportSession.timeRange = exportTimeRange;                  // trim time range
    exportSession.audioMix = exportAudioMix;                    // fade-out audio mix

    [exportSession exportAsynchronouslyWithCompletionHandler:
            ^(void ) {
                //[self saveVideoToAlbum:outputFilePath];
            }
            ];

    return YES;
}
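
The completion handler above is left empty; before using the exported file you would normally check the session's status. This is not part of the original answer, just a sketch of what that handler might contain, using the same exportSession and filePath variables:

[exportSession exportAsynchronouslyWithCompletionHandler:^{
    // Inspect the result of the export before using the file.
    switch (exportSession.status) {
        case AVAssetExportSessionStatusCompleted:
            NSLog(@"audio export finished: %@", filePath);
            break;
        case AVAssetExportSessionStatusFailed:
            NSLog(@"audio export failed: %@", exportSession.error);
            break;
        case AVAssetExportSessionStatusCancelled:
            NSLog(@"audio export cancelled");
            break;
        default:
            break;
    }
}];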

The file will be saved at that path in your Documents directory; then use it like this:

NSString *filePath = [self applicationDocumentsDirectory];
NSString *outputFilePath1 = [filePath stringByAppendingPathComponent:@"/mySong.m4a"];

NSURL *audio_inputFileUrl = [[NSURL alloc]initFileURLWithPath:outputFilePath1];
int imgCount = imageArray.count;
int delay = appDelegate.delaySecond;

NSLog(@"audio merged");
int duration = imgCount*delay;

CMTime seekingCM = CMTimeMake(duration, 1);

AVURLAsset* audioAsset = [[AVURLAsset alloc]initWithURL:audio_inputFileUrl options:nil];
CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, seekingCM);
AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:nextClipStartTime error:nil];
//[audioAsset autorelease];
newAudioTrack = [audioAsset tracksWithMediaType:AVMediaTypeAudio][0];
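
The stop time here is derived from imgCount * delay. If you have the video asset itself available, you can instead clamp the audio range to the video's own duration, which is exactly the behaviour the question asks for. A hedged sketch, assuming a videoAsset variable holding the AVAsset of the video being merged:

// Clamp the audio range to the shorter of audio and video so the
// audio stops when the video ends. videoAsset is an assumed variable.
CMTime videoDuration = videoAsset.duration;
CMTime audioDuration = audioAsset.duration;
CMTime mergedDuration = CMTimeMinimum(audioDuration, videoDuration);

// use this range in place of audio_timeRange above
CMTimeRange clampedRange = CMTimeRangeMake(kCMTimeZero, mergedDuration);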
answered 2013-01-17 at 08:27