
I have an app that plays some audio and, while that sound is playing, records video + audio. I'd like to find a way to process the video so that the audio picked up by the microphone is removed from the resulting recording.

For example, if I'm playing audioA while recording videoB with audioB (from the microphone), I want to somehow cancel audioA out of the resulting audioB, so that audioB contains only ambient noise and not the sound coming from the device's speaker.

Any idea whether there is a way to do this?

Bonus points if it can be done without any offline processing.


1 Answer


You will have to handle the playback part yourself. The code below then mixes a chosen audio track into the recorded video.
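
For the playback/recording part, one option is to run the audio session in a voice-processing mode while recording, which asks iOS to cancel the device's own output from the microphone input. A minimal sketch, assuming the built-in echo canceller and its effect on routing and volume are acceptable for your app (the helper name below is made up):

#import <AVFoundation/AVFoundation.h>

// Hypothetical session setup: play and record simultaneously, with the
// system's acoustic echo canceller enabled through the voice-chat mode.
- (BOOL)configureSessionForEchoCancelledRecording:(NSError **)outError {
    AVAudioSession * session = [AVAudioSession sharedInstance];
    if (![session setCategory:AVAudioSessionCategoryPlayAndRecord
                  withOptions:AVAudioSessionCategoryOptionDefaultToSpeaker
                        error:outError]) {
        return NO;
    }
    // AVAudioSessionModeVoiceChat enables voice processing (including echo
    // cancellation) on the audio I/O unit.
    if (![session setMode:AVAudioSessionModeVoiceChat error:outError]) {
        return NO;
    }
    return [session setActive:YES error:outError];
}

With the playback kept out of the microphone track, the method below puts the original audio back into the recorded video.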

- (void)mixAudio:(AVAsset*)audioAsset startTime:(CMTime)startTime withVideo:(NSURL*)inputUrl affineTransform:(CGAffineTransform)affineTransform toUrl:(NSURL*)outputUrl outputFileType:(NSString*)outputFileType withMaxDuration:(CMTime)maxDuration withCompletionBlock:(void(^)(NSError *))completionBlock {
    NSError * error = nil;
    AVMutableComposition * composition = [[AVMutableComposition alloc] init];

    // One empty composition track for the video and one for the replacement audio.
    AVMutableCompositionTrack * videoTrackComposition = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];

    AVMutableCompositionTrack * audioTrackComposition = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];

    // The recorded video file; precise duration/timing avoids drift when trimming.
    AVURLAsset * fileAsset = [AVURLAsset URLAssetWithURL:inputUrl options:[NSDictionary dictionaryWithObject:[NSNumber numberWithBool:YES] forKey:AVURLAssetPreferPreciseDurationAndTimingKey]];

    NSArray * videoTracks = [fileAsset tracksWithMediaType:AVMediaTypeVideo];

    // Cap the output length at the video track's duration or maxDuration, whichever is shorter.
    CMTime duration = ((AVAssetTrack*)[videoTracks objectAtIndex:0]).timeRange.duration;

    if (CMTIME_COMPARE_INLINE(duration, >, maxDuration)) {
        duration = maxDuration;
    }

    // Insert the chosen audio, starting at startTime, in place of the recorded audio.
    for (AVAssetTrack * track in [audioAsset tracksWithMediaType:AVMediaTypeAudio]) {
        if (![audioTrackComposition insertTimeRange:CMTimeRangeMake(startTime, duration) ofTrack:track atTime:kCMTimeZero error:&error]) {
            completionBlock(error);
            return;
        }
    }

    // Copy the recorded video frames unchanged.
    for (AVAssetTrack * track in videoTracks) {
        if (![videoTrackComposition insertTimeRange:CMTimeRangeMake(kCMTimeZero, duration) ofTrack:track atTime:kCMTimeZero error:&error]) {
            completionBlock(error);
            return;
        }
    }

    // Preserve the recording's orientation.
    videoTrackComposition.preferredTransform = affineTransform;

    // Passthrough keeps the original encodings; nothing is re-encoded.
    AVAssetExportSession * exportSession = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetPassthrough];
    exportSession.outputFileType = outputFileType;
    exportSession.shouldOptimizeForNetworkUse = YES;
    exportSession.outputURL = outputUrl;

    [exportSession exportAsynchronouslyWithCompletionHandler:^ {
        NSError * error = nil;
        if (exportSession.error != nil) {
            // Wrap the export error with some extra context for the caller.
            NSMutableDictionary * userInfo = [NSMutableDictionary dictionaryWithDictionary:exportSession.error.userInfo];
            NSString * subLocalizedDescription = [userInfo objectForKey:NSLocalizedDescriptionKey];
            [userInfo removeObjectForKey:NSLocalizedDescriptionKey];
            [userInfo setObject:@"Failed to mix audio and video" forKey:NSLocalizedDescriptionKey];
            [userInfo setObject:exportSession.outputFileType forKey:@"OutputFileType"];
            [userInfo setObject:exportSession.outputURL forKey:@"OutputUrl"];
            if (subLocalizedDescription != nil) {
                [userInfo setObject:subLocalizedDescription forKey:@"CauseLocalizedDescription"];
            }

            [userInfo setObject:[AVAssetExportSession allExportPresets] forKey:@"AllExportSessions"];

            error = [NSError errorWithDomain:@"Error" code:500 userInfo:userInfo];
        }

        completionBlock(error);
    }];
}
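
For reference, a sketch of how this method might be called once recording has finished (the file URLs, identity transform, and 30-second cap are placeholders, not part of the answer):

// audioA.m4a is the audio that was played back; recording.mov is the captured
// video whose microphone track gets replaced.
AVURLAsset * audioA = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:@"audioA.m4a"] options:nil];
NSURL * recordedVideoUrl = [NSURL fileURLWithPath:@"recording.mov"];
NSURL * mixedOutputUrl = [NSURL fileURLWithPath:@"mixed.mov"];

[self mixAudio:audioA
     startTime:kCMTimeZero
     withVideo:recordedVideoUrl
affineTransform:CGAffineTransformIdentity
         toUrl:mixedOutputUrl
outputFileType:AVFileTypeQuickTimeMovie
withMaxDuration:CMTimeMakeWithSeconds(30, 600)
withCompletionBlock:^(NSError * error) {
    if (error != nil) {
        NSLog(@"Mixing failed: %@", error);
    } else {
        NSLog(@"Mixed video written to %@", mixedOutputUrl);
    }
}];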
answered 2015-04-22 20:02:46