
I'm building an app that adds background music to videos.

Some users complain that when their music is in Apple Lossless format, the resulting video is too large.

I found that this is because the AVMutableComposition I use simply copies the original music format into the video I generate.

So is there any way to lower the bitrate of the music in an MPMediaItem, or to change its encoding format?
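For context, the music comes from the user's media library, so the audio asset has to be obtained from the MPMediaItem first. A minimal sketch, assuming `mediaItem` is a hypothetical item already picked from the library:

#import <MediaPlayer/MediaPlayer.h>
#import <AVFoundation/AVFoundation.h>

// MPMediaItemPropertyAssetURL returns nil for DRM-protected or cloud-only items.
NSURL *assetURL = [mediaItem valueForProperty:MPMediaItemPropertyAssetURL];
AVURLAsset *audioAsset = assetURL ? [AVURLAsset assetWithURL:assetURL] : nil;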

Here is the code snippet I use to add the music to the video.

AVMutableComposition* mixComposition = [AVMutableComposition composition];

AVMutableCompositionTrack *compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                                    preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                                    ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
                                     atTime:kCMTimeZero error:nil];

AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                               preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                               ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
                                atTime:kCMTimeZero error:nil];

AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                                                      presetName:AVAssetExportPresetPassthrough];

NSURL *exportUrl = [NSURL fileURLWithPath:_videoOutputPath];

if ([[NSFileManager defaultManager] fileExistsAtPath:_videoOutputPath]){
    [[NSFileManager defaultManager] removeItemAtPath:_videoOutputPath error:nil];
}

_assetExport.outputFileType = @"com.apple.quicktime-movie"; // same as AVFileTypeQuickTimeMovie

_assetExport.outputURL = exportUrl;
_assetExport.shouldOptimizeForNetworkUse = YES;

[_assetExport exportAsynchronouslyWithCompletionHandler:^(void) {}];
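For reference: AVAssetExportPresetPassthrough copies the source samples as-is, which is why an Apple Lossless track survives into the output unchanged. Switching to a quality preset makes the session re-encode, but it transcodes the video track as well; a minimal sketch, using the same mixComposition as above:

AVAssetExportSession *reencodingExport =
    [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                     presetName:AVAssetExportPresetMediumQuality]; // re-encodes both audio (AAC) and video (H.264)

The answer below avoids re-encoding the video by transcoding only the audio track first.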

1 Answer


I finally figured it out. Here is the code I used: it decodes the source track to linear PCM with an AVAssetReader, then re-encodes it at a lower bitrate with an AVAssetWriter.

#import <AVFoundation/AVFoundation.h>

static NSString * const kWriterInputIsReadyForMoreData = @"readyForMoreMediaData";

@implementation AudioUtil
{
    AVAssetReader *_assetReader;
    AVAssetWriter *_assetWriter;
    AVAssetWriterInput *_assetWriterInput;
    AVAssetReaderTrackOutput *_readerOutput;
    void (^_callback)(BOOL);
    CMSampleBufferRef _sampleBufferToAppend;
}

-(void)downSamplingAudioWithSourceURL:(NSURL *)sourceURL destinationURL:(NSURL *)destURL timeRange:(CMTimeRange)timeRange callBack:(void (^)(BOOL))callback
{
    NSError *error = nil;
    _callback = callback;

    [[NSFileManager defaultManager] removeItemAtURL:destURL error:nil];

    // Initialize the reader: decode the source audio track to linear PCM.
    AVURLAsset *inputAsset = [AVURLAsset assetWithURL:sourceURL];
    _assetReader = [[AVAssetReader alloc] initWithAsset:inputAsset error:&error];
    _assetReader.timeRange = timeRange;
    AVAssetTrack *track = [[inputAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];

    NSMutableDictionary *audioReadSettings = [NSMutableDictionary dictionary];
    audioReadSettings[AVFormatIDKey] = @(kAudioFormatLinearPCM);
    audioReadSettings[AVNumberOfChannelsKey] = @([QLVideoFormatProvider audioChannelCount]);

    _readerOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:track outputSettings:audioReadSettings];
    NSAssert([_assetReader canAddOutput:_readerOutput], @"reader can't add output");
    [_assetReader addOutput:_readerOutput];

    // Initialize the writer: re-encode the PCM samples at the target bitrate.
    _assetWriter = [[AVAssetWriter alloc] initWithURL:destURL fileType:[QLVideoFormatProvider audioFileType] error:nil];

    NSMutableDictionary *audioOutputSettings = [NSMutableDictionary dictionary];
    audioOutputSettings[AVFormatIDKey] = [QLVideoFormatProvider audioFormatKeyForEncoder];
    audioOutputSettings[AVNumberOfChannelsKey] = @([QLVideoFormatProvider audioChannelCount]);
    audioOutputSettings[AVSampleRateKey] = @([QLVideoFormatProvider audioSampleRate]);
    audioOutputSettings[AVEncoderBitRateKey] = @([QLVideoFormatProvider audioBitrate]);

    _assetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:audioOutputSettings];
    [_assetWriter addInput:_assetWriterInput];

    // Start both ends of the pipeline.
    [_assetWriter startWriting];
    [_assetWriter startSessionAtSourceTime:kCMTimeZero];
    BOOL canStartReading = [_assetReader startReading];
    NSLog(@"can start reading %d", canStartReading);
    if (!canStartReading) {
        callback(NO);
        return;
    }

    // Observe readyForMoreMediaData so appending resumes whenever the input can take more.
    [_assetWriterInput addObserver:self forKeyPath:kWriterInputIsReadyForMoreData options:NSKeyValueObservingOptionOld|NSKeyValueObservingOptionNew context:NULL];
    _sampleBufferToAppend = [_readerOutput copyNextSampleBuffer];

    [self appendBufferToAppend];
}

// Append the pending buffer, then keep pulling buffers from the reader
// until the writer input stops accepting data or the source runs dry.
-(void)appendBufferToAppend
{
    if ([_assetWriterInput isReadyForMoreMediaData]) {
        if (_sampleBufferToAppend) {
            [_assetWriterInput appendSampleBuffer:_sampleBufferToAppend];
            CFRelease(_sampleBufferToAppend);
        }
        _sampleBufferToAppend = [_readerOutput copyNextSampleBuffer];
        if (_sampleBufferToAppend) {
            [self appendBufferToAppend];
        }
        else {
            // No more samples: stop observing and finalize the file.
            [_assetWriterInput removeObserver:self forKeyPath:kWriterInputIsReadyForMoreData];
            [_assetWriter finishWritingWithCompletionHandler:^(){
                if (_callback) {
                    _callback(_assetWriter.status == AVAssetWriterStatusCompleted);
                }
            }];
        }
    }
    // Otherwise, wait for the KVO notification that the input is ready again.
}

// KVO callback: resume appending when the writer input becomes ready for more data.
-(void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
{
    if ([keyPath isEqualToString:kWriterInputIsReadyForMoreData]) {
        if ([change[NSKeyValueChangeNewKey] boolValue]) {
            [self appendBufferToAppend];
        }
    }
}

@end
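A minimal usage sketch, assuming the question's `audioAsset` is an AVURLAsset and `videoAsset` and `mixComposition` come from the question's code; the destination path is hypothetical:

AudioUtil *audioUtil = [[AudioUtil alloc] init];
NSURL *compressedAudioURL = [NSURL fileURLWithPath:
    [NSTemporaryDirectory() stringByAppendingPathComponent:@"compressed-audio.m4a"]]; // hypothetical path

[audioUtil downSamplingAudioWithSourceURL:[(AVURLAsset *)audioAsset URL]
                           destinationURL:compressedAudioURL
                                timeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                                 callBack:^(BOOL success) {
    if (success) {
        // Build the composition from the re-encoded audio instead of the original,
        // then export with AVAssetExportPresetPassthrough as in the question.
        AVURLAsset *compressedAudioAsset = [AVURLAsset assetWithURL:compressedAudioURL];
        // ... insert compressedAudioAsset's audio track into mixComposition ...
    }
}];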
answered 2013-08-09T05:50:03