
I guess "AudioConverterFillComplexBuffer" is the solution.

But I don't know whether this approach is correct.

+1. AudioUnit

Initialize the AudioUnit: "recordingCallback" is the callback method. The output format is PCM and it is recorded to a file. (I played back the recorded file.)

+2. AudioConverter

I added "AudioConverterFillComplexBuffer", but I don't understand it well.

+3. Problem

The "audioConverterComplexInputDataProc" method is called only once.

How should the AudioConverter API be used?

My code is attached below.

#import "AACAudioRecorder.h"
#define kOutputBus 0
#define kInputBus 1
@implementation AACAudioRecorder
            
            

This is the callback method for AudioConverterFillComplexBuffer.

static OSStatus audioConverterComplexInputDataProc(  AudioConverterRef               inAudioConverter,
                                      UInt32*                         ioNumberDataPackets,
                                      AudioBufferList*                ioData,
                                      AudioStreamPacketDescription**  outDataPacketDescription,
                                      void*                           inUserData){
    ioData = (AudioBufferList*)inUserData;
    return 0;
}
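
This is most likely why the proc above is only called once: it never fills ioData with actual PCM and never sets ioNumberDataPackets, so the converter gets nothing to consume. Below is a minimal sketch, not the original code, of what a working input proc generally looks like; the FeedState struct and the feedPCMProc name are made up for illustration.

typedef struct {
    AudioBufferList *pcm;          // PCM captured by the AudioUnit callback
    UInt32           packetsLeft;  // packets still available (1 packet == 1 frame for this PCM)
    Boolean          delivered;    // set once the data has been handed to the converter
} FeedState;

static OSStatus feedPCMProc(AudioConverterRef              inConverter,
                            UInt32                        *ioNumberDataPackets,
                            AudioBufferList               *ioData,
                            AudioStreamPacketDescription **outDataPacketDescription,
                            void                          *inUserData)
{
    FeedState *state = (FeedState *)inUserData;

    if (state->delivered || state->packetsLeft == 0) {
        // No input left: report zero packets and return a distinctive status so
        // AudioConverterFillComplexBuffer stops asking for more data.
        *ioNumberDataPackets = 0;
        return 'zero';
    }

    // Point the converter at our PCM; no copy is needed.
    ioData->mNumberBuffers              = 1;
    ioData->mBuffers[0].mNumberChannels = state->pcm->mBuffers[0].mNumberChannels;
    ioData->mBuffers[0].mData           = state->pcm->mBuffers[0].mData;
    ioData->mBuffers[0].mDataByteSize   = state->pcm->mBuffers[0].mDataByteSize;

    // Packed linear PCM has no packet descriptions.
    if (outDataPacketDescription) *outDataPacketDescription = NULL;

    *ioNumberDataPackets = state->packetsLeft;
    state->delivered     = YES;
    state->packetsLeft   = 0;
    return noErr;
}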

This is the AudioUnit callback.

static OSStatus recordingCallback(void *inRefCon, 
                                        AudioUnitRenderActionFlags *ioActionFlags, 
                                        const AudioTimeStamp *inTimeStamp, 
                                        UInt32 inBusNumber, 
                                        UInt32 inNumberFrames, 
                                        AudioBufferList *ioData) {
   @autoreleasepool {

       AudioBufferList *bufferList;
       
       AACAudioRecorder *THIS = (AACAudioRecorder *)inRefCon;
       OSStatus err = AudioUnitRender(THIS->m_audioUnit,
                                      ioActionFlags,
                                      inTimeStamp, 1, inNumberFrames, bufferList);

       if (err) { NSLog(@"%s AudioUnitRender error %d\n",__FUNCTION__, (int)err); return err; }
       
       NSString *recordFile = 
                       [NSTemporaryDirectory() stringByAppendingPathComponent: @"auioBuffer.pcm"];
       FILE *fp;
       fp = fopen([recordFile UTF8String], "a+");
       fwrite(bufferList->mBuffers[0].mData, sizeof(Byte),
              bufferList->mBuffers[0].mDataByteSize, fp);
       fclose(fp);    

       [THIS convert:bufferList ioOutputDataPacketSize:&inNumberFrames];     

   if (err) {NSLog(@"%s : AudioFormat Convert error %d\n",__FUNCTION__, (int)err);  }
    }
    return noErr;
}
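
One more note on the callback above: bufferList is passed to AudioUnitRender without ever being allocated, so the render call has nowhere to write. A minimal sketch of how the buffer is usually set up inside recordingCallback, assuming the mono 16-bit format configured in the initialize method below (the 4096-sample scratch array is just an illustrative size):

       // Inside recordingCallback, before calling AudioUnitRender:
       SInt16 samples[4096];                            // scratch space; must hold at least inNumberFrames samples
       AudioBufferList renderList;
       renderList.mNumberBuffers              = 1;
       renderList.mBuffers[0].mNumberChannels = 1;
       renderList.mBuffers[0].mDataByteSize   = inNumberFrames * sizeof(SInt16);
       renderList.mBuffers[0].mData           = samples;

       OSStatus err = AudioUnitRender(THIS->m_audioUnit,
                                      ioActionFlags,
                                      inTimeStamp,
                                      kInputBus,         // bus 1, as in the literal 1 above
                                      inNumberFrames,
                                      &renderList);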

Status-checking helper method

static void checkStatus(OSStatus status, const char* str){
    if (status != noErr) {
        NSLog(@"%s %s error : %ld ",__FUNCTION__, str, status);
    }
}

Conversion method: PCM -> AAC

- (void)convert:(AudioBufferList*)input_bufferList ioOutputDataPacketSize:(UInt32*)packetSizeRef 
{
    UInt32 size = sizeof(UInt32);
    UInt32 maxOutputSize;
    AudioConverterGetProperty(m_audioConverterRef, 
                              kAudioConverterPropertyMaximumOutputPacketSize, 
                              &size, 
                              &maxOutputSize);
    
    AudioBufferList *output_bufferList = (AudioBufferList *)malloc(sizeof(AudioBufferList));

    output_bufferList->mNumberBuffers               = 1;
    output_bufferList->mBuffers[0].mNumberChannels  = 1;
    output_bufferList->mBuffers[0].mDataByteSize    = *packetSizeRef * 2;
    output_bufferList->mBuffers[0].mData  = (AudioUnitSampleType *)malloc(*packetSizeRef * 2);

    OSStatus        err;
    err = AudioConverterFillComplexBuffer(
                                          m_audioConverterRef,
                                          audioConverterComplexInputDataProc,
                                          input_bufferList,
                                          packetSizeRef,
                                          output_bufferList,
                                          NULL
                                          );


    if (err) {NSLog(@"%s : AudioFormat Convert error %d\n",__FUNCTION__, (int)err);  }
}
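
For comparison, here is a sketch of how the fill call is usually made once the input proc actually supplies data. Two details matter: ioOutputDataPacketSize counts output (AAC) packets rather than PCM frames, and AAC output needs a packet description to report the encoded size. feedPCMProc and FeedState are the hypothetical pieces from the sketch earlier, and the fragment is meant to live inside this convert method.

    FeedState feedState = { input_bufferList, *packetSizeRef, NO };   // packetSizeRef = PCM frames captured

    UInt32 outPackets = 1;                               // ask for one AAC packet (1024 PCM frames each)
    AudioBufferList outList;
    outList.mNumberBuffers              = 1;
    outList.mBuffers[0].mNumberChannels = 1;
    outList.mBuffers[0].mDataByteSize   = maxOutputSize;
    outList.mBuffers[0].mData           = malloc(maxOutputSize);

    AudioStreamPacketDescription outDesc;                // AAC is packetized, so ask for descriptions
    OSStatus convErr = AudioConverterFillComplexBuffer(m_audioConverterRef,
                                                       feedPCMProc,
                                                       &feedState,
                                                       &outPackets,   // in: packets wanted, out: packets produced
                                                       &outList,
                                                       &outDesc);
    if (convErr == noErr && outPackets > 0) {
        // outDesc.mDataByteSize bytes of AAC are now in outList.mBuffers[0].mData.
    }
    // If fewer than 1024 PCM frames were supplied, the encoder may produce no
    // output on this call and outPackets comes back as 0.
    free(outList.mBuffers[0].mData);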

This is the initialization method.

- (void)initialize
{ 
    // ...
    
    
    OSStatus status;
    
    // Describe audio component
    AudioComponentDescription desc;
    desc.componentType = kAudioUnitType_Output;
    desc.componentSubType = kAudioUnitSubType_RemoteIO;
    desc.componentFlags = 0;
    desc.componentFlagsMask = 0;
    desc.componentManufacturer = kAudioUnitManufacturer_Apple;
    
    // Get component
    AudioComponent inputComponent = AudioComponentFindNext(NULL, &desc);
    
    // Get audio units
    status = AudioComponentInstanceNew(inputComponent, &m_audioUnit);
    checkStatus(status,"AudioComponentInstanceNew");
    
    // Enable IO for recording
    UInt32 flag = 1;
    status = AudioUnitSetProperty(m_audioUnit, 
                                  kAudioOutputUnitProperty_EnableIO, 
                                  kAudioUnitScope_Input, 
                                  kInputBus,
                                  &flag, 
                                  sizeof(flag));
    checkStatus(status,"Enable IO for recording");
    
    // Enable IO for playback
    status = AudioUnitSetProperty(m_audioUnit, 
                                  kAudioOutputUnitProperty_EnableIO, 
                                  kAudioUnitScope_Output, 
                                  kOutputBus,
                                  &flag, 
                                  sizeof(flag));
    checkStatus(status,"Enable IO for playback");
    
    // Describe format
    AudioStreamBasicDescription audioFormat;
    audioFormat.mSampleRate   = 44100.00;
    audioFormat.mFormatID   = kAudioFormatLinearPCM;
    audioFormat.mFormatFlags  = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
    audioFormat.mFramesPerPacket = 1;
    audioFormat.mChannelsPerFrame = 1;
    audioFormat.mBitsPerChannel  = 16;
    audioFormat.mBytesPerPacket  = 2;
    audioFormat.mBytesPerFrame  = 2;
    
    // Apply format
    status = AudioUnitSetProperty(m_audioUnit, 
                                  kAudioUnitProperty_StreamFormat, 
                                  kAudioUnitScope_Output, 
                                  kInputBus, 
                                  &audioFormat, 
                                  sizeof(audioFormat));
    checkStatus(status,"Apply format1");
    status = AudioUnitSetProperty(m_audioUnit, 
                                  kAudioUnitProperty_StreamFormat, 
                                  kAudioUnitScope_Input, 
                                  kOutputBus, 
                                  &audioFormat, 
                                  sizeof(audioFormat));
    checkStatus(status,"Apply format2");
    
    
    // Set input callback
    AURenderCallbackStruct callbackStruct;
    callbackStruct.inputProc = recordingCallback;
    callbackStruct.inputProcRefCon = self;
    status = AudioUnitSetProperty(m_audioUnit, 
                                  kAudioOutputUnitProperty_SetInputCallback, 
                                  kAudioUnitScope_Global, 
                                  kInputBus, 
                                  &callbackStruct, 
                                  sizeof(callbackStruct));
    checkStatus(status,"Set input callback");

    // Initialise
    status = AudioUnitInitialize(m_audioUnit);
    checkStatus(status,"AudioUnitInitialize");

    // Set ASBD For converting Output Stream
    
    AudioStreamBasicDescription outputFormat;
    memset(&outputFormat, 0, sizeof(outputFormat));
    outputFormat.mSampleRate  = 44100.00;
    outputFormat.mFormatID   = kAudioFormatMPEG4AAC;
    outputFormat.mFormatFlags       = kMPEG4Object_AAC_Main; 
    outputFormat.mFramesPerPacket = 1024;
    outputFormat.mChannelsPerFrame = 1;
    outputFormat.mBitsPerChannel = 0;
    outputFormat.mBytesPerFrame = 0;
    outputFormat.mBytesPerPacket = 0;

    //Create An Audio Converter
    status = AudioConverterNew( &audioFormat, &outputFormat, &m_audioConverterRef );
    checkStatus(status,"Create An Audio Converter");
    if(m_audioConverterRef) NSLog(@"m_audioConverterRef is created");
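
    // (Not in the original code.) Many AAC-encoding examples also set a bit
    // rate on the converter right after creating it; 64000 is only illustrative.
    UInt32 bitRate = 64000;
    status = AudioConverterSetProperty(m_audioConverterRef,
                                       kAudioConverterEncodeBitRate,
                                       sizeof(bitRate),
                                       &bitRate);
    checkStatus(status, "Set encoder bit rate");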

}

AudioOutputUnitStart

- (void)StartRecord
{
    OSStatus status = AudioOutputUnitStart(m_audioUnit);
    checkStatus(status,"AudioOutputUnitStart");
}

AudioOutputUnitStop

- (void)StopRecord
{
    OSStatus status = AudioOutputUnitStop(m_audioUnit);
    checkStatus(status,"AudioOutputUnitStop");
}

Finish

- (void)finish
{
    AudioUnitUninitialize(m_audioUnit);
}


@end

2 Answers


It took me a long time to understand AudioConverterFillComplexBuffer, especially how to use it to convert audio in real time. I posted my approach here: How do I use CoreAudio's AudioConverter to encode AAC in real time?

answered 2015-05-16T02:23:31.440

See https://developer.apple.com/library/ios/samplecode/iPhoneACFileConvertTest/Introduction/Intro.html

It demonstrates how to use the Audio Converter APIs to convert PCM audio into compressed formats, including AAC.
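
Roughly, the pattern that sample illustrates is a loop that keeps calling AudioConverterFillComplexBuffer until the input callback reports that the source PCM is exhausted. This is a sketch of that pattern, not Apple's code, reusing the converter, output buffer, and input proc names from the sketches above:

while (true) {
    UInt32 numPackets = 1;                                    // request one AAC packet per pass
    outList.mBuffers[0].mDataByteSize = maxOutputSize;        // reset the output buffer each time

    OSStatus err = AudioConverterFillComplexBuffer(m_audioConverterRef,
                                                   feedPCMProc,
                                                   &feedState,
                                                   &numPackets,
                                                   &outList,
                                                   &outDesc);
    if (numPackets > 0) {
        // write outDesc.mDataByteSize bytes from outList.mBuffers[0].mData to the AAC file
    }
    if (err != noErr || numPackets == 0) break;               // finished, or the input ran dry
}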

answered 2014-02-23T15:51:52.580