
I am trying to time-stretch an audio file using Audio Units.

I am using this code: http://pastebin.com/DWMTw4n9

Here is the sample project I am working with: https://dl.dropbox.com/u/12216224/buglets/TimeSliderDemo-Buglet.zip

How can I save the rendered audio to a file using AudioUnits on iOS?

What I have tried:

I tried to save the audio from a render callback:

OSStatus MyAURenderCallback(void *inRefCon,
                            AudioUnitRenderActionFlags *actionFlags,
                            const AudioTimeStamp *inTimeStamp,
                            UInt32 inBusNumber,
                            UInt32 inNumberFrames,
                            AudioBufferList *ioData) {

    AudioUnit mixerUnit = (AudioUnit)inRefCon;

    AudioUnitRender(mixerUnit,
                    actionFlags,
                    inTimeStamp,
                    0,
                    inNumberFrames,
                    ioData);

    // Store the rendered audio in a file
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *outputURL = paths[0];
    NSFileManager *manager = [NSFileManager defaultManager];
    [manager createDirectoryAtPath:outputURL withIntermediateDirectories:YES attributes:nil error:nil];
    outputURL = [outputURL stringByAppendingPathComponent:@"outputRamp.aif"];

    ExtAudioFileWriteAsync((__bridge ExtAudioFileRef)(outputURL),
                           inNumberFrames,
                           ioData);

    return noErr;
}


AURenderCallbackStruct callbackStruct = {0};
callbackStruct.inputProc = MyAURenderCallback;
//    callbackStruct.inputProcRefCon = mixerUnit;
callbackStruct.inputProcRefCon = (__bridge void *)self;

AudioUnitSetProperty(self.effectUnit,
                     kAudioUnitProperty_SetRenderCallback,
                     kAudioUnitScope_Input,
                     0,
                     &callbackStruct,
                     sizeof(callbackStruct));

But the callback is never called. Is this the correct way to save the output of an Audio Unit to a file?


1 Answer


You should set the input callback on the AUGraph node rather than directly on the unit; since your units are managed by an AUGraph, that is likely why your callback is never invoked:

AURenderCallbackStruct inputCallbackStruct;
inputCallbackStruct.inputProc = &MyAURenderCallback;
inputCallbackStruct.inputProcRefCon = (__bridge void *)(self);

OSStatus result = noErr;

// Attach the render callback function to remoteIO's input on bus 0
result = AUGraphSetNodeInputCallback(self.auGraph,
                                     ioNode,
                                     0,
                                     &inputCallbackStruct);

CheckError(result, "AUGraphSetNodeInputCallback");

See the sample project Audio Mixer and the question How to add a Render Callback to RemoteIO after a Mixer in iOS.
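The ioNode above is the RemoteIO node of the AUGraph. As a rough sketch (the local variable names and the omitted effect/mixer wiring are illustrative, not from the original post), such a graph might be set up like this:

#import <AudioToolbox/AudioToolbox.h>

// Illustrative sketch: build an AUGraph containing a RemoteIO node so that
// AUGraphSetNodeInputCallback can later be attached to it. Error handling omitted.
AUGraph auGraph;
AUNode  ioNode;

NewAUGraph(&auGraph);

AudioComponentDescription ioDesc = {0};
ioDesc.componentType         = kAudioUnitType_Output;
ioDesc.componentSubType      = kAudioUnitSubType_RemoteIO;
ioDesc.componentManufacturer = kAudioUnitManufacturer_Apple;

AUGraphAddNode(auGraph, &ioDesc, &ioNode);
AUGraphOpen(auGraph);

// Fetch the AudioUnit instance behind the node if it needs configuring
AudioUnit ioUnit;
AUGraphNodeInfo(auGraph, ioNode, NULL, &ioUnit);

// ... add/connect your effect and mixer nodes and set the input callback here ...

AUGraphInitialize(auGraph);
AUGraphStart(auGraph);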

You should create an ExtAudioFileRef (recordingfileref) before rendering starts:

AudioStreamBasicDescription dstFormat;
dstFormat.mSampleRate       = 44100.0;
dstFormat.mFormatID         = kAudioFormatLinearPCM;
dstFormat.mFormatFlags      = kAudioFormatFlagsNativeEndian | kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
dstFormat.mBytesPerPacket   = 4;
dstFormat.mBytesPerFrame    = 4;
dstFormat.mFramesPerPacket  = 1;
dstFormat.mChannelsPerFrame = 2;
dstFormat.mBitsPerChannel   = 16;
dstFormat.mReserved         = 0;

// Create the capture file (_outputURL is an NSURL for the destination file)
OSStatus status = ExtAudioFileCreateWithURL((__bridge CFURLRef)(_outputURL),
                                            kAudioFileWAVEType,
                                            &dstFormat,
                                            NULL,
                                            kAudioFileFlags_EraseFile,
                                            &_recordingfileref);
CheckError(status, "couldn't create audio file");

// Set the capture file's client format to the format delivered by the render callback
status = ExtAudioFileSetProperty(self.recordingfileref,
                                 kExtAudioFileProperty_ClientDataFormat,
                                 sizeof(AudioStreamBasicDescription),
                                 &StreamFormat);
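Here StreamFormat is the client-side format that your render callback actually produces (typically the output stream format of the mixer/effect unit). As a sketch, assuming a self.mixerUnit property (not in the original post), it could be obtained like this:

// Sketch: query the output stream format of the unit feeding the callback
// (self.mixerUnit is an assumed property). Using it as the ExtAudioFile client
// data format lets ExtAudioFileWriteAsync convert to dstFormat on write.
AudioStreamBasicDescription StreamFormat;
UInt32 size = sizeof(StreamFormat);
OSStatus err = AudioUnitGetProperty(self.mixerUnit,
                                    kAudioUnitProperty_StreamFormat,
                                    kAudioUnitScope_Output,
                                    0,
                                    &StreamFormat,
                                    &size);
CheckError(err, "couldn't get stream format");

// Optional: prime the asynchronous writer once from a non-realtime thread so
// the first write from the render callback does not have to allocate buffers.
ExtAudioFileWriteAsync(self.recordingfileref, 0, NULL);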

Then, inside the render callback, you can write ioData:

ExtAudioFileWriteAsync(recordingfileref,
                       inNumberFrames,
                       ioData);
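Putting it together, here is a rough sketch of the corrected callback from the question, writing to the already-created file reference instead of the bridged NSString path (self.mixerUnit, self.recordingfileref and the controller class name are assumptions for illustration):

// Sketch of the corrected render callback: pull audio from the mixer,
// then hand the rendered buffers to the existing ExtAudioFileRef.
OSStatus MyAURenderCallback(void *inRefCon,
                            AudioUnitRenderActionFlags *actionFlags,
                            const AudioTimeStamp *inTimeStamp,
                            UInt32 inBusNumber,
                            UInt32 inNumberFrames,
                            AudioBufferList *ioData) {

    MyAudioController *controller = (__bridge MyAudioController *)inRefCon; // assumed class of self

    OSStatus err = AudioUnitRender(controller.mixerUnit,
                                   actionFlags,
                                   inTimeStamp,
                                   0,
                                   inNumberFrames,
                                   ioData);
    if (err != noErr) return err;

    // Asynchronous, buffered write; intended to be called from the render thread
    return ExtAudioFileWriteAsync(controller.recordingfileref,
                                  inNumberFrames,
                                  ioData);
}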

When you are finished, dispose of the file:

OSStatus status = ExtAudioFileDispose(_recordingfileref);

printf("OSStatus(ExtAudioFileDispose): %d\n", (int)status);