Since AVAudioPlayerNode is essentially a wrapper around kAudioUnitSubType_ScheduledSoundPlayer (presumably with some of the file-reading and buffering code from kAudioUnitSubType_AudioFilePlayer thrown in, but using ExtAudioFile), I ran an experiment to see whether the lower-level counterpart exhibits the same behavior. It's not exactly an apples-to-apples comparison, but kAudioUnitSubType_ScheduledSoundPlayer seems to work as expected, so the problem likely lies in AVAudioPlayerNode itself.
The code I used for testing is below. kAudioUnitSubType_ScheduledSoundPlayer is used to schedule three slices (buffers). They all come from the same file, but that is irrelevant because kAudioUnitSubType_ScheduledSoundPlayer only knows about buffers, not files.
All three slices invoke their callbacks as expected. So it looks like the problem is most likely in how AVAudioPlayerNode handles those callbacks internally and routes them to a non-real-time dispatch queue (the kAudioUnitSubType_ScheduledSoundPlayer callbacks are invoked on the HAL's real-time IO thread, and clients can't be trusted not to block the IO thread).
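As an aside, the warning in the completion proc below is worth heeding in real code. A minimal sketch of a real-time-safe hand-off, assuming a plain C atomic flag is passed as the user data instead of the view controller (the name myRealtimeSafeCompletionProc is purely illustrative), could look something like this:

#include <stdatomic.h>

// Hypothetical real-time-safe completion proc: touches only plain C data.
// No Objective-C messaging, no locks, and no allocation on the IO thread.
static void myRealtimeSafeCompletionProc(void * __nullable userData, ScheduledAudioSlice *slice)
{
    atomic_bool *sliceCompleted = (atomic_bool *)userData;
    atomic_store_explicit(sliceCompleted, true, memory_order_release);
    // A non-real-time context (a timer, dispatch source, etc.) can poll the flag
    // and perform any Objective-C work there.
}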
// ViewController.m

#import "ViewController.h"

@import AudioToolbox;
@import AVFoundation;
@import os.log;

@interface ViewController ()
{
    AUGraph _graph;
    AUNode _player;
    AUNode _mixer;
    AUNode _output;
    ScheduledAudioSlice _slice[3];
    AVAudioPCMBuffer *_buf;
}
- (void)scheduledAudioSliceCompleted:(ScheduledAudioSlice *)slice;
@end

void myScheduledAudioSliceCompletionProc(void * __nullable userData, ScheduledAudioSlice *slice)
{
    // ⚠️ WARNING ⚠️
    // THIS FUNCTION IS CALLED FROM THE REAL TIME RENDERING THREAD.
    // OBJ-C USE HERE IS FOR TESTING CALLBACK FUNCTIONALITY ONLY
    // OBJ-C IS NOT REAL TIME SAFE
    // DO NOT DO THIS IN PRODUCTION CODE!!!
    [(__bridge ViewController *)userData scheduledAudioSliceCompleted:slice];
}
@implementation ViewController

- (void)dealloc {
    [self closeGraph];
}

- (void)viewDidLoad {
    [super viewDidLoad];
    [self openGraph];
    [self schedule];
    [self startPlayer];
    [self startGraph];
}
- (OSStatus)openGraph {
    OSStatus result = NewAUGraph(&_graph);
    if(noErr != result) {
        os_log_error(OS_LOG_DEFAULT, "NewAUGraph failed: %d", result);
        return result;
    }

    // The graph will look like:
    // Player -> MultiChannelMixer -> Output
    AudioComponentDescription desc;

    // Player
    desc.componentType = kAudioUnitType_Generator;
    desc.componentSubType = kAudioUnitSubType_ScheduledSoundPlayer;
    desc.componentManufacturer = kAudioUnitManufacturer_Apple;
    desc.componentFlags = kAudioComponentFlag_SandboxSafe;
    desc.componentFlagsMask = 0;

    result = AUGraphAddNode(_graph, &desc, &_player);
    if(noErr != result) {
        os_log_error(OS_LOG_DEFAULT, "AUGraphAddNode failed: %d", result);
        result = DisposeAUGraph(_graph);
        if(noErr != result)
            os_log_error(OS_LOG_DEFAULT, "DisposeAUGraph failed: %d", result);
        _graph = NULL;
        return result;
    }

    // Mixer
    desc.componentType = kAudioUnitType_Mixer;
    desc.componentSubType = kAudioUnitSubType_MultiChannelMixer;
    desc.componentManufacturer = kAudioUnitManufacturer_Apple;
    desc.componentFlags = kAudioComponentFlag_SandboxSafe;
    desc.componentFlagsMask = 0;

    result = AUGraphAddNode(_graph, &desc, &_mixer);
    if(noErr != result) {
        os_log_error(OS_LOG_DEFAULT, "AUGraphAddNode failed: %d", result);
        result = DisposeAUGraph(_graph);
        if(noErr != result)
            os_log_error(OS_LOG_DEFAULT, "DisposeAUGraph failed: %d", result);
        _graph = NULL;
        return result;
    }

    // Output
    desc.componentType = kAudioUnitType_Output;
    desc.componentSubType = kAudioUnitSubType_HALOutput;
    desc.componentFlags = kAudioComponentFlag_SandboxSafe;
    desc.componentManufacturer = kAudioUnitManufacturer_Apple;
    desc.componentFlagsMask = 0;

    result = AUGraphAddNode(_graph, &desc, &_output);
    if(noErr != result) {
        os_log_error(OS_LOG_DEFAULT, "AUGraphAddNode failed: %d", result);
        result = DisposeAUGraph(_graph);
        if(noErr != result)
            os_log_error(OS_LOG_DEFAULT, "DisposeAUGraph failed: %d", result);
        _graph = NULL;
        return result;
    }

    // Connections
    result = AUGraphConnectNodeInput(_graph, _player, 0, _mixer, 0);
    if(noErr != result) {
        os_log_error(OS_LOG_DEFAULT, "AUGraphConnectNodeInput failed: %d", result);
        result = DisposeAUGraph(_graph);
        if(noErr != result)
            os_log_error(OS_LOG_DEFAULT, "DisposeAUGraph failed: %d", result);
        _graph = NULL;
        return result;
    }

    result = AUGraphConnectNodeInput(_graph, _mixer, 0, _output, 0);
    if(noErr != result) {
        os_log_error(OS_LOG_DEFAULT, "AUGraphConnectNodeInput failed: %d", result);
        result = DisposeAUGraph(_graph);
        if(noErr != result)
            os_log_error(OS_LOG_DEFAULT, "DisposeAUGraph failed: %d", result);
        _graph = NULL;
        return result;
    }

    // Open the graph
    result = AUGraphOpen(_graph);
    if(noErr != result) {
        os_log_error(OS_LOG_DEFAULT, "AUGraphOpen failed: %d", result);
        result = DisposeAUGraph(_graph);
        if(noErr != result)
            os_log_error(OS_LOG_DEFAULT, "DisposeAUGraph failed: %d", result);
        _graph = NULL;
        return result;
    }

    // Set the mixer's volume on the input and output
    AudioUnit au = NULL;
    result = AUGraphNodeInfo(_graph, _mixer, NULL, &au);
    if(noErr != result) {
        os_log_error(OS_LOG_DEFAULT, "AUGraphNodeInfo failed: %d", result);
        result = DisposeAUGraph(_graph);
        if(noErr != result)
            os_log_error(OS_LOG_DEFAULT, "DisposeAUGraph failed: %d", result);
        _graph = NULL;
        return result;
    }

    result = AudioUnitSetParameter(au, kMultiChannelMixerParam_Volume, kAudioUnitScope_Input, 0, 1.f, 0);
    if(noErr != result)
        os_log_error(OS_LOG_DEFAULT, "AudioUnitSetParameter (kMultiChannelMixerParam_Volume, kAudioUnitScope_Input) failed: %d", result);

    result = AudioUnitSetParameter(au, kMultiChannelMixerParam_Volume, kAudioUnitScope_Output, 0, 1.f, 0);
    if(noErr != result)
        os_log_error(OS_LOG_DEFAULT, "AudioUnitSetParameter (kMultiChannelMixerParam_Volume, kAudioUnitScope_Output) failed: %d", result);

    // Initialize the graph
    result = AUGraphInitialize(_graph);
    if(noErr != result) {
        os_log_error(OS_LOG_DEFAULT, "AUGraphInitialize failed: %d", result);
        result = DisposeAUGraph(_graph);
        if(noErr != result)
            os_log_error(OS_LOG_DEFAULT, "DisposeAUGraph failed: %d", result);
        _graph = NULL;
        return result;
    }

    return noErr;
}
- (OSStatus)closeGraph {
    Boolean graphIsRunning = NO;
    OSStatus result = AUGraphIsRunning(_graph, &graphIsRunning);
    if(noErr != result) {
        os_log_error(OS_LOG_DEFAULT, "AUGraphIsRunning failed: %d", result);
        return result;
    }

    if(graphIsRunning) {
        result = AUGraphStop(_graph);
        if(noErr != result) {
            os_log_error(OS_LOG_DEFAULT, "AUGraphStop failed: %d", result);
            return result;
        }
    }

    Boolean graphIsInitialized = false;
    result = AUGraphIsInitialized(_graph, &graphIsInitialized);
    if(noErr != result) {
        os_log_error(OS_LOG_DEFAULT, "AUGraphIsInitialized failed: %d", result);
        return result;
    }

    if(graphIsInitialized) {
        result = AUGraphUninitialize(_graph);
        if(noErr != result) {
            os_log_error(OS_LOG_DEFAULT, "AUGraphUninitialize failed: %d", result);
            return result;
        }
    }

    result = AUGraphClose(_graph);
    if(noErr != result) {
        os_log_error(OS_LOG_DEFAULT, "AUGraphClose failed: %d", result);
        return result;
    }

    result = DisposeAUGraph(_graph);
    if(noErr != result) {
        os_log_error(OS_LOG_DEFAULT, "DisposeAUGraph failed: %d", result);
        return result;
    }

    _graph = NULL;
    _player = -1;
    _mixer = -1;
    _output = -1;

    return noErr;
}
- (OSStatus)startGraph {
    OSStatus result = AUGraphStart(_graph);
    if(noErr != result) {
        os_log_error(OS_LOG_DEFAULT, "AUGraphStart failed: %d", result);
        return result;
    }
    return noErr;
}

- (OSStatus)stopGraph {
    OSStatus result = AUGraphStop(_graph);
    if(noErr != result) {
        os_log_error(OS_LOG_DEFAULT, "AUGraphStop failed: %d", result);
        return result;
    }
    return noErr;
}

- (OSStatus)startPlayer {
    AudioUnit au;
    OSStatus result = AUGraphNodeInfo(_graph, _player, NULL, &au);
    if(noErr != result) {
        os_log_error(OS_LOG_DEFAULT, "AUGraphNodeInfo failed: %d", result);
        return result;
    }

    // Playback of the scheduled slices begins once the schedule start time stamp is set
    AudioTimeStamp ts = {0};
    ts.mFlags = kAudioTimeStampSampleTimeValid;
    ts.mSampleTime = 0;

    result = AudioUnitSetProperty(au, kAudioUnitProperty_ScheduleStartTimeStamp, kAudioUnitScope_Global, 0, &ts, sizeof(ts));
    if(noErr != result) {
        os_log_error(OS_LOG_DEFAULT, "AudioUnitSetProperty failed: %d", result);
        return result;
    }

    return noErr;
}
- (OSStatus)schedule {
    AudioUnit au;
    OSStatus result = AUGraphNodeInfo(_graph, _player, NULL, &au);
    if(noErr != result) {
        os_log_error(OS_LOG_DEFAULT, "AUGraphNodeInfo failed: %d", result);
        return result;
    }

    // Read up to two seconds of audio from the test file into a single buffer
    AVAudioFile *file = [[AVAudioFile alloc] initForReading:[NSURL fileURLWithPath:@"/tmp/test.wav" isDirectory:NO] commonFormat:AVAudioPCMFormatFloat32 interleaved:NO error:nil];
    if(!file)
        return paramErr;

    _buf = [[AVAudioPCMBuffer alloc] initWithPCMFormat:file.processingFormat frameCapacity:(file.processingFormat.sampleRate * 2)];
    if(![file readIntoBuffer:_buf error:nil])
        return paramErr;

    // Schedule the same buffer three times, back to back, at sample times 0, N, and 2N
    AudioTimeStamp ts = {0};
    ts.mFlags = kAudioTimeStampSampleTimeValid;
    ts.mSampleTime = 0;

    _slice[0].mTimeStamp = ts;
    _slice[0].mCompletionProc = myScheduledAudioSliceCompletionProc;
    _slice[0].mCompletionProcUserData = (__bridge void *)self;
    _slice[0].mNumberFrames = _buf.frameLength;
    _slice[0].mBufferList = _buf.mutableAudioBufferList;

    result = AudioUnitSetProperty(au, kAudioUnitProperty_ScheduleAudioSlice, kAudioUnitScope_Global, 0, &_slice[0], sizeof(_slice[0]));
    if(noErr != result) {
        os_log_error(OS_LOG_DEFAULT, "AudioUnitSetProperty failed: %d", result);
        return result;
    }

    ts.mSampleTime += _slice[0].mNumberFrames;

    _slice[1] = _slice[0];
    _slice[1].mTimeStamp = ts;

    result = AudioUnitSetProperty(au, kAudioUnitProperty_ScheduleAudioSlice, kAudioUnitScope_Global, 0, &_slice[1], sizeof(_slice[1]));
    if(noErr != result) {
        os_log_error(OS_LOG_DEFAULT, "AudioUnitSetProperty failed: %d", result);
        return result;
    }

    ts.mSampleTime += _slice[1].mNumberFrames;

    _slice[2] = _slice[1];
    _slice[2].mTimeStamp = ts;

    result = AudioUnitSetProperty(au, kAudioUnitProperty_ScheduleAudioSlice, kAudioUnitScope_Global, 0, &_slice[2], sizeof(_slice[2]));
    if(noErr != result) {
        os_log_error(OS_LOG_DEFAULT, "AudioUnitSetProperty failed: %d", result);
        return result;
    }

    return noErr;
}
- (void)scheduledAudioSliceCompleted:(ScheduledAudioSlice *)slice {
    if(slice == &_slice[0])
        NSLog(@"_slice[0] scheduledAudioSliceCompleted:%p, mFlags = 0x%.2x", slice, slice->mFlags);
    else if(slice == &_slice[1])
        NSLog(@"_slice[1] scheduledAudioSliceCompleted:%p, mFlags = 0x%.2x", slice, slice->mFlags);
    else if(slice == &_slice[2])
        NSLog(@"_slice[2] scheduledAudioSliceCompleted:%p, mFlags = 0x%.2x", slice, slice->mFlags);
    else
        NSLog(@"scheduledAudioSliceCompleted:%p, mFlags = 0x%.2x for unknown slice", slice, slice->mFlags);
}

@end
Output:
XXX _slice[0] scheduledAudioSliceCompleted:0x7f82ee41add0, mFlags = 0x03
XXX _slice[1] scheduledAudioSliceCompleted:0x7f82ee41ae40, mFlags = 0x03
XXX _slice[2] scheduledAudioSliceCompleted:0x7f82ee41aeb0, mFlags = 0x03
An mFlags value of 0x03 equals kScheduledAudioSliceFlag_Complete | kScheduledAudioSliceFlag_BeganToRender.
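For reference, here is a small sketch that decodes mFlags using the flag constants declared in AudioToolbox (the helper name logSliceFlags is purely illustrative):

// Sketch: decode ScheduledAudioSlice.mFlags using the AudioToolbox constants
static void logSliceFlags(const ScheduledAudioSlice *slice)
{
    if(slice->mFlags & kScheduledAudioSliceFlag_Complete)
        NSLog(@"slice rendered completely");
    if(slice->mFlags & kScheduledAudioSliceFlag_BeganToRender)
        NSLog(@"slice began to render");
    if(slice->mFlags & kScheduledAudioSliceFlag_BeganToRenderLate)
        NSLog(@"slice began to render late");
}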