1

在下面的代码中,我创建了两个声音,sound1 和 sound2。每个声音都包含许多样本,这些样本允许同时播放相同的声音。问题是,如果我创建看似超过 6 到 8 个 AVAudioPlayerNodes,每个节点都有一个 AVAudioUnitTimePitch,那么音频就会完全混乱。当我将样本数量增加得太高时,我甚至无法播放单个声音。我不确定我的代码是否错误,或者 AVAudioEngine 的节点限制是多少。

// Manages one shared AVAudioEngine and the sounds routed through it.
// Each Sound owns a small pool of Samples (an AVAudioPlayerNode feeding an
// AVAudioUnitTimePitch) so the same buffer can play several times concurrently.
class AudioManager {
    var audioEngine: AVAudioEngine!
    var mixer: AVAudioMixerNode!
    var sound1: Sound!
    var sound2: Sound!

    init() {
        audioEngine = AVAudioEngine()
        // Touching mainMixerNode implicitly creates the mixer and output
        // nodes and connects them for us.
        mixer = audioEngine.mainMixerNode

        do {
            try audioEngine.start()
        } catch let e as NSError {
            print("Error Starting AudioEngine \(e)")
        }

        sound1 = Sound(aManager: self, path: "assets/sounds/waterRefill", ofType: "mp3", numOfSamples: 7)
        sound2 = Sound(aManager: self, path: "assets/sounds/balloonCreate", ofType: "mp3", numOfSamples: 2)
    }

    func playSound() {
        sound1.play(1.0, pitch: 1.0)
    }

    func playSound2() {
        sound2.play(1.0, pitch: 1.0)
    }

    // One loadable audio file plus a round-robin pool of player nodes.
    class Sound {
        var audioManager: AudioManager!
        var audioFileBuffer: AVAudioPCMBuffer!
        var numSamples: Int = 1
        var audioIndex: Int = 0
        var sampleList: [Sample] = [Sample]()

        init(aManager: AudioManager, path: String, ofType: String, numOfSamples: Int) {
            audioManager = aManager
            numSamples = max(1, numOfSamples)
            audioFileBuffer = createAudioBuffer(path, ofType: ofType)
            // BUG FIX: only build the sample pool if the buffer actually
            // loaded — Sample.init dereferences audioFileBuffer.format and
            // would crash on nil.
            if audioFileBuffer != nil {
                for _ in 0..<numSamples {
                    sampleList.append(Sample(sound: self))
                }
            }
        }

        // Reads the whole file into a PCM buffer using the file's own
        // processing format. Returns nil on any failure (the original
        // force-unwrapped pathForResource and crashed on a missing asset).
        func createAudioBuffer(path: String, ofType: String) -> AVAudioPCMBuffer? {
            guard let filePath = NSBundle.mainBundle().pathForResource(path, ofType: ofType) else {
                print("Error: audio resource not found: \(path).\(ofType)")
                return nil
            }
            let fileURL = NSURL(fileURLWithPath: filePath)
            do {
                let audioFile = try AVAudioFile(forReading: fileURL)
                let audioFormat = audioFile.processingFormat
                let audioFrameCount = UInt32(audioFile.length)
                let buffer = AVAudioPCMBuffer(PCMFormat: audioFormat, frameCapacity: audioFrameCount)
                do {
                    try audioFile.readIntoBuffer(buffer)
                    return buffer
                } catch let e as NSError {
                    print("Error loading Audio Into Buffer: \(e)")
                }
            } catch let e as NSError {
                print("Error loading Audio File: \(e)")
            }
            return nil
        }

        // Advance the round-robin index over the sample pool.
        private func runIndex() {
            audioIndex = (audioIndex + 1) % numSamples
        }

        // Plays the first idle sample in the pool at the given volume/pitch.
        // If every sample is still busy, the request is silently dropped
        // (same policy as before — we never interrupt a playing sample here).
        func play(volume: Float, pitch: Float) {
            // BUG FIX: guard against an empty pool (buffer failed to load);
            // the original would crash on sampleList[audioIndex].
            if sampleList.isEmpty {
                return
            }
            var count = 0
            while count < numSamples {
                if numSamples > 1 {
                    runIndex()
                }
                let sample = sampleList[audioIndex]
                if !sample.pitchPlayer.playing {
                    sample.volume = volume
                    sample.pitch = pitch
                    sample.playSample()
                    break
                }
                count += 1
            }
        }

        // A player node + time-pitch effect pair, attached once to the
        // shared engine and reused for every playback of this Sound.
        class Sample {
            var parentSound: Sound!
            var pitchPlayer: AVAudioPlayerNode!
            var timePitch: AVAudioUnitTimePitch!
            var volume: Float = 1.0
            var pitch: Float = 1.0

            init(sound: Sound) {
                parentSound = sound
                pitchPlayer = AVAudioPlayerNode()
                timePitch = AVAudioUnitTimePitch()

                let engine = parentSound.audioManager.audioEngine
                engine.attachNode(pitchPlayer)
                engine.attachNode(timePitch)

                // player -> timePitch -> main mixer, all in the buffer's format.
                engine.connect(pitchPlayer, to: timePitch, format: parentSound.audioFileBuffer.format)
                engine.connect(timePitch, to: parentSound.audioManager.mixer, format: parentSound.audioFileBuffer.format)
            }

            func playSample() {
                pitchPlayer.volume = volume
                timePitch.pitch = pitch
                print("Sample Play")

                // BUG FIX: schedule the buffer BEFORE calling play() so the
                // node starts with data queued, instead of starting empty and
                // relying on .Interrupts to splice the buffer in afterwards.
                pitchPlayer.scheduleBuffer(parentSound.audioFileBuffer, atTime: nil, options: .Interrupts, completionHandler: { [weak self] in
                    // BUG FIX: the engine can still be rendering this node
                    // when the completion handler fires, so hop to the main
                    // queue before stopping it rather than tearing it down
                    // mid-render. Also [weak self] instead of [unowned self]:
                    // if the Sample is deallocated while audio is in flight,
                    // unowned would crash here.
                    dispatch_async(dispatch_get_main_queue()) {
                        guard let strongSelf = self else { return }
                        print("Is Stopped: \(strongSelf.pitchPlayer.playing)")
                        strongSelf.pitchPlayer.stop()
                        print("Is Stopped: \(strongSelf.pitchPlayer.playing)")
                    }
                })
                pitchPlayer.play()
            }
        }
    }
}
4

1 回答 1

1

我从未听说过对 AVAudioEngine 图中的节点数量有任何限制,但是在添加数百个节点后,我看到了一些非常糟糕的性能。我找到的解决方案是在播放完这些节点后将其删除。

scheduleBuffer完成处理程序是这样做的好地方,但我会将删除包装在一个dispatch_async-to-the-main-queue 调用中,因为音频引擎在调用完成处理程序时可能仍在使用该节点。

另一种选择是在播放完样本后重用播放器节点,而不是为下一个样本创建新节点,但这种方法实施起来可能有点复杂。

于 2016-06-14T23:07:27.197 回答