
I'm setting up an AVAudioEngine implementation to capture audio from the microphone and stream it over a websocket connection. First I tap the microphone input and an intermediate mixer that downsamples the mic's audio from 44 kHz to 11 kHz. Somehow, only the print from the downsampling mixer's tap ever shows up in the log; the print from the tap installed on the mic's inputNode never fires. What am I doing wrong? Am I approaching this with the wrong mental model?

func initializeBlastEngine(){
        var listOfInputs = AVAudioSession.sharedInstance().availableInputs
        print("LIST OF INPUTS: "+(listOfInputs?.description)!)


        do{
            //pick which one you want (change index)
            var availableInput: AVAudioSessionPortDescription = listOfInputs![0] as AVAudioSessionPortDescription

            //set the Preferred Input
            try! AVAudioSession.sharedInstance().setPreferredInput(availableInput)
            try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryPlayAndRecord)

            let ioBufferDuration = 128.0 / 44100.0

            try AVAudioSession.sharedInstance().setPreferredIOBufferDuration(ioBufferDuration)
        }
        catch{
            print("AudioSession Init ERROR")
        }


        audioEngine.stop()
        audioEngine = AVAudioEngine.init()
        playerNode = AVAudioPlayerNode.init()

        inputNode = audioEngine.inputNode!
        mainMixer = audioEngine.mainMixerNode

        //uncommenting causes: Terminating app due to uncaught exception 'com.apple.coreaudio.avfaudio', reason: 'required condition is false: node != _mixer && node != _outputNode && node != _inputNode'
        //audioEngine.attachNode(inputNode)

        audioEngine.attachNode(downMixer)
        audioEngine.attachNode(playerNode)
        inputNode.installTapOnBus(0, bufferSize: 4096, format: inputNode.inputFormatForBus(0), block:
            { (buffer: AVAudioPCMBuffer!, time: AVAudioTime!) -> Void in
//THIS CODE NEVER EXECUTES!!!

                print(NSString(string: "MIC Tap"))
                print(buffer.format.description)
                var micFormat = self.inputNode.inputFormatForBus(0).description
                print("mic Format: "+micFormat)
        })

        downMixer.installTapOnBus(0, bufferSize: 4096, format: downMixer.outputFormatForBus(0), block:  //originally 1024
            { (buffer: AVAudioPCMBuffer!, time: AVAudioTime!) -> Void in
                print(NSString(string: "downMixer Tap"))
                do{
                    print("Downmixer Tap Format: "+self.downMixer.outputFormatForBus(0).description)//buffer.audioBufferList.debugDescription)

                    print(NSString(string: "writing"))
                }
                catch let error as NSError{
                    print(NSString(string: "Write failed: "+error.description));
                }
        })

        let format = inputNode.inputFormatForBus(0)
        audioEngine.connect(inputNode, to: downMixer, format: format)
        audioEngine.connect(downMixer, to: audioEngine.outputNode, format: format16KHzMono)
        audioEngine.connect(playerNode, to: mainMixer, format: mainMixer.outputFormatForBus(0))

        //ENGINE IGNITION!!!!
        audioEngine.prepare()
        try! audioEngine.start()
       //  connectWebSocket() //This is a tale for another day, first lets get the audio engine running

    }

1 Answer

let node = audioEngine.inputNode!
let recordingFormat = node.outputFormat(forBus: 0)
node.installTap(onBus: 0, bufferSize: 1024, format: recordingFormat) { (buffer, time) in
    self.audioBuffer = buffer;
}

The format should be the output format of the node you are tapping.
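
For illustration, here is a minimal sketch of the same mic → downmixer pipeline with that rule applied, written against the Swift 3+ API used above. It assumes a recent SDK where inputNode is non-optional; the downMixer name and the 11025 Hz Float32 target format are illustrative choices, not taken from the original code.

import AVFoundation

let engine = AVAudioEngine()
let downMixer = AVAudioMixerNode()
engine.attach(downMixer)

let input = engine.inputNode
// Use the node's OUTPUT format for taps and connections, not inputFormat(forBus:)
let micFormat = input.outputFormat(forBus: 0)

// Illustrative reduced-rate mono format; the mixer performs the sample-rate conversion
let targetFormat = AVAudioFormat(commonFormat: .pcmFormatFloat32,
                                 sampleRate: 11025,
                                 channels: 1,
                                 interleaved: false)!

engine.connect(input, to: downMixer, format: micFormat)
engine.connect(downMixer, to: engine.mainMixerNode, format: targetFormat)

// Tap the mic with its own output format (the fix suggested above)
input.installTap(onBus: 0, bufferSize: 4096, format: micFormat) { buffer, _ in
    print("MIC tap: \(buffer.format)")
}

// The downmixer's output format is the connection format above, so the buffers
// delivered here are already downsampled and ready to hand to the websocket
downMixer.installTap(onBus: 0, bufferSize: 4096, format: downMixer.outputFormat(forBus: 0)) { buffer, _ in
    print("downMixer tap: \(buffer.format)")
}

engine.prepare()
try! engine.start()

Note that the crash mentioned in the question's comment also disappears here: inputNode, mainMixerNode and outputNode are created and attached by the engine itself, so only additional nodes such as the downmixer need to be attached.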

answered 2016-08-15 at 00:42