
I'll start with a simple "playground" view controller class that demonstrates my problem:

import UIKit
import AVFoundation

class AudioEnginePlaygroundViewController: UIViewController {
    private var audioEngine: AVAudioEngine!
    private var micTapped = false
    override func viewDidLoad() {
        super.viewDidLoad()
        configureAudioSession()
        audioEngine = AVAudioEngine()
    }

    @IBAction func toggleMicTap(_ sender: Any) {
        guard let mic = audioEngine.inputNode else {
            return
        }
        if micTapped {
            mic.removeTap(onBus: 0)
            micTapped = false
            return
        }
        stopAudioPlayback()

        let micFormat = mic.inputFormat(forBus: 0)
        print("installing tap: \(micFormat.sampleRate) -- \(micFormat.channelCount)")
        mic.installTap(onBus: 0, bufferSize: 2048, format: micFormat) { (buffer, when) in
            print("in tap completion")
            let sampleData = UnsafeBufferPointer(start: buffer.floatChannelData![0], count: Int(buffer.frameLength))
        }
        micTapped = true
        startEngine()
    }

    @IBAction func playAudioFile(_ sender: Any) {
        stopAudioPlayback()
        let playerNode = AVAudioPlayerNode()

        let audioUrl = Bundle.main.url(forResource: "test_audio", withExtension: "wav")!
        let audioFile = readableAudioFileFrom(url: audioUrl)
        audioEngine.attach(playerNode)
        audioEngine.connect(playerNode, to: audioEngine.outputNode, format: audioFile.processingFormat)
        startEngine()
        playerNode.scheduleFile(audioFile, at: nil, completionHandler: nil)
        playerNode.play()
    }

    // MARK: Internal Methods

    private func configureAudioSession() {
        do {
            try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryPlayAndRecord, with: [.mixWithOthers, .defaultToSpeaker])
            try AVAudioSession.sharedInstance().setActive(true)
        } catch { }
    }

    private func readableAudioFileFrom(url: URL) -> AVAudioFile {
        var audioFile: AVAudioFile!
        do {
            try audioFile = AVAudioFile(forReading: url)
        } catch { }
        return audioFile
    }

    private func startEngine() {
        guard !audioEngine.isRunning else {
            return
        }

        do {
            try audioEngine.start()
        } catch { }
    }

    private func stopAudioPlayback() {
        audioEngine.stop()
        audioEngine.reset()
    }
}

The VC above has an AVAudioEngine instance and two UIButton actions: one plays an audio file found at a hardcoded URL, and the other toggles installing/removing a tap on the engine's inputNode.

My goal is to get live microphone tapping and audio file playback working simultaneously, but completely independently of one another. That is, I want to be able to trigger playback regardless of the current state of my mic tap, and vice versa. If I install the tap before triggering audio file playback, everything works as expected. But if I play the audio file first and then try to install the tap, I get the following crash:

[avae] AVAEInternal.h:70:_AVAE_Check: required condition is false: [AVAEGraphNode.mm:810:CreateRecordingTap: (IsFormatSampleRateAndChannelCountValid(format))]

That led me to inspect the mic format's data via the log statement just above the installTap call. Sure enough, when I install the tap before any playback, I get the expected sample rate of 44100.0 and channel count of 1. But when I play the audio file first and then install the mic tap, my log shows a sample rate of 0 and a channel count of 2, which produces the error shown above.
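For context, the input node has formats on both scopes, and both can be logged the same way (a quick sketch, assuming mic is the unwrapped inputNode from toggleMicTap above):

let hwFormat = mic.inputFormat(forBus: 0)    // hardware-side (input scope) format
let outFormat = mic.outputFormat(forBus: 0)  // format the node supplies to the rest of the engine
print("hw: \(hwFormat.sampleRate) Hz, \(hwFormat.channelCount) ch")
print("out: \(outFormat.sampleRate) Hz, \(outFormat.channelCount) ch")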

I've tried tweaking the AVAudioEngine's start/reset flow, tried different category/mode combinations for my AVAudioSession (see my configureAudioSession method), and tried building the tap format manually, like so:

let micFormat = mic.inputFormat(forBus: 0)
var trueFormat: AVAudioFormat!
if micFormat.sampleRate == 0 {
    trueFormat = AVAudioFormat(standardFormatWithSampleRate: 44100, channels: 1)
} else {
    trueFormat = micFormat
}
print("installing tap: \(micFormat.sampleRate) -- \(micFormat.channelCount)")
mic.installTap(onBus: 0, bufferSize: 2048, format: trueFormat) { (buffer, when) in
    print("in tap completion")
    let sampleData = UnsafeBufferPointer(start: buffer.floatChannelData![0], count: Int(buffer.frameLength))
}

This gives me a similar, but different, error:

[avae] AVAEInternal.h:70:_AVAE_Check: required condition is false: [AVAudioIONodeImpl.mm:896:SetOutputFormat: (IsFormatSampleRateAndChannelCountValid(hwFormat))]

I can't see any reason why the mic's format data would differ depending on whether or not an AVAudioPlayerNode has been played.


1 Answer


After some digging I found the problem. It lies with the engine's inputNode singleton. From the documentation:

The audio engine creates a singleton on demand when inputNode is first accessed. To receive input, connect another audio node from the output of the input audio node, or create a recording tap on it.

Along with a reference to the format issue I was hitting:

Check the input format of the input node (specifically, the hardware format) for a non-zero sample rate and channel count to see if input is enabled.
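In code terms, that check might look something like this (my own sketch of the quoted note, using the mic property from the class further down):

let hwFormat = mic.inputFormat(forBus: 0)
if hwFormat.sampleRate == 0 || hwFormat.channelCount == 0 {
    // the engine was configured without its input node -- input is not enabled
    print("input not enabled: \(hwFormat.sampleRate) Hz, \(hwFormat.channelCount) ch")
}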

In my playground class, the flow that triggers audio file playback never touches the engine's inputNode before creating the "active chain":

audioEngine.connect(playerNode, to: audioEngine.outputNode, format: audioFile.processingFormat)

It seems you must access AVAudioEngine's inputNode before start() if you want the engine to configure itself internally for input. Even stop()-ing and reset()-ing the engine doesn't cause a later access of inputNode to reconfigure it. (I suspect that manually breaking the active chain with disconnectNode calls would allow an internal reconfiguration, but I haven't confirmed that yet.)
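Distilled down, the point is just this (a sketch; per the documentation quote above, reading the property once is enough to make the engine create and configure its input singleton):

audioEngine = AVAudioEngine()
_ = audioEngine.inputNode   // touch the input singleton before start()
try? audioEngine.start()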

So the fix, code-wise, is simple: just access the engine's input node right after instantiating the engine so that it configures itself for audio input. Here's the entire class with file playback and mic tapping working together:

import UIKit
import AVFoundation

class AudioEnginePlaygroundViewController: UIViewController {
    private var audioEngine: AVAudioEngine!
    private var mic: AVAudioInputNode!
    private var micTapped = false

    override func viewDidLoad() {
        super.viewDidLoad()
        configureAudioSession()
        audioEngine = AVAudioEngine()
        mic = audioEngine.inputNode!
    }

    @IBAction func toggleMicTap(_ sender: Any) {
        if micTapped {
            mic.removeTap(onBus: 0)
            micTapped = false
            return
        }

        let micFormat = mic.inputFormat(forBus: 0)
        mic.installTap(onBus: 0, bufferSize: 2048, format: micFormat) { (buffer, when) in
            let sampleData = UnsafeBufferPointer(start: buffer.floatChannelData![0], count: Int(buffer.frameLength))
        }
        micTapped = true
        startEngine()
    }

    @IBAction func playAudioFile(_ sender: Any) {
        stopAudioPlayback()
        let playerNode = AVAudioPlayerNode()

        let audioUrl = Bundle.main.url(forResource: "test_audio", withExtension: "wav")!
        let audioFile = readableAudioFileFrom(url: audioUrl)
        audioEngine.attach(playerNode)
        audioEngine.connect(playerNode, to: audioEngine.outputNode, format: audioFile.processingFormat)
        startEngine()
        playerNode.scheduleFile(audioFile, at: nil, completionHandler: nil)
        playerNode.play()
    }

    // MARK: Internal Methods

    private func configureAudioSession() {
        do {
            try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryPlayAndRecord, with: [.mixWithOthers, .defaultToSpeaker])
            try AVAudioSession.sharedInstance().setActive(true)
        } catch { }
    }

    private func readableAudioFileFrom(url: URL) -> AVAudioFile {
        var audioFile: AVAudioFile!
        do {
            try audioFile = AVAudioFile(forReading: url)
        } catch { }
        return audioFile
    }

    private func startEngine() {
        guard !audioEngine.isRunning else {
            return
        }

        do {
            try audioEngine.start()
        } catch { }
    }

    private func stopAudioPlayback() {
        audioEngine.stop()
        audioEngine.reset()
    }
}
Answered 2017-12-20T21:29:08.633