I have an application that receives a streamed Opus audio signal over the network and decodes it to PCM; the output data is 16-bit PCM. The decoding works, but when I try to play it with AVAudioEngine it fails, and I can't get it to work after changing the sample rate from 44100 to 48000.
This is the part of the code I use to play the audio:
self.audioEngine = AVAudioEngine()
self.player = AVAudioPlayerNode()
self.downMixer = AVAudioMixerNode()
self.audioEngine!.attach(downMixer!)
self.audioEngine!.attach(player!)
let format48KHzMono = AVAudioFormat.init(commonFormat: AVAudioCommonFormat.pcmFormatInt16 , sampleRate: 48000.0, channels: 1, interleaved: true)
self.audioEngine!.connect(downMixer!, to: audioEngine!.outputNode, format: format48KHzMono)//use new audio format
self.audioEngine!.connect(downMixer!, to: audioEngine!.mainMixerNode , format: format48KHzMono)
self.audioEngine!.connect(player!, to: audioEngine!.outputNode , format: player!.outputFormat(forBus: 0))
downMixer!.outputVolume = 0.0
audioEngine!.prepare()
try! audioEngine!.start()
self.player?.play()
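As a sanity check, something like the lines below can be added right after start() to print the formats each node actually negotiated; the variable names are the same audioEngine and player as above, everything else is just for illustration:

// Diagnostic only: print the formats the engine is really using,
// to see whether the hardware side is still at 44100 Hz while the
// scheduled buffers are 48000 Hz Int16.
print("outputNode: \(audioEngine!.outputNode.outputFormat(forBus: 0))")
print("mainMixerNode: \(audioEngine!.mainMixerNode.outputFormat(forBus: 0))")
print("player: \(player!.outputFormat(forBus: 0))")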
This is how I convert the PCM data into an AVAudioPCMBuffer:
func toPCMBuffer(data: NSData) -> AVAudioPCMBuffer {
    // Format of the incoming NSData: 16-bit PCM, mono, 48 kHz, interleaved
    let audioFormat = AVAudioFormat(commonFormat: AVAudioCommonFormat.pcmFormatInt16, sampleRate: 48000.0, channels: 1, interleaved: true)
    let PCMBuffer = AVAudioPCMBuffer(pcmFormat: audioFormat!, frameCapacity: UInt32(data.length) / audioFormat!.streamDescription.pointee.mBytesPerFrame)
    PCMBuffer!.frameLength = PCMBuffer!.frameCapacity
    // Copy the raw Int16 samples into the buffer's first (and only) channel
    let channels = UnsafeBufferPointer(start: PCMBuffer!.int16ChannelData, count: Int(PCMBuffer!.format.channelCount))
    data.getBytes(UnsafeMutableRawPointer(channels[0]), length: data.length)
    return PCMBuffer!
}
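For reference, this is roughly how the helper behaves for a single 20 ms Opus frame (960 samples at 48 kHz); the test data here is just silence and only serves as an illustration:

// 960 Int16 mono frames == 1920 bytes == one 20 ms Opus frame at 48 kHz
let silence = Data(count: 960 * MemoryLayout<Int16>.size)
let testBuffer = toPCMBuffer(data: silence as NSData)
print(testBuffer.frameLength)   // expected: 960
print(testBuffer.format)        // expected: 1 ch, 48000 Hz, Int16, interleaved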
And in the receive loop I use this:
if let decodedDataChunk = OpusKit.shared.decodeData(frame) {
    let pcmData: AVAudioPCMBuffer = self.toPCMBuffer(data: decodedDataChunk as NSData)
    self.player?.scheduleBuffer(pcmData)
}
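For debugging, scheduleBuffer can also be given a completion handler (sketch below, using the same pcmData as in the loop); it only shows whether scheduled buffers are actually consumed by the player:

// Illustration only: log when the player has finished rendering this buffer
self.player?.scheduleBuffer(pcmData) {
    print("buffer consumed, frames: \(pcmData.frameLength)")
}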