9

我不能用 AVAudioPCMBuffer 播放声音(虽然我可以用 AVAudioFile 播放)。我得到了这个错误。

错误:AVAudioBuffer.mm:169:-[AVAudioPCMBuffer initWithPCMFormat:frameCapacity:]:所需条件为假:isCommonFormat

下面是我的代码,非常感谢您的帮助。

import UIKit
import AVFoundation

class ViewController: UIViewController {

let audioEngine: AVAudioEngine = AVAudioEngine()
let audioFilePlayer: AVAudioPlayerNode = AVAudioPlayerNode()

override func viewDidLoad() {
    super.viewDidLoad()
    // Do any additional setup after loading the view, typically from a nib.

    audioEngine.attachNode(audioFilePlayer)

    let filePath: String = NSBundle.mainBundle().pathForResource("test", ofType: "mp3")!
    let fileURL: NSURL = NSURL(fileURLWithPath: filePath)!
    let audioFile = AVAudioFile(forReading: fileURL, error: nil)
    let audioFormat = audioFile.fileFormat
    let audioFrameCount = UInt32(audioFile.length)
    let audioFileBuffer = AVAudioPCMBuffer(PCMFormat: audioFormat, frameCapacity: audioFrameCount)

    var mainMixer = audioEngine.mainMixerNode
    audioEngine.connect(audioFilePlayer, to:mainMixer, format: audioFileBuffer.format)

    audioFilePlayer.scheduleBuffer(audioFileBuffer, atTime: nil, options: nil, completionHandler: nil)

    var engineError: NSError?
    audioEngine.startAndReturnError(&engineError)

    audioFilePlayer.play()
}

override func didReceiveMemoryWarning() {
    super.didReceiveMemoryWarning()
    // Dispose of any resources that can be recreated.
}

}
4

5 回答 5

8

让我分享一下,这以某种方式起作用,尽管我不完全理解。

import UIKit
import AVFoundation

class ViewController: UIViewController {

var audioEngine: AVAudioEngine = AVAudioEngine()
var audioFilePlayer: AVAudioPlayerNode = AVAudioPlayerNode()

override func viewDidLoad() {
    super.viewDidLoad()
    // Do any additional setup after loading the view, typically from a nib.


    let filePath: String = NSBundle.mainBundle().pathForResource("test", ofType: "mp3")!
    println("\(filePath)")
    let fileURL: NSURL = NSURL(fileURLWithPath: filePath)!
    let audioFile = AVAudioFile(forReading: fileURL, error: nil)
    let audioFormat = audioFile.processingFormat
    let audioFrameCount = UInt32(audioFile.length)
    let audioFileBuffer = AVAudioPCMBuffer(PCMFormat: audioFormat, frameCapacity: audioFrameCount)
    audioFile.readIntoBuffer(audioFileBuffer, error: nil)

    var mainMixer = audioEngine.mainMixerNode
    audioEngine.attachNode(audioFilePlayer)
    audioEngine.connect(audioFilePlayer, to:mainMixer, format: audioFileBuffer.format)
    audioEngine.startAndReturnError(nil)

    audioFilePlayer.play()
    audioFilePlayer.scheduleBuffer(audioFileBuffer, atTime: nil, options: nil, completionHandler: nil)
}

override func didReceiveMemoryWarning() {
    super.didReceiveMemoryWarning()
    // Dispose of any resources that can be recreated.
}

}
于 2014-11-01T03:35:30.197 回答
3

问题是您将 PCM 缓冲区的格式设置为非 PCM 格式。因此,您需要AVAudioPCMBuffer使用 AVAudioFile 的processingFormat.

于 2014-11-04T01:12:35.407 回答
3

使用时,AVAudioPCMBuffer()如果您尝试使用不是mixer.outputFormat(forBus: 0)

它不会接受单声道格式,它会抱怨混音器的输出格式和您的格式之间的不匹配,即使您描述的格式完全相同,也不会产生解释问题所在的错误。

于 2019-08-10T23:42:13.933 回答
0

将@Bick 的代码更新为Swift 5.3

代码逻辑很容易搞定

  • 首先,准备数据

创建一个空的AVAudioPCMBuffer,然后在其中填充音频数据。

  • 其次,连接节点,用数据玩

    import UIKit
    import AVFoundation
    
      class ViewControllerX: UIViewController {
    
        var audioEngine = AVAudioEngine()
        var audioFilePlayer = AVAudioPlayerNode()
    
        override func viewDidLoad() {
          super.viewDidLoad()
    
          // prepare the data
          guard let filePath = Bundle.main.path(forResource: "test", ofType: "mp3") else{ return }
    
          print("\(filePath)")
          let fileURL = URL(fileURLWithPath: filePath)
          do {
              let audioFile = try AVAudioFile(forReading: fileURL)
    
              let audioFormat = audioFile.processingFormat
              let audioFrameCount = UInt32(audioFile.length)
              guard let audioFileBuffer = AVAudioPCMBuffer(pcmFormat: audioFormat, frameCapacity: audioFrameCount) else{ return }
              try audioFile.read(into: audioFileBuffer)
    
              // connect the nodes, and use the data to play
              let mainMixer = audioEngine.mainMixerNode
              audioEngine.attach(audioFilePlayer)
              audioEngine.connect(audioFilePlayer, to: mainMixer, format: audioFileBuffer.format)
    
              try audioEngine.start()
    
              audioFilePlayer.play()
              audioFilePlayer.scheduleBuffer(audioFileBuffer, completionHandler: nil)
    
          } catch {
              print(error)
          }
    
        }
    }
    
于 2020-12-02T03:24:22.350 回答
0

您应该使用 audioFile.processingFormat 作为 AVAudioPCMBuffer 构造函数的参数,而不是调用 audioFile.fileFormat。

let buffer = AVAudioPCMBuffer(pcmFormat: audioFile.processingFormat,
                                            frameCapacity: bufferCapacity) 
于 2021-01-27T05:18:34.577 回答