How to play a sound with AVAudioPCMBuffer

Date: 2014-10-31 08:58:39

Tags: swift ios8 avaudiopcmbuffer avaudiofile avaudioplayernode

I can't play a sound using AVAudioPCMBuffer (although I can play it with AVAudioFile). I get this error:

ERROR: AVAudioBuffer.mm:169: -[AVAudioPCMBuffer initWithPCMFormat:frameCapacity:]: required condition is false: isCommonFormat

Here is my code; I would greatly appreciate any help.

import UIKit
import AVFoundation

class ViewController: UIViewController {

    let audioEngine: AVAudioEngine = AVAudioEngine()
    let audioFilePlayer: AVAudioPlayerNode = AVAudioPlayerNode()

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view, typically from a nib.

        audioEngine.attachNode(audioFilePlayer)

        let filePath: String = NSBundle.mainBundle().pathForResource("test", ofType: "mp3")!
        let fileURL: NSURL = NSURL(fileURLWithPath: filePath)!
        let audioFile = AVAudioFile(forReading: fileURL, error: nil)
        let audioFormat = audioFile.fileFormat
        let audioFrameCount = UInt32(audioFile.length)
        let audioFileBuffer = AVAudioPCMBuffer(PCMFormat: audioFormat, frameCapacity: audioFrameCount)

        var mainMixer = audioEngine.mainMixerNode
        audioEngine.connect(audioFilePlayer, to: mainMixer, format: audioFileBuffer.format)

        audioFilePlayer.scheduleBuffer(audioFileBuffer, atTime: nil, options: nil, completionHandler: nil)

        var engineError: NSError?
        audioEngine.startAndReturnError(&engineError)

        audioFilePlayer.play()
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

}

5 Answers:

Answer 0 (score: 5)

Let me share this: although I can't fully explain why, this approach does work.

import UIKit
import AVFoundation

class ViewController: UIViewController {

    var audioEngine: AVAudioEngine = AVAudioEngine()
    var audioFilePlayer: AVAudioPlayerNode = AVAudioPlayerNode()

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view, typically from a nib.

        let filePath: String = NSBundle.mainBundle().pathForResource("test", ofType: "mp3")!
        println("\(filePath)")
        let fileURL: NSURL = NSURL(fileURLWithPath: filePath)!
        let audioFile = AVAudioFile(forReading: fileURL, error: nil)
        let audioFormat = audioFile.processingFormat
        let audioFrameCount = UInt32(audioFile.length)
        let audioFileBuffer = AVAudioPCMBuffer(PCMFormat: audioFormat, frameCapacity: audioFrameCount)
        audioFile.readIntoBuffer(audioFileBuffer, error: nil)

        var mainMixer = audioEngine.mainMixerNode
        audioEngine.attachNode(audioFilePlayer)
        audioEngine.connect(audioFilePlayer, to: mainMixer, format: audioFileBuffer.format)
        audioEngine.startAndReturnError(nil)

        audioFilePlayer.play()
        audioFilePlayer.scheduleBuffer(audioFileBuffer, atTime: nil, options: nil, completionHandler: nil)
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

}

Answer 1 (score: 2)

The problem is that you are setting your PCM buffer's format to a non-PCM format. You need to create the AVAudioPCMBuffer with the AVAudioFile's processingFormat.
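A minimal sketch of that fix in current Swift syntax, using the question's bundled "test.mp3" (the helper name is illustrative, not from the original code):

import AVFoundation

// Sketch only: load a bundled file into a PCM buffer using processingFormat.
func loadPCMBuffer() throws -> AVAudioPCMBuffer? {
    guard let url = Bundle.main.url(forResource: "test", withExtension: "mp3") else { return nil }
    let audioFile = try AVAudioFile(forReading: url)

    // fileFormat describes the on-disk (compressed) data and trips the isCommonFormat assertion;
    // processingFormat is the uncompressed PCM format the engine actually works with.
    guard let buffer = AVAudioPCMBuffer(pcmFormat: audioFile.processingFormat,
                                        frameCapacity: AVAudioFrameCount(audioFile.length)) else { return nil }
    try audioFile.read(into: buffer)
    return buffer
}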

Answer 2 (score: 0)

When using AVAudioPCMBuffer(), you will run into strange errors if you try to use a pcmFormat other than mixer.outputFormat(forBus: 0).

It will not accept a mono-channel format, and it will complain about a mismatch between the mixer's output format and your format even when you describe exactly the same format, without producing an error that explains what the actual problem is.
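A short sketch along the lines of this answer, allocating the buffer directly from the mixer's output format (the helper name and the 4096-frame capacity are illustrative assumptions):

import AVFoundation

// Sketch only: build a buffer whose format matches the mixer's output format,
// so the buffer format and the connection format cannot disagree.
func makeMixerFormatBuffer(engine: AVAudioEngine) -> AVAudioPCMBuffer? {
    let mixerFormat = engine.mainMixerNode.outputFormat(forBus: 0)
    guard let buffer = AVAudioPCMBuffer(pcmFormat: mixerFormat, frameCapacity: 4096) else { return nil }
    buffer.frameLength = buffer.frameCapacity   // mark the frames as valid before filling them with samples
    return buffer
}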

Answer 3 (score: 0)

Updating @Bick's code to Swift 5.3.

The code logic is easy to follow:

  • First, prepare the data

Create an empty AVAudioPCMBuffer, then fill it with the audio data.

  • Second, connect the nodes and use the data to play

    import UIKit
    import AVFoundation
    
      class ViewControllerX: UIViewController {
    
        var audioEngine = AVAudioEngine()
        var audioFilePlayer = AVAudioPlayerNode()
    
        override func viewDidLoad() {
          super.viewDidLoad()
    
          // prepare the data
          guard let filePath = Bundle.main.path(forResource: "test", ofType: "mp3") else{ return }
    
          print("\(filePath)")
          let fileURL = URL(fileURLWithPath: filePath)
          do {
              let audioFile = try AVAudioFile(forReading: fileURL)
    
              let audioFormat = audioFile.processingFormat
              let audioFrameCount = UInt32(audioFile.length)
              guard let audioFileBuffer = AVAudioPCMBuffer(pcmFormat: audioFormat, frameCapacity: audioFrameCount) else{ return }
              try audioFile.read(into: audioFileBuffer)
    
              // connect the nodes, and use the data to play
              let mainMixer = audioEngine.mainMixerNode
              audioEngine.attach(audioFilePlayer)
              audioEngine.connect(audioFilePlayer, to: mainMixer, format: audioFileBuffer.format)
    
              try audioEngine.start()
    
              audioFilePlayer.play()
              audioFilePlayer.scheduleBuffer(audioFileBuffer, completionHandler: nil)
    
          } catch {
              print(error)
          }
    
        }
    }
    

Answer 4 (score: 0)

You should pass audioFile.processingFormat to the AVAudioPCMBuffer initializer instead of audioFile.fileFormat.

let buffer = AVAudioPCMBuffer(pcmFormat: audioFile.processingFormat,
                              frameCapacity: bufferCapacity)
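
For completeness, a hedged sketch of how bufferCapacity might be derived and the buffer scheduled; the helper and its parameters are illustrative, and the player node is assumed to already be attached to a running engine and connected to its mixer:

import AVFoundation

// Sketch only: size the buffer to hold the whole file, read it in, and schedule it.
func scheduleWholeFile(_ audioFile: AVAudioFile, on player: AVAudioPlayerNode) throws {
    let bufferCapacity = AVAudioFrameCount(audioFile.length)
    guard let buffer = AVAudioPCMBuffer(pcmFormat: audioFile.processingFormat,
                                        frameCapacity: bufferCapacity) else { return }
    try audioFile.read(into: buffer)   // decode the file into uncompressed PCM
    player.scheduleBuffer(buffer, completionHandler: nil)
}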