Can't use AVCaptureAudioDataOutputSampleBufferDelegate

Time: 2016-09-08 14:39:13

Tags: ios swift avfoundation swift3 avaudiosession

I've been googling and researching this for days, but I can't get it to work and I can't find any solution on the internet.

I'm trying to capture sound with the microphone and then play it back through the speaker.

Here is my code:

class ViewController: UIViewController, AVAudioRecorderDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {

var recordingSession: AVAudioSession!
var audioRecorder: AVAudioRecorder!
var captureSession: AVCaptureSession!
var microphone: AVCaptureDevice!
var inputDevice: AVCaptureDeviceInput!
var outputDevice: AVCaptureAudioDataOutput!

override func viewDidLoad() {
    super.viewDidLoad()

    recordingSession = AVAudioSession.sharedInstance()

    do{
        try recordingSession.setCategory(AVAudioSessionCategoryPlayAndRecord)
        try recordingSession.setMode(AVAudioSessionModeVoiceChat)
        try recordingSession.setPreferredSampleRate(44000.00)
        try recordingSession.setPreferredIOBufferDuration(0.2)
        try recordingSession.setActive(true)

        recordingSession.requestRecordPermission() { [unowned self] (allowed: Bool) -> Void in
            DispatchQueue.main.async {
                if allowed {

                    do{
                        self.microphone = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio)
                        self.inputDevice = try AVCaptureDeviceInput(device: self.microphone)

                        self.outputDevice = AVCaptureAudioDataOutput()
                        self.outputDevice.setSampleBufferDelegate(self, queue: DispatchQueue.main)

                        self.captureSession = AVCaptureSession()
                        self.captureSession.addInput(self.inputDevice)
                        self.captureSession.addOutput(self.outputDevice)
                        self.captureSession.startRunning()
                    }
                    catch let error {
                        print(error.localizedDescription)
                    }
                }
            }
        }
    }catch let error{
        print(error.localizedDescription)
    }
}

The callback function:

func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {

    var audioBufferList = AudioBufferList(
        mNumberBuffers: 1,
        mBuffers: AudioBuffer(mNumberChannels: 0,
        mDataByteSize: 0,
        mData: nil)
    )

    var blockBuffer: CMBlockBuffer?

    let osStatus = CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(
        sampleBuffer,
        nil,
        &audioBufferList,
        MemoryLayout<AudioBufferList>.size,
        nil,
        nil,
        UInt32(kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment),
        &blockBuffer
    )

    do {
        var data: NSMutableData = NSMutableData.init()
        for i in 0..<audioBufferList.mNumberBuffers {

            var audioBuffer = AudioBuffer(
                 mNumberChannels: audioBufferList.mBuffers.mNumberChannels,
                 mDataByteSize: audioBufferList.mBuffers.mDataByteSize,
                 mData: audioBufferList.mBuffers.mData
            )

            let frame = audioBuffer.mData?.load(as: Float32.self)
            data.append(audioBuffer.mData!, length: Int(audioBuffer.mDataByteSize))

        }

        var dataFromNsData = Data.init(referencing: data)
        var avAudioPlayer: AVAudioPlayer = try AVAudioPlayer.init(data: dataFromNsData)
        avAudioPlayer.prepareToPlay()
        avAudioPlayer.play()
    }
    catch let error {
        print(error.localizedDescription)
        // prints out: The operation couldn't be completed. (OSStatus error 1954115647.)
    }
}

Any help with this would be amazing, and it would probably help a lot of other people too, since there are so many incomplete Swift versions of this out there.

Thanks.

1 Answer:

Answer 0 (score: 2):

You're so close! You are capturing audio in the didOutputSampleBuffer callback, but that is a high-frequency callback, so you are creating a lot of AVAudioPlayers and passing them raw LPCM data, when they only know how to parse CoreAudio file types (that OSStatus 1954115647 is the four-character code 'typ?', i.e. kAudioFileUnsupportedFileTypeError), and then they go out of scope anyway.
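For contrast, AVAudioPlayer is perfectly happy when it gets a complete, containerized audio file. A quick sketch, where "sound.caf" is just a hypothetical bundled resource:

// "sound.caf" is a hypothetical bundled file; any CoreAudio-readable container works.
let url = Bundle.main.url(forResource: "sound", withExtension: "caf")!
let filePlayer = try AVAudioPlayer(contentsOf: url)
filePlayer.play()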

You could very easily play the buffers you're capturing with AVCaptureSession using AVAudioEngine's AVAudioPlayerNode, but at that point you might as well use AVAudioEngine to record from the microphone too.
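A minimal sketch of that approach, using Swift 3-era API names to match the question (engine.inputNode was still optional in that SDK; on newer SDKs drop the force unwrap, and the 512-frame buffer size is just a low-latency guess). Wear headphones when testing, because routing the microphone straight to the speaker will feed back:

import UIKit
import AVFoundation

class ViewController: UIViewController {

    // The engine must outlive viewDidLoad, so keep it as a property.
    let engine = AVAudioEngine()

    override func viewDidLoad() {
        super.viewDidLoad()

        do {
            // Same session setup idea as the question: record and play simultaneously.
            try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryPlayAndRecord)
            try AVAudioSession.sharedInstance().setActive(true)

            let input = engine.inputNode!   // optional in the Swift 3-era SDK
            let inputFormat = input.inputFormat(forBus: 0)

            // attach() makes the engine retain the player node.
            let player = AVAudioPlayerNode()
            engine.attach(player)
            engine.connect(player, to: engine.mainMixerNode, format: inputFormat)

            // Tap the microphone and schedule every captured buffer straight onto the player node.
            input.installTap(onBus: 0, bufferSize: 512, format: inputFormat) { (buffer, _) in
                player.scheduleBuffer(buffer, completionHandler: nil)
            }

            try engine.start()
            player.play()
        } catch let error {
            print(error.localizedDescription)
        }
    }
}

This keeps the audio as AVAudioPCMBuffers end to end (the representation AVAudioPlayer refused), and a single long-lived player node consumes every buffer instead of a new AVAudioPlayer per callback. On a device, iOS will still prompt for microphone permission the first time the input node is used.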