AudioKit 4.3: record audio, render it to a file offline, then play it back

Posted: 2018-06-10 23:06:38

Tags: ios swift audiokit

I am trying to record audio, then save it offline with AudioKit.renderToFile, and then play the originally recorded audio file back with AKPlayer.

import UIKit
import AudioKit


class ViewController: UIViewController {

private var recordUrl:URL!
private var isRecording:Bool = false

public var player:AKPlayer!
private let format = AVAudioFormat(commonFormat: .pcmFormatFloat64, sampleRate: 44100, channels: 2, interleaved: true)!

private var amplitudeTracker:AKAmplitudeTracker!
private var boostedMic:AKBooster!
private var mic:AKMicrophone!
private var micMixer:AKMixer!
private var silence:AKBooster!
public var recorder: AKNodeRecorder!

@IBOutlet weak var recordButton: UIButton!

override func viewDidLoad() {
    super.viewDidLoad()
    //self.recordUrl = Bundle.main.url(forResource: "sound", withExtension: "caf")
    //self.startAudioPlayback(url: self.recordUrl!)
    self.recordUrl = self.urlForDocument("record.caf")
}

override func didReceiveMemoryWarning() {
    super.didReceiveMemoryWarning()
    // Dispose of any resources that can be recreated.
}

func requestMic(completion: @escaping () -> Void) {
    AVAudioSession.sharedInstance().requestRecordPermission({ (granted: Bool) in

        if granted { completion()}
    })
}
public func switchToMicrophone() {
    stopEngine()
    do {
        try AKSettings.setSession(category: .playAndRecord, with: .allowBluetoothA2DP)
    } catch {
        AKLog("Could not set session category.")
    }
    mic = AKMicrophone()
    micMixer = AKMixer(mic)
    boostedMic = AKBooster(micMixer, gain: 5)
    amplitudeTracker = AKAmplitudeTracker(boostedMic)
    silence = AKBooster(amplitudeTracker, gain: 0)
    AudioKit.output = silence
    startEngine()
}

@IBAction func startStopRecording(_ sender: Any) {
    self.isRecording = !self.isRecording

    if self.isRecording {
        self.startRecording()
        self.recordButton.setTitle("Stop Recording", for: .normal)
    } else {
        self.stopRecording()
        self.recordButton.setTitle("Start Recording", for: .normal)
    }
}

func startRecording() {
    self.requestMic() {
        self.switchToMicrophone()
        if let url = self.recordUrl {
            do {
            let audioFile = try AKAudioFile(forWriting: url, settings: self.format.settings, commonFormat: .pcmFormatFloat64, interleaved: true)

            self.recorder = try AKNodeRecorder(node: self.micMixer, file: audioFile)

            try self.recorder.reset()
            try self.recorder.record()
            } catch {
                print("error setting up recording", error)
            }
        }
    }
}

func stopRecording() {
    recorder.stop()
    startAudioPlayback(url: self.recordUrl)
}

@IBAction func saveToDisk(_ sender: Any) {
    if let source = self.player, let saveUrl = self.urlForDocument("pitchAudio.caf") {
        do {
            source.stop()

            let audioFile = try AKAudioFile(forWriting: saveUrl, settings: self.format.settings, commonFormat: .pcmFormatFloat64, interleaved: true)
            try AudioKit.renderToFile(audioFile, duration: source.duration, prerender: {
                source.play()
            })
            print("audio file rendered")

        } catch {
            print("error rendering", error)
        }

        // PROBLEM STARTS HERE //

        self.startAudioPlayback(url: self.recordUrl)

    }
}

public func startAudioPlayback(url:URL) {
    print("loading playback audio", url)
    self.stopEngine()

    do {
        try AKSettings.setSession(category: .playback)
        player = AKPlayer.init()
        try player.load(url: url)
    }
    catch {
        print("error setting up audio playback", error)
        return
    }

    player.prepare()
    player.isLooping = true
    self.setPitch(pitch: self.getPitch(), saveValue: false)
    AudioKit.output = player

    startEngine()
    startPlayer()
}


public func startPlayer() {
    if AudioKit.engine.isRunning { self.player.play() }
    else { print("audio engine not running, can't play") }
}

public func startEngine() {
    if !AudioKit.engine.isRunning {
        print("starting engine")
        do { try AudioKit.start() }
        catch {
            print("error starting audio", error)
        }
    }
}

public func stopEngine(){

    if AudioKit.engine.isRunning {
        print("stopping engine")
        do {
            try AudioKit.stop()
        }
        catch {
            print("error stopping audio", error)
        }
    }

    //playback doesn't work without this?
    mic = nil
}

@IBAction func changePitch(_ sender: UISlider) {
    self.setPitch(pitch:Double(sender.value))
}

public func getPitch() -> Double {
    return UserDefaults.standard.double(forKey: "pitchFactor")
}

public func setPitch(pitch:Double, saveValue:Bool = true) {
    player.pitch = pitch * 1000.0
    if saveValue {
        UserDefaults.standard.set(pitch, forKey: "pitchFactor")
        UserDefaults.standard.synchronize()
    }
}

func urlForDocument(_ named:String) -> URL? {
    let path = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0] as String
    let url = NSURL(fileURLWithPath: path)
    if let pathComponent = url.appendingPathComponent(named) {
        return pathComponent
    }
    return nil
}

}

The order of calls is switchToMicrophone, startRecording, stopRecording, startAudioPlayback, saveToDisk, and then startAudioPlayback again.
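In code terms the flow is roughly as follows (a sketch of the sequence only, not an excerpt from the repo):

switchToMicrophone()                  // set the .playAndRecord session, build the mic graph, start the engine
startRecording()                      // AKNodeRecorder writes record.caf
stopRecording()                       // stops the recorder, then calls startAudioPlayback(url: recordUrl)
saveToDisk(sender)                    // AudioKit.renderToFile renders pitchAudio.caf
startAudioPlayback(url: recordUrl)    // restarts the engine for playback -> errors below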

See the github repo for the full code in ViewController.swift.

After the renderToFile call, when AudioKit is restarted for the player, the following errors appear:

[mcmx] 338: input bus 0 sample rate is 0
[avae] AVAEInternal.h:103:_AVAE_CheckNoErr: [AVAudioEngineGraph.mm:1265:Initialize: (err = AUGraphParser::InitializeActiveNodesInOutputChain(ThisGraph, kOutputChainOptimizedTraversal, *GetOutputNode(), isOutputChainActive)): error -10875
[avae] AVAudioEngine.mm:149: -[AVAudioEngine prepare]: Engine@0x1c4008ae0: could not initialize, error = -10875
[mcmx] 338: input bus 0 sample rate is 0
[avae] AVAEInternal.h:103:_AVAE_CheckNoErr: [AVAudioEngineGraph.mm:1265:Initialize: (err = AUGraphParser::InitializeActiveNodesInOutputChain(ThisGraph, kOutputChainOptimizedTraversal, *GetOutputNode(), isOutputChainActive)): error -10875

error starting audio Error Domain=com.apple.coreaudio.avfaudio Code=-10875 "(null)" UserInfo={failed call=err = AUGraphParser::InitializeActiveNodesInOutputChain(ThisGraph, kOutputChainOptimizedTraversal, *GetOutputNode(), isOutputChainActive)}

Everything works perfectly if I take out either the recording step or the offline render, but not when both are present.

2 Answers:

Answer 0 (score: 1)

The problem may be in your order of execution. Try swapping startAudioPlayback and saveToDisk so that saveToDisk runs first, and only afterwards is the file read and played back with startAudioPlayback.
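One way to read that suggestion (a sketch only, reusing the names and helpers from the question, and not tested against the reported error) is to have saveToDisk load and play the file it has just rendered, rather than reloading the original recording afterwards:

@IBAction func saveToDisk(_ sender: Any) {
    if let source = self.player, let saveUrl = self.urlForDocument("pitchAudio.caf") {
        do {
            source.stop()
            let audioFile = try AKAudioFile(forWriting: saveUrl, settings: self.format.settings,
                                            commonFormat: .pcmFormatFloat64, interleaved: true)
            // render offline first...
            try AudioKit.renderToFile(audioFile, duration: source.duration, prerender: {
                source.play()
            })
            // ...then read the freshly rendered file back and play that
            self.startAudioPlayback(url: saveUrl)
        } catch {
            print("error rendering", error)
        }
    }
}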

Edit: I think I have now pinned down the problem. After the file is saved, the other temporary file that is being recorded to disappears for some reason. This still needs to be narrowed down further.

Alternatively, you could keep the currently playing file going without interruption and send the whole saveToDisk method to a background thread.
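A minimal sketch of that idea, assuming the trailing startAudioPlayback call has been moved out of saveToDisk (whether renderToFile is actually safe to run off the main thread here is not something I have verified):

DispatchQueue.global(qos: .userInitiated).async {
    // run the offline render without blocking the main thread
    self.saveToDisk(self)
    DispatchQueue.main.async {
        // hop back to the main thread before restarting playback / touching the UI
        self.startAudioPlayback(url: self.recordUrl)
    }
}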

I will try to tinker with it some more in my spare time and let you know.

Edit 2: Check this answer: https://stackoverflow.com/a/48133092/9497657
If you get stuck, try posting your question here: https://github.com/audiokit/AudioKit/issues/

Also have a look at this tutorial: https://www.raywenderlich.com/145770/audiokit-tutorial-getting-started

It may also be worth messaging Aurelius Prochazka, since he is one of the AudioKit developers and may be able to help.

Answer 1 (score: 0)

I was able to get it working by combining recording and playback into a single signal chain:

// mic -> mixer -> boost -> amplitude tracking, then silenced so the mic is not monitored
mixer = AKMixer(mic)
boostedMic = AKBooster(mixer, gain: 5)
amplitudeTracker = AKAmplitudeTracker(boostedMic)
micBooster = AKBooster(amplitudeTracker, gain: 0)

// the player lives in the same graph as the mic chain
player = AKPlayer()
try? player.load(url: self.recordUrl)
player.prepare()
player.gain = 2.0

// both branches feed one output mixer, so the engine never has to be torn down and rebuilt
outputMixer = AKMixer(micBooster, player)
AudioKit.output = outputMixer
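With a single graph like that, the engine only has to be started once and both the recorder and the player live inside it. Roughly how that might be driven (a sketch assuming the recorder, recordUrl and format properties from the question's code; this part is not from the original answer):

do {
    try AudioKit.start()
    // record from the mixer node into the same file the player will read
    let file = try AKAudioFile(forWriting: recordUrl, settings: format.settings)
    recorder = try AKNodeRecorder(node: mixer, file: file)
    try recorder.record()
    // ... later, when recording is finished:
    recorder.stop()
    try player.load(url: recordUrl)
    player.play()
} catch {
    print("audio setup error", error)
}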