我有两个视图控制器正在执行不同的音频相关功能。第一个视图viewController,需要录制音频,并且还要对正在录制的声音的幅度和频率进行实时处理。第二个视图控制器需要播放录制的音频,并使用播放头做一些事情。
我的问题是,当我在iphone 6设备的两个屏幕之间来回走动几次时,跟踪器的幅度和频率仅等于0.这种行为在我的模拟器上不会发生。同样,我尝试了在工作流程中的不同点开始+停止跟踪器和/或音频引擎的不同组合,但后来我只是遇到了https://github.com/audiokit/AudioKit/issues/643的崩溃。我一直想弄清楚的一件事是如何制作aukiokit的单例实例,所以我只能启动引擎一次,但我无法从示例中得到它。
以下是使用audiokit,麦克风和跟踪器的代码的关键部分:
工作流程中的第一个视图控制器
class ViewController: UIViewController {
@IBAction func analyzeSpeechAction(_ sender: Any) {
//Audiokit.stop()
stopTimers()
let vc = AnalysisViewController(nibName: "AnalysisViewController", bundle: nil)
print("ANALYZING")
do {
try player.reloadFile()
} catch { print("Errored reloading.") }
let recordedDuration = player != nil ? player.audioFile.duration : 0
if recordedDuration > 0.0 {
recorder.stop()
}
tracker.stop()
navigationController?.pushViewController(vc, animated: true)
}
func stopTimers(){
if timerTest != nil {
timerTest!.invalidate()
timerTest = nil
}
if timer != nil {
timer!.invalidate()
timer = nil
}
}
var mic: AKMicrophone!
var tracker: AKFrequencyTracker!
var silence: AKBooster!
var mainMixer: AKMixer!
var timerTest : Timer?
var micMixer: AKMixer!
var recorder: AKNodeRecorder!
var player: AKAudioPlayer!
var tape: AKAudioFile!
var micBooster: AKBooster!
override func viewDidLoad() {
super.viewDidLoad()
AKAudioFile.cleanTempDirectory()
AKSettings.bufferLength = .medium
do {
try AKSettings.setSession(category: .playAndRecord, with: .allowBluetoothA2DP)
} catch {
AKLog("Could not set session category.")
}
AKSettings.defaultToSpeaker = true
AKSettings.audioInputEnabled = true
mic = AKMicrophone()
tracker = AKFrequencyTracker(mic)
tracker.start()
silence = AKBooster(tracker, gain: 0)
micMixer = AKMixer(mic)
micBooster = AKBooster(micMixer)
// Will set the level of microphone monitoring
micBooster.gain = 0
recorder = try? AKNodeRecorder(node: micMixer)
if let file = recorder.audioFile {
player = try? AKAudioPlayer(file: file)
}
mainMixer = AKMixer(micBooster)
do {
try recorder.record()
} catch { print("Errored recording.") }
AudioKit.output = mainMixer
}
viewDidAppear(){
AudioKit.stop()
AudioKit.output = silence
AudioKit.start()
if timer == nil{
timer = Timer.scheduledTimer(timeInterval: 1, target:self,
selector: #selector(ViewController.updateCounter), userInfo: nil, repeats: true)
}
}
@objc func updateUI() {
frame = frame + 1
print("AMP")
print(tracker.amplitude)
print(tracker.frequency)
}
}
第二视图控制器
class AnalysisViewController: UIViewController {
@IBOutlet weak var barChartView: LineChartView!
@IBAction func recordSpeechNavigation(_ sender: Any) {
let vc = ViewController(nibName: "ViewController", bundle: nil)
//AudioKit.stop()
player.stop()
navigationController?.pushViewController(vc, animated: true)
}
@IBAction func playButtonAction(_ sender: UIButton) {
playAudio()
}
override func viewDidLoad() {
super.viewDidLoad()
//move to completion handler
player.completionHandler = {() -> Void in
self.playerStatusButton.setTitle("Play", for: .normal)
self.timerTest!.invalidate()
self.relativePlayheadPosition = 0.0
self.movePlayhead()
print("complete")
}
}
}
我为了达到这个目的而捣乱了一堆例子,所以如果我不正确地使用某些AudioKit方法,我会道歉。关键行为发生在第一个视图的viewDidLoad(),viewDidAppear()和updateUI()与第二个视图的recordSpeechNavigation()之间。
有没有人能够了解可能发生的事情或我如何正确实施这些内容?