我正在使用 AVAudioEngine 在 SCNNode 上每秒播放一次 HRTF 空间音频,但出现如下错误:
2019-02-12 19:13:06.402139+0800 Test[520:83464] [avae] AVAEInternal.h:70:_AVAE_Check: required condition is false: [AVAudioEngine.mm:361:AttachNode: (!nodeimpl->HasEngineImpl())]
2019-02-12 19:13:06.402423+0800 Test[520:83464] *** Terminating app due to uncaught exception 'com.apple.coreaudio.avfaudio', reason: 'required condition is false: !nodeimpl->HasEngineImpl()'
我的代码:
import UIKit
import Foundation
import SceneKit
import AVFoundation
/// Builds an AVAudioEngine graph that spatializes voice playback (HRTF)
/// through a distortion → delay → environment chain.
///
/// NOTE(review): this class both subclasses `AVAudioEngine` *and* owns a
/// separate `engine` instance. The subclass relationship is kept only for
/// source compatibility with existing callers; all work happens on `engine`.
/// NOTE(review): calling `AudioLayerEngine().createPlayer(node: myNode)`
/// creates a brand-new, immediately-discarded engine on every call — keep
/// one shared instance alive (e.g. a property on the view controller)
/// instead, or the engine is deallocated and playback stops.
class AudioLayerEngine: AVAudioEngine {
    var engine: AVAudioEngine!
    var environment: AVAudioEnvironmentNode!
    var outputBuffer: AVAudioPCMBuffer!
    var voicePlayer: AVAudioPlayerNode!
    var multiChannelEnabled: Bool!
    // Shared effect nodes for the primary voice chain. Each AVAudioNode may
    // be attached to an engine only once — these are attached exactly once,
    // in wireEngine(). Never re-attach them (that is what raised
    // "required condition is false: !nodeimpl->HasEngineImpl()").
    let delay = AVAudioUnitDelay()
    let distortion = AVAudioUnitDistortion()
    let reverb = AVAudioUnitReverb()

    override init() {
        super.init()
        engine = AVAudioEngine()
        environment = AVAudioEnvironmentNode()
        engine.attach(environment)
        voicePlayer = AVAudioPlayerNode()
        engine.attach(voicePlayer)
        voicePlayer.volume = 1.0
        outputBuffer = loadVoice()
        wireEngine()
        startEngine()
        voicePlayer.scheduleBuffer(outputBuffer, completionHandler: nil)
        voicePlayer.play()
    }

    /// Starts the underlying engine, logging (rather than rethrowing) any
    /// failure so init can complete.
    func startEngine() {
        do {
            try engine.start()
        } catch {
            // Surface the actual error instead of a bare message.
            print("error loading engine: \(error)")
        }
    }

    /// Loads the bundled "beep-07.wav" into a PCM buffer sized to the file.
    /// - Returns: the decoded buffer (also stored in `outputBuffer`).
    /// NOTE(review): still traps (force unwrap / IUO) if the resource is
    /// missing — the non-optional return type is kept for existing callers.
    func loadVoice() -> AVAudioPCMBuffer {
        let url = URL(fileURLWithPath: Bundle.main.path(forResource: "beep-07", ofType: "wav")!)
        do {
            let soundFile = try AVAudioFile(forReading: url, commonFormat: .pcmFormatFloat32, interleaved: false)
            outputBuffer = AVAudioPCMBuffer(pcmFormat: soundFile.processingFormat,
                                            frameCapacity: AVAudioFrameCount(soundFile.length))
            do {
                try soundFile.read(into: outputBuffer)
            } catch {
                print("something went wrong with loading the buffer into the sound file")
            }
            print("returning buffer")
            return outputBuffer
        } catch {
            // Was a silent catch — log why the file could not be opened.
            print("could not open sound file: \(error)")
        }
        return outputBuffer
    }

    /// Attaches and connects the main voice chain:
    /// voicePlayer → distortion → delay → environment → output.
    func wireEngine() {
        loadDistortionPreset(preset: .multiCellphoneConcert)
        engine.attach(distortion)
        engine.attach(delay)
        engine.connect(voicePlayer, to: distortion, format: outputBuffer.format)
        engine.connect(distortion, to: delay, format: outputBuffer.format)
        engine.connect(delay, to: environment, format: outputBuffer.format)
        engine.connect(environment, to: engine.outputNode, format: constructOutputFormatForEnvironment())
    }

    /// Derives a standard format matching the hardware output.
    /// BUGFIX: the output node's output bus is 0, not 1 — querying bus 1
    /// returned an invalid format (0 channels / 0 Hz).
    func constructOutputFormatForEnvironment() -> AVAudioFormat {
        let outputChannelCount = engine.outputNode.outputFormat(forBus: 0).channelCount
        let hardwareSampleRate = engine.outputNode.outputFormat(forBus: 0).sampleRate
        let environmentOutputConnectionFormat = AVAudioFormat(standardFormatWithSampleRate: hardwareSampleRate,
                                                              channels: outputChannelCount)
        multiChannelEnabled = false
        return environmentOutputConnectionFormat!
    }

    func loadDistortionPreset(preset: AVAudioUnitDistortionPreset) {
        distortion.loadFactoryPreset(preset)
    }

    /// Creates an additional HRTF-rendered player routed into `environment`.
    /// - Parameter node: NOTE(review): currently unused — presumably the
    ///   player should be bound to the node's position; confirm intent.
    func createPlayer(node: SCNNode) {
        let player = AVAudioPlayerNode()
        // BUGFIX: the original re-attached the shared `self.distortion`,
        // which was already attached in wireEngine(). Attaching a node that
        // already has an engine throws
        // "required condition is false: !nodeimpl->HasEngineImpl()".
        // Each player gets its own effect node instead.
        let playerDistortion = AVAudioUnitDistortion()
        playerDistortion.loadFactoryPreset(.speechCosmicInterference)
        engine.attach(player)
        engine.attach(playerDistortion)
        engine.connect(player, to: playerDistortion, format: outputBuffer.format)
        engine.connect(playerDistortion, to: environment, format: constructOutputFormatForEnvironment())
        player.renderingAlgorithm = .HRTF
        player.reverbBlend = 1.0
    }
}
这个 AVAudioEngine 类是我从网上的示例(here)得到的,并在 ViewController 中通过 AudioLayerEngine().createPlayer(node: myNode) 来调用它。
这样调用是正确的方法吗?
我浏览了一些类似的帖子,尝试过 detach 节点和断开引擎连接(disconnect),但问题仍然无法解决。感谢任何建议。