I have built an audio multi-track player using Apple's AVFoundation. I use nine AVAudioPlayerNodes connected to an AVAudioEngine, and they all play at the same time. In SpriteKit, in my game scene, I want to detect the end of the file in any of the AVAudioPlayerNodes so that I can run follow-up code. How do I do that? Unfortunately, AVAudioPlayerNode does not have the same convenience methods as the simpler AVAudioPlayer class. Here is the multiTrack function (a sketch of one possible completion-handler approach follows it below):
import SpriteKit
import AVFoundation
var onesie = AVAudioPlayer()
var singleTrack = AVAudioPlayerNode()
var trackOne = AVAudioPlayerNode()
var trackTwo = AVAudioPlayerNode()
var trackThree = AVAudioPlayerNode()
var trackFour = AVAudioPlayerNode()
var trackFive = AVAudioPlayerNode()
var trackSix = AVAudioPlayerNode()
var trackSeven = AVAudioPlayerNode()
var trackEight = AVAudioPlayerNode()
var trackNine = AVAudioPlayerNode()
//variables to hold the NSURLs as AVAudioFiles for use in the AVAudioPlayerNodes.
var single = AVAudioFile()
var one = AVAudioFile()
var two = AVAudioFile()
var three = AVAudioFile()
var four = AVAudioFile()
var five = AVAudioFile()
var six = AVAudioFile()
var seven = AVAudioFile()
var eight = AVAudioFile()
var nine = AVAudioFile()
//variables for the audio engine and player nodes. The "mixer" is part of the engine and is already hooked up to the output
var engine = AVAudioEngine()
//reference the mixer
let mainMixer = engine.mainMixerNode
func audioMultiTrack(trackOneFN: String, trackTwoFN: String, trackThreeFN: String, trackFourFN: String, trackFiveFN: String, trackSixFN: String, trackSevenFN: String, trackEightFN: String, trackNineFN: String){
/*access audio files for audio players (tracks)*/
//1
guard let trackOneFile = NSBundle.mainBundle().URLForResource(trackOneFN, withExtension: "mp3") else {
    fatalError("Track one file not found.")
}
//2
guard let trackTwoFile = NSBundle.mainBundle().URLForResource(trackTwoFN, withExtension: "mp3") else {
    fatalError("Track two file not found.")
}
//3
guard let trackThreeFile = NSBundle.mainBundle().URLForResource(trackThreeFN, withExtension: "mp3") else {
    fatalError("Track three file not found.")
}
//4
guard let trackFourFile = NSBundle.mainBundle().URLForResource(trackFourFN, withExtension: "mp3") else {
    fatalError("Track four file not found.")
}
//5
guard let trackFiveFile = NSBundle.mainBundle().URLForResource(trackFiveFN, withExtension: "mp3") else {
    fatalError("Track five file not found.")
}
//6
guard let trackSixFile = NSBundle.mainBundle().URLForResource(trackSixFN, withExtension: "mp3") else {
    fatalError("Track six file not found.")
}
//7
guard let trackSevenFile = NSBundle.mainBundle().URLForResource(trackSevenFN, withExtension: "mp3") else {
    fatalError("Track seven file not found.")
}
//8
guard let trackEightFile = NSBundle.mainBundle().URLForResource(trackEightFN, withExtension: "mp3") else {
    fatalError("Track eight file not found.")
}
//9
guard let trackNineFile = NSBundle.mainBundle().URLForResource(trackNineFN, withExtension: "mp3") else {
    fatalError("Track nine file not found.")
}
//load the NSURLs into the AVAudioFile variables
//1
do {
    try one = AVAudioFile(forReading: trackOneFile)
} catch {
    fatalError("error loading track one file.")
}
//2
do {
    try two = AVAudioFile(forReading: trackTwoFile)
} catch {
    fatalError("error loading track two file.")
}
//3
do {
    try three = AVAudioFile(forReading: trackThreeFile)
} catch {
    fatalError("error loading track three file.")
}
//4
do {
    try four = AVAudioFile(forReading: trackFourFile)
} catch {
    fatalError("error loading track four file.")
}
//5
do {
    try five = AVAudioFile(forReading: trackFiveFile)
} catch {
    fatalError("error loading track five file.")
}
//6
do {
    try six = AVAudioFile(forReading: trackSixFile)
} catch {
    fatalError("error loading track six file.")
}
//7
do {
    try seven = AVAudioFile(forReading: trackSevenFile)
} catch {
    fatalError("error loading track seven file.")
}
//8
do {
    try eight = AVAudioFile(forReading: trackEightFile)
} catch {
    fatalError("error loading track eight file.")
}
//9
do {
    try nine = AVAudioFile(forReading: trackNineFile)
} catch {
    fatalError("error loading track nine file.")
}
/*hook up audio units*/
//attach audio players (tracks) to audio engine
engine.attachNode(trackOne)
engine.attachNode(trackTwo)
engine.attachNode(trackThree)
engine.attachNode(trackFour)
engine.attachNode(trackFive)
engine.attachNode(trackSix)
engine.attachNode(trackSeven)
engine.attachNode(trackEight)
engine.attachNode(trackNine)
//connect the tracks to the mixer
engine.connect(trackOne, to: mainMixer, format: nil)
engine.connect(trackTwo, to: mainMixer, format: nil)
engine.connect(trackThree, to: mainMixer, format: nil)
engine.connect(trackFour, to: mainMixer, format: nil)
engine.connect(trackFive, to: mainMixer, format: nil)
engine.connect(trackSix, to: mainMixer, format: nil)
engine.connect(trackSeven, to: mainMixer, format: nil)
engine.connect(trackEight, to: mainMixer, format: nil)
engine.connect(trackNine, to: mainMixer, format: nil)
//schedule the audio files on the audio players (tracks)
trackOne.scheduleFile(one, atTime: nil, completionHandler: nil)
trackTwo.scheduleFile(two, atTime: nil, completionHandler: nil)
trackThree.scheduleFile(three, atTime: nil, completionHandler: nil)
trackFour.scheduleFile(four, atTime: nil, completionHandler: nil)
trackFive.scheduleFile(five, atTime: nil, completionHandler: nil)
trackSix.scheduleFile(six, atTime: nil, completionHandler: nil)
trackSeven.scheduleFile(seven, atTime: nil, completionHandler: nil)
trackEight.scheduleFile(eight, atTime: nil, completionHandler: nil)
trackNine.scheduleFile(nine, atTime: nil, completionHandler: nil)
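//note: the nil completion handlers above discard the end-of-file callback; see the sketch after this function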
//try to start the audio engine
do {
    try engine.start()
} catch {
    print("error starting engine")
}
//function to create a precise time to start all audio players (tracks)
func startTime() -> AVAudioTime {
    let samplerate = one.processingFormat.sampleRate
    let sampleTime = AVAudioFramePosition(samplerate)
    let time = AVAudioTime(sampleTime: sampleTime, atRate: samplerate)
    return time
}
//start audio players (tracks) at precise time
trackOne.playAtTime(startTime())
trackTwo.playAtTime(startTime())
trackThree.playAtTime(startTime())
trackFour.playAtTime(startTime())
trackFive.playAtTime(startTime())
trackSix.playAtTime(startTime())
trackSeven.playAtTime(startTime())
trackEight.playAtTime(startTime())
trackNine.playAtTime(startTime())
}
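A possible lead is the completionHandler parameter of scheduleFile(_:atTime:completionHandler:), which the code above passes as nil. Below is a minimal, untested sketch of how a per-track "finished" callback could look with it. Two caveats: the handler reportedly fires once the node has consumed the scheduled file, which can be slightly before the sound actually finishes at the output, and it runs on a background thread, so it hops back to the main queue before touching the SpriteKit scene. trackDidFinish is a hypothetical helper, not an AVFoundation API.
//hypothetical helper: run whatever follow-up game code is needed when a track ends
func trackDidFinish(trackName: String) {
    print("\(trackName) finished playing")
}
//sketch only: schedule track one with a completion handler instead of the nil used above
trackOne.scheduleFile(one, atTime: nil, completionHandler: {
    //called on a background thread once the node has consumed the file
    dispatch_async(dispatch_get_main_queue()) {
        trackDidFinish("track one")
    }
})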