Edit: new code added at the bottom.
I'm fairly comfortable with coding, so I started creating a soundboard app. I've been able to slow down or speed up the audio using the "rate" property, but now I'd also like to change the pitch. Ideally I'd like two switches: one that speeds the audio up and raises the pitch, one that slows it down and lowers the pitch, and if both are off it just plays normally. My code is below; it works for everything except changing the pitch. Any input is greatly appreciated. For reference, sass is the slow one and chip is the fast one.
import UIKit
import AVFoundation

var Sounder1 = AVAudioPlayer()
var Sounder2 = AVAudioPlayer()

let sassFloat: Float = 0.5
let myInt = Int(sassFloat)
let chipFloat: Float = 3.0
let myInt2 = Int(chipFloat)

class ViewController: UIViewController {

    @IBOutlet weak var sassSwitch: UISwitch!
    @IBOutlet weak var chipSwitch: UISwitch!

    override func viewDidLoad() {
        super.viewDidLoad()
        do {
            Sounder1 = try AVAudioPlayer(contentsOf: URL.init(fileURLWithPath: Bundle.main.path(forResource: "Sound1", ofType: "mp3")!))
            Sounder1.prepareToPlay()
            Sounder1.enableRate = true
        } catch {
        }
        do {
            Sounder2 = try AVAudioPlayer(contentsOf: URL.init(fileURLWithPath: Bundle.main.path(forResource: "Sound2", ofType: "wav")!))
            Sounder2.prepareToPlay()
            Sounder2.enableRate = true
        } catch {
        }
        // Do any additional setup after loading the view, typically from a nib.
    }

    @IBAction func sassAction(_ sender: UISwitch) {
        chipSwitch.setOn(false, animated: true)
    }

    @IBAction func chipAction(_ sender: UISwitch) {
        sassSwitch.setOn(false, animated: true)
    }

    @IBAction func play(_ sender: Any) {
        if sassSwitch.isOn {
            Sounder1.rate = sassFloat
            Sounder1.play()
        } else if chipSwitch.isOn {
            Sounder1.rate = chipFloat
            Sounder1.play()
        } else {
            Sounder1.rate = 1.0
            Sounder1.play()
        }
    }

    @IBAction func play2(_ sender: Any) {
        if sassSwitch.isOn {
            Sounder2.rate = sassFloat
            Sounder2.play()
        } else if chipSwitch.isOn {
            Sounder2.rate = chipFloat
            Sounder2.play()
        } else {
            Sounder2.rate = 1.0
            Sounder2.play()
        }
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }
}
Here is the new code I put together based on @ericl's suggestion. I still need to reset() the audioEngine, but I also have a few questions.
1) Does the if / else if / else statement actually work in this case?
2) Where do I add the reset() statement?
3) Do the nodes need to be detached after each time a sound plays, or is just a reset enough? (A rough sketch of one possibility follows the code below.)
class ViewController: UIViewController {

    @IBOutlet weak var sassSwitch: UISwitch!
    @IBOutlet weak var chipSwitch: UISwitch!

    @IBAction func sassAction(_ sender: UISwitch) {
        chipSwitch.setOn(false, animated: true)
    }

    @IBAction func chipSwitch(_ sender: UISwitch) {
        sassSwitch.setOn(false, animated: true)
    }

    /// Playback engine.
    private let audioEngine = AVAudioEngine()
    /// Player nodes.
    private let pitchPlayer = AVAudioPlayerNode()
    private let timePitch = AVAudioUnitTimePitch()
    /// Audio files to be played.
    private var audioFile1 = AVAudioFile()
    private var audioFile2 = AVAudioFile()

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view, typically from a nib.
        if let filePath = Bundle.main.path(forResource: "PeteNope", ofType: "mp3") {
            let filePathURL = URL(fileURLWithPath: filePath)
            setPlayerFile(filePathURL)
        }
        if let filePath2 = Bundle.main.path(forResource: "Law_WOW", ofType: "mp3") {
            let filePath2URL = URL(fileURLWithPath: filePath2)
            setPlayerFile2(filePath2URL)
        }
    }

    private func setPlayerFile(_ fileURL: URL) {
        do {
            let file = try AVAudioFile(forReading: fileURL)
            self.audioFile1 = file
        } catch {
            fatalError("Could not create AVAudioFile instance. error: \(error).")
        }
    }

    private func setPlayerFile2(_ fileURL: URL) {
        do {
            let file = try AVAudioFile(forReading: fileURL)
            self.audioFile2 = file
        } catch {
            fatalError("Could not create AVAudioFile instance. error: \(error).")
        }
    }

    @IBAction func sound1Play(_ sender: UIButton) {
        if sassSwitch.isOn {
            timePitch.pitch = -300
            timePitch.rate = 0.5
            audioEngine.attach(pitchPlayer)
            audioEngine.attach(timePitch)
            audioEngine.connect(pitchPlayer, to: timePitch, format: audioFile1.processingFormat)
            audioEngine.connect(timePitch, to: audioEngine.outputNode, format: audioFile1.processingFormat)
            pitchPlayer.scheduleFile(audioFile1, at: nil, completionHandler: nil)
            // Start the engine.
            do {
                try audioEngine.start()
            } catch {
                fatalError("Could not start engine. error: \(error).")
            }
            pitchPlayer.play()
        } else if chipSwitch.isOn {
            timePitch.pitch = +500
            timePitch.rate = 2.0
            audioEngine.attach(pitchPlayer)
            audioEngine.attach(timePitch)
            audioEngine.connect(pitchPlayer, to: timePitch, format: audioFile1.processingFormat)
            audioEngine.connect(timePitch, to: audioEngine.outputNode, format: audioFile1.processingFormat)
            pitchPlayer.scheduleFile(audioFile1, at: nil, completionHandler: nil)
            // Start the engine.
            do {
                try audioEngine.start()
            } catch {
                fatalError("Could not start engine. error: \(error).")
            }
            pitchPlayer.play()
        } else {
            timePitch.pitch = +0
            timePitch.rate = 1.0
            audioEngine.attach(pitchPlayer)
            audioEngine.attach(timePitch)
            audioEngine.connect(pitchPlayer, to: timePitch, format: audioFile1.processingFormat)
            audioEngine.connect(timePitch, to: audioEngine.outputNode, format: audioFile1.processingFormat)
            pitchPlayer.scheduleFile(audioFile1, at: nil, completionHandler: nil)
            // Start the engine.
            do {
                try audioEngine.start()
            } catch {
                fatalError("Could not start engine. error: \(error).")
            }
            pitchPlayer.play()
        }
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }
}
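For questions 2 and 3, I'm imagining something like the rough sketch below (the resetPlayback name and where it gets called are just guesses on my part): stop the player node and the engine before each new playback, and keep the nodes attached rather than detaching them every time.

private func resetPlayback() {
    // Hypothetical helper: call at the start of sound1Play(_:) before reconfiguring the graph.
    pitchPlayer.stop()    // stop any sound that is still playing
    audioEngine.stop()    // stop the engine so it can be restarted cleanly
    audioEngine.reset()   // reset the engine's processing state; attached nodes stay attached
}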
Answer 0 (score: 0)
You can use an AVAudioEngine to attach an AVAudioPlayerNode to an AVAudioUnitTimePitch effect. I looked at Apple's AudioUnitV3Example sample code and pieced together a working example. You will want to implement more of the state and error handling found there. Anyway, here it is.

Define some private variables at the top of your class:
/// Playback engine.
private let audioEngine = AVAudioEngine()
/// Engine's player node.
private let pitchPlayer = AVAudioPlayerNode()
private let timePitch = AVAudioUnitTimePitch()
/// File to play.
private var audioFile: AVAudioFile?
Check for a valid audio file:
override func viewDidLoad() {
    super.viewDidLoad()

    if let filePath = Bundle.main.path(forResource: "Sound1", ofType: "mp3") {
        let filePathURL = URL(fileURLWithPath: filePath)
        setPlayerFile(filePathURL)
    }
}

private func setPlayerFile(_ fileURL: URL) {
    do {
        let file = try AVAudioFile(forReading: fileURL)
        self.audioFile = file
    } catch {
        fatalError("Could not create AVAudioFile instance. error: \(error).")
    }
}

@IBAction func verySlowPlayback(sender: UIButton) {
    timePitch.pitch = -300
    timePitch.rate = 0.5

    audioEngine.attach(pitchPlayer)
    audioEngine.attach(timePitch)
    audioEngine.connect(pitchPlayer, to: timePitch, format: audioFile?.processingFormat)
    audioEngine.connect(timePitch, to: audioEngine.outputNode, format: audioFile?.processingFormat)
    pitchPlayer.scheduleFile(audioFile!, at: nil, completionHandler: nil)

    // Start the engine.
    do {
        try audioEngine.start()
    } catch {
        fatalError("Could not start engine. error: \(error).")
    }
    pitchPlayer.play()
}
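To drive this from the two switches in the question, one option would be to do the attach/connect setup once and only change the effect parameters on each play. Here's a minimal sketch of that idea, assuming the sassSwitch/chipSwitch outlets from the question; the configureEngine and playWithPitch names and the pitch/rate values are only illustrative.

private func configureEngine() {
    // One-time graph setup, e.g. called from viewDidLoad() after the file is loaded.
    audioEngine.attach(pitchPlayer)
    audioEngine.attach(timePitch)
    audioEngine.connect(pitchPlayer, to: timePitch, format: audioFile?.processingFormat)
    audioEngine.connect(timePitch, to: audioEngine.outputNode, format: audioFile?.processingFormat)
}

@IBAction func playWithPitch(sender: UIButton) {
    if sassSwitch.isOn {
        timePitch.pitch = -300   // lower the pitch (in cents)
        timePitch.rate = 0.5     // half speed
    } else if chipSwitch.isOn {
        timePitch.pitch = 500    // raise the pitch
        timePitch.rate = 2.0     // double speed
    } else {
        timePitch.pitch = 0      // leave pitch and speed unchanged
        timePitch.rate = 1.0
    }
    guard let file = audioFile else { return }
    if pitchPlayer.isPlaying {
        pitchPlayer.stop()       // cut off any previous playback
    }
    pitchPlayer.scheduleFile(file, at: nil, completionHandler: nil)
    do {
        try audioEngine.start()
    } catch {
        fatalError("Could not start engine. error: \(error).")
    }
    pitchPlayer.play()
}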