I need to play sounds and also be able to record the resulting melody from a buffer. But I don't know how to set up the AVAudioSession category and/or the AVAudioPlayerNode to achieve this. The sounds are scheduled on the player node. If I understand correctly, AVAudioRecorder only records from the microphone, not what is played through an AVAudioPlayerNode. So, here is my attempt:
First, I set up a session:

NSError *error = nil;
AVAudioSession *audioSession = [AVAudioSession sharedInstance];
[audioSession setCategory:AVAudioSessionCategoryPlayAndRecord
              withOptions:AVAudioSessionCategoryOptionMixWithOthers
                    error:&error];
if (error) {
    NSLog(@"AVAudioSession error %ld, %@", (long)error.code, error.localizedDescription);
}
[audioSession setActive:YES error:&error];
if (error) {
    NSLog(@"AVAudioSession error %ld, %@", (long)error.code, error.localizedDescription);
}
Set up the file to record into:

NSString *docs = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES).firstObject
                     stringByAppendingPathComponent:@"Recording.caf"];
NSURL *url = [NSURL fileURLWithPath:docs];
NSError *error = nil;
self.fileForRecording = [[AVAudioFile alloc] initForWriting:url
                                                   settings:[self.engine.inputNode inputFormatForBus:0].settings
                                                      error:&error];
if (error) {
    NSLog(@"CREATE FILE ERROR %@", error);
}
Then the engine:
self.engine = [AVAudioEngine new];
self.player = [AVAudioPlayerNode new];
AVAudioOutputNode *output = self.engine.outputNode;
[self.engine attachNode:self.player];
[self.engine connect:self.player to:output fromBus:0 toBus:0 format:format];
[self.engine prepare];
And the recording method:

- (void)startRecording {
    AVAudioFormat *recordingFormat = [self.engine.outputNode outputFormatForBus:0];
    if (recordingFormat.sampleRate > 0) {
        __weak typeof(self) weakSelf = self;
        [self.engine.inputNode installTapOnBus:0
                                    bufferSize:1024
                                        format:recordingFormat
                                         block:^(AVAudioPCMBuffer * _Nonnull buffer, AVAudioTime * _Nonnull when) {
            NSError *error = nil;
            [weakSelf.fileForRecording writeFromBuffer:buffer error:&error];
            NSLog(@"WRITE ERROR %@", error);
        }];
    }
}
When installing the tap on the bus, I tried passing nil as the recording format; in that case the block is never called. I tried using [self.engine.mainMixerNode outputFormatForBus:0], and that leads to a crash. Using self.engine.outputNode instead also leads to a crash.

Please help :)
Answer (score: 1)
I made an empty project in Swift. My engine graph looks like this: I have 2 sounds and 2 player nodes, one for each sound. The players are connected to the engine's mainMixerNode. When I want to record the music from both players at the same time, I take the buffers from the mainMixerNode output. This works!
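The key difference from the attempt in the question is where the tap is installed: on mainMixerNode, which carries the mixed output of the player nodes, rather than on inputNode, which is the hardware input (the microphone). Here is a minimal sketch of just that part, assuming engine is already configured and running and recordingFile is an AVAudioFile opened for writing (both names are illustrative):

import AVFoundation

// Minimal sketch with assumed names: append every buffer that reaches the mixer to a file.
func startRecording(engine: AVAudioEngine, into recordingFile: AVAudioFile) {
    let mixer = engine.mainMixerNode
    // The tap format must match the format of the bus being tapped.
    let format = mixer.outputFormat(forBus: 0)
    mixer.installTap(onBus: 0, bufferSize: 1024, format: format) { buffer, _ in
        do {
            try recordingFile.write(from: buffer)
        } catch {
            print("WRITE ERROR", error)
        }
    }
}

func stopRecording(engine: AVAudioEngine) {
    engine.mainMixerNode.removeTap(onBus: 0)
}

The full test project: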
class ViewController: UIViewController {
var engine = AVAudioEngine()
var recordingFile: AVAudioFile?
var audioPlayer: AVAudioPlayer?
let playerSaw = AVAudioPlayerNode()
let playerDk = AVAudioPlayerNode()
var bufferSaw: AVAudioPCMBuffer?
var bufferDk: AVAudioPCMBuffer?
override func viewDidLoad() {
super.viewDidLoad()
let audioSession = AVAudioSession.sharedInstance()
do {
            try audioSession.setCategory(.playAndRecord, mode: .default, options: [])
} catch let error as NSError {
print("audioSession error: \(error.localizedDescription)")
}
self.bufferSaw = self.createBuffer(forFileNamed: "16_saw")
self.bufferDk = self.createBuffer(forFileNamed: "23_dk")
if self.bufferSaw != nil &&
self.bufferDk != nil {
self.engine.attach(self.playerSaw)
self.engine.attach(self.playerDk)
let mainMixerNode = self.engine.mainMixerNode
self.engine.connect(self.playerSaw, to:mainMixerNode, format:self.bufferSaw!.format)
self.engine.connect(self.playerDk, to:mainMixerNode, format:self.bufferDk!.format)
self.engine.prepare()
do {
try self.engine.start()
} catch (let error) {
print("START FAILED", error)
}
}
}
@IBAction func record(sender: AnyObject) {
self.createRecordingFile()
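        // Tap the main mixer so the mixed output of both players is appended to the recording file.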
self.engine.mainMixerNode.installTap(onBus: 0,
bufferSize: 1024,
format: self.engine.mainMixerNode.outputFormat(forBus: 0)) { (buffer, time) -> Void in
do {
try self.recordingFile?.write(from: buffer)
} catch (let error) {
print("RECORD ERROR", error);
}
}
}
@IBAction func stop(sender: AnyObject) {
self.engine.mainMixerNode.removeTap(onBus: 0)
}
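    // Restart the engine if it is not running; the play actions call this before scheduling buffers.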
fileprivate func startEngineIfNotRunning() {
if (!self.engine.isRunning) {
do {
try self.engine.start()
} catch (let error) {
print("RESTART FAILED", error)
}
}
}
@IBAction func playSaw(sender: UIButton) {
if let buffer = self.bufferSaw {
self.startEngineIfNotRunning()
sender.isSelected = !sender.isSelected
if (sender.isSelected) {
self.playerSaw.scheduleBuffer(buffer,
at: nil,
options: .loops,
completionHandler: nil)
self.playerSaw.play()
} else {
self.playerSaw.pause()
}
}
}
@IBAction func playDk(sender: UIButton) {
if let buffer = self.bufferDk {
self.startEngineIfNotRunning()
sender.isSelected = !sender.isSelected
if (sender.isSelected) {
self.playerDk.scheduleBuffer(buffer,
at: nil,
options: .loops,
completionHandler: nil)
self.playerDk.play()
} else {
self.playerDk.pause()
}
}
}
@IBAction func playAudio(_ sender: AnyObject) {
if let url = self.recordingFile?.url {
do {
self.audioPlayer = try AVAudioPlayer(contentsOf:
url)
self.audioPlayer?.prepareToPlay()
self.audioPlayer?.play()
} catch let error as NSError {
print("audioPlayer error: \(error.localizedDescription)")
}
}
}
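    // Create Documents/my_file.caf for writing, using the format that flows into the engine's output node.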
fileprivate func createRecordingFile() {
if let dir = NSSearchPathForDirectoriesInDomains(FileManager.SearchPathDirectory.documentDirectory, FileManager.SearchPathDomainMask.allDomainsMask, true).first {
var url = URL(fileURLWithPath: dir)
url.appendPathComponent("my_file.caf")
let format = self.engine.outputNode.inputFormat(forBus: 0)
do {
self.recordingFile = try AVAudioFile(forWriting: url, settings:format.settings)
} catch (let error) {
print("CREATE RECORDING FILE ERROR", error);
}
}
}
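    // Load a bundled .caf file entirely into an AVAudioPCMBuffer so it can be scheduled on a player node.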
fileprivate func createBuffer(forFileNamed fileName: String) -> AVAudioPCMBuffer? {
var res: AVAudioPCMBuffer?
if let fileURL = Bundle.main.url(forResource: fileName, withExtension: "caf") {
do {
let file = try AVAudioFile(forReading: fileURL)
res = AVAudioPCMBuffer(pcmFormat: file.processingFormat, frameCapacity:AVAudioFrameCount(file.length))
if let _ = res {
do {
try file.read(into: res!)
} catch (let error) {
print("ERROR read file", error)
}
}
} catch (let error) {
print("ERROR file creation", error)
}
}
return res
}
}