I am working on capturing audio data from the iPhone microphone and sending it over a socket. I have tried AVAudioEngine to get the audio buffer, but I can't figure out how to use it correctly. Can you suggest a better way to get the recorded buffer data live?
override func viewDidLoad() {
    super.viewDidLoad()
    // initialize engine
    engine = AVAudioEngine()
    guard nil != engine?.inputNode else {
        // @TODO: error out
        return
    }
    SocketIOManager.sharedInstance.socket.on("listen") { data, ack in
        let bufferData = data[0] as! Data
        do {
            // This is the part that fails: AVAudioPlayer expects a complete
            // audio file (WAV, MP3, ...), not raw PCM buffer bytes.
            let player = try AVAudioPlayer(data: bufferData)
            player.play()
        } catch let error as NSError {
            print(error.description)
        }
        print("socket connected \(data)")
    }
}
func installTap() {
    engine = AVAudioEngine()
    guard let engine = engine, let input = engine.inputNode else {
        // @TODO: error out
        return
    }
    let format = input.inputFormat(forBus: 0)
    input.installTap(onBus: 0, bufferSize: 4096, format: format) { [weak self] buffer, when in
        guard let this = self else {
            return
        }
        // writing to file: for testing purposes only
        do {
            try this.file!.write(from: buffer)
        } catch {
        }
        // floatChannelData returns pointers to the buffer's float samples if the
        // buffer's format is 32-bit float, or nil if it is another format.
        if buffer.floatChannelData != nil {
            let stream = this.toNSData(PCMBuffer: buffer)
            SocketIOManager.sharedInstance.socket.emit("talk", stream)
            // @TODO: send data, better to pass into separate queue for processing
        }
    }
    engine.prepare()
    do {
        try engine.start()
    } catch {
        // @TODO: error out
    }
}
Answer 0 (score: 0)
Try this code:
var audioPlayerQueue = DispatchQueue(label: "audioPlayerQueue", qos: DispatchQoS.userInteractive)
var peerAudioEngine = AVAudioEngine()
var peerAudioPlayer = AVAudioPlayerNode()
var peerInputFormat: AVAudioFormat?
override func viewDidLoad() {
    super.viewDidLoad()
    // initialize engine
    guard self.peerAudioEngine.inputNode != nil else {
        // @TODO: error out
        return
    }
    self.peerAudioEngine.attach(self.peerAudioPlayer)
    self.peerInputFormat = AVAudioFormat(commonFormat: .pcmFormatFloat32, sampleRate: 44100, channels: 1, interleaved: false)
    self.peerAudioEngine.connect(self.peerAudioPlayer, to: self.peerAudioEngine.mainMixerNode, format: self.peerInputFormat)
    do {
        self.peerAudioEngine.prepare()
        try self.peerAudioEngine.start()
    } catch let error {
        print(error.localizedDescription)
    }
    SocketIOManager.sharedInstance.socket.on("listen") { data, ack in
        print("socket connected \(data)")
        let pcmBuffer = self.toPCMBuffer(data: data[0] as! NSData)
        self.audioPlayerQueue.async {
            self.peerAudioPlayer.scheduleBuffer(pcmBuffer, completionHandler: nil)
            if self.peerAudioEngine.isRunning {
                self.peerAudioPlayer.play()
            } else {
                do {
                    try self.peerAudioEngine.start()
                    self.peerAudioPlayer.play()
                } catch {
                    print(error.localizedDescription)
                }
            }
        }
    }
}
func toPCMBuffer(data: NSData) -> AVAudioPCMBuffer {
    // incoming NSData format: mono, non-interleaved 32-bit float at 44.1 kHz
    let audioFormat = AVAudioFormat(commonFormat: AVAudioCommonFormat.pcmFormatFloat32, sampleRate: 44100, channels: 1, interleaved: false)
    let PCMBuffer = AVAudioPCMBuffer(pcmFormat: audioFormat, frameCapacity: UInt32(data.length) / audioFormat.streamDescription.pointee.mBytesPerFrame)
    PCMBuffer.frameLength = PCMBuffer.frameCapacity
    let channels = UnsafeBufferPointer(start: PCMBuffer.floatChannelData, count: Int(PCMBuffer.format.channelCount))
    data.getBytes(UnsafeMutableRawPointer(channels[0]), length: data.length)
    return PCMBuffer
}
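Note that both snippets call toNSData(PCMBuffer:) without ever defining it. Here is a minimal sketch of the inverse of toPCMBuffer(data:), assuming the same mono, non-interleaved 32-bit float format (this helper is a reconstruction, not part of the original answer):
func toNSData(PCMBuffer: AVAudioPCMBuffer) -> NSData {
    // Assumes mono float32, matching toPCMBuffer above.
    let channels = UnsafeBufferPointer(start: PCMBuffer.floatChannelData, count: 1)
    let length = Int(PCMBuffer.frameLength * PCMBuffer.format.streamDescription.pointee.mBytesPerFrame)
    return NSData(bytes: channels[0], length: length)
}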
func installTap() {
    guard let input = self.peerAudioEngine.inputNode else {
        // @TODO: error out
        return
    }
    let format = input.inputFormat(forBus: 0)
    input.installTap(onBus: 0, bufferSize: 4410, format: format) { [weak self] buffer, when in
        guard let this = self else {
            return
        }
        let stream = this.toNSData(PCMBuffer: buffer)
        SocketIOManager.sharedInstance.socket.emit("talk", stream)
    }
    do {
        self.peerAudioEngine.prepare()
        try self.peerAudioEngine.start()
    } catch {
        // @TODO: error out
    }
}
**Edit: to enable the loudspeaker:**
func speakerEnabled(_ enabled: Bool) -> Bool {
    let session = AVAudioSession.sharedInstance()
    var options = session.categoryOptions
    if enabled {
        options.insert(.defaultToSpeaker)
    } else {
        options.remove(.defaultToSpeaker)
    }
    do {
        try session.setCategory(AVAudioSessionCategoryPlayAndRecord, with: options)
        return true
    } catch {
        return false
    }
}
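One more point: neither snippet configures the AVAudioSession before the engine starts. Recording from the microphone needs the playAndRecord category and the user's record permission; a minimal sketch under those assumptions (the function name is illustrative):
func configureAudioSession() {
    let session = AVAudioSession.sharedInstance()
    do {
        // playAndRecord allows simultaneous capture and playback.
        try session.setCategory(AVAudioSessionCategoryPlayAndRecord, with: [.defaultToSpeaker])
        try session.setActive(true)
    } catch {
        print(error.localizedDescription)
    }
    session.requestRecordPermission { granted in
        if !granted {
            print("microphone permission denied")
        }
    }
}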