如何在Swift上使用AVAudioConverter,AVAudioCompressedBuffer和AVAudioPCMBuffer将AAC转换为PCM?
在WWDC 2015的Session 507中,有人说AVAudioConverter可以对PCM缓冲区进行编码和解码,但只展示了编码示例,没有展示解码示例。我尝试了解码,但无法正常工作。我不知道问题出在哪里:(
来电:
//buffer - it's AVAudioPCMBuffer from AVAudioInputNode(AVAudioEngine)
let aacBuffer = AudioBufferConverter.convertToAAC(from: buffer, error: nil) //has data
let data = Data(bytes: aacBuffer!.data, count: Int(aacBuffer!.byteLength)) //has data
let aacReverseBuffer = AudioBufferConverter.convertToAAC(from: data) //has data
let pcmReverseBuffer = AudioBufferConverter.convertToPCM(from: aacBuffer2!, error: nil) //zeros data. data object exist, but filled by zeros
用于转换的代码:
/// Factory for the two audio formats used on either side of the conversion.
class AudioBufferFormatHelper {

    /// Mono 44.1 kHz, Float32, non-interleaved linear PCM.
    static func PCMFormat() -> AVAudioFormat? {
        return AVAudioFormat(commonFormat: .pcmFormatFloat32,
                             sampleRate: 44100,
                             channels: 1,
                             interleaved: false)
    }

    /// Mono 44.1 kHz MPEG-4 AAC. The zeroed size fields are intentional:
    /// for a compressed format the codec fills in packet/frame sizing itself.
    static func AACFormat() -> AVAudioFormat? {
        var description = AudioStreamBasicDescription(
            mSampleRate: 44100,
            mFormatID: kAudioFormatMPEG4AAC,
            mFormatFlags: 0,
            mBytesPerPacket: 0,
            mFramesPerPacket: 0,
            mBytesPerFrame: 0,
            mChannelsPerFrame: 1,
            mBitsPerChannel: 0,
            mReserved: 0)
        return AVAudioFormat(streamDescription: &description)
    }
}
// NOTE(review): this is the asker's original (broken) implementation, kept
// as-is for reference. Per the answer below and the AACDecoder error log,
// its known defects are:
// - convertToAAC(from:) fills in only the first packet description and
//   hard-codes mDataByteSize = 4, but AAC packets are variable-sized.
// - convert(from:to:error:) builds a new AVAudioConverter on every call,
//   discarding the codec's internal state between buffers.
// - The input block always reports .haveData and returns the same source
//   buffer, so the converter is fed the same buffer repeatedly.
class AudioBufferConverter {
// Encodes an LPCM buffer to AAC (up to 8 packets, 768 bytes each)
// via a throwaway converter — see the note above.
static func convertToAAC(from buffer: AVAudioBuffer, error outError: NSErrorPointer) -> AVAudioCompressedBuffer? {
let outputFormat = AudioBufferFormatHelper.AACFormat()
let outBuffer = AVAudioCompressedBuffer(format: outputFormat!, packetCapacity: 8, maximumPacketSize: 768)
self.convert(from: buffer, to: outBuffer, error: outError)
return outBuffer
}
// Decodes an AAC buffer into a 4410-frame (100 ms at 44.1 kHz) PCM buffer.
static func convertToPCM(from buffer: AVAudioBuffer, error outError: NSErrorPointer) -> AVAudioPCMBuffer? {
let outputFormat = AudioBufferFormatHelper.PCMFormat()
guard let outBuffer = AVAudioPCMBuffer(pcmFormat: outputFormat!, frameCapacity: 4410) else {
return nil
}
outBuffer.frameLength = 4410
self.convert(from: buffer, to: outBuffer, error: outError)
return outBuffer
}
// Rebuilds a compressed buffer from raw AAC bytes. BUG: claims 8 packets
// but writes only the FIRST packet description, with an arbitrary size of
// 4 bytes — the decoder cannot delimit the variable-sized AAC packets.
static func convertToAAC(from data: Data) -> AVAudioCompressedBuffer? {
let nsData = NSData(data: data)
let inputFormat = AudioBufferFormatHelper.AACFormat()
let buffer = AVAudioCompressedBuffer(format: inputFormat!, packetCapacity: 8, maximumPacketSize: 768)
buffer.byteLength = UInt32(data.count)
buffer.packetCount = 8
buffer.data.copyMemory(from: nsData.bytes, byteCount: nsData.length)
buffer.packetDescriptions!.pointee.mDataByteSize = 4
return buffer
}
// Shared conversion driver. BUG: creates a fresh AVAudioConverter on each
// call, and the input block never signals end-of-data for the single
// source buffer it keeps returning.
private static func convert(from sourceBuffer: AVAudioBuffer, to destinationBuffer: AVAudioBuffer, error outError: NSErrorPointer) {
//init converter
let inputFormat = sourceBuffer.format
let outputFormat = destinationBuffer.format
let converter = AVAudioConverter(from: inputFormat, to: outputFormat)
converter!.bitRate = 32000
let inputBlock : AVAudioConverterInputBlock = { inNumPackets, outStatus in
outStatus.pointee = AVAudioConverterInputStatus.haveData
return sourceBuffer
}
_ = converter!.convert(to: destinationBuffer, error: outError, withInputFrom: inputBlock)
}
}
结果AVAudioPCMBuffer的数据为零。 在邮件中,我看到了错误:
AACDecoder.cpp:192:Deserialize: Unmatched number of channel elements in payload
AACDecoder.cpp:220:DecodeFrame: Error deserializing packet
[ac] ACMP4AACBaseDecoder.cpp:1337:ProduceOutputBufferList: (0x14f81b840) Error decoding packet 1: err = -1, packet length: 0
AACDecoder.cpp:192:Deserialize: Unmatched number of channel elements in payload
AACDecoder.cpp:220:DecodeFrame: Error deserializing packet
[ac] ACMP4AACBaseDecoder.cpp:1337:ProduceOutputBufferList: (0x14f81b840) Error decoding packet 3: err = -1, packet length: 0
AACDecoder.cpp:192:Deserialize: Unmatched number of channel elements in payload
AACDecoder.cpp:220:DecodeFrame: Error deserializing packet
[ac] ACMP4AACBaseDecoder.cpp:1337:ProduceOutputBufferList: (0x14f81b840) Error decoding packet 5: err = -1, packet length: 0
AACDecoder.cpp:192:Deserialize: Unmatched number of channel elements in payload
AACDecoder.cpp:220:DecodeFrame: Error deserializing packet
[ac] ACMP4AACBaseDecoder.cpp:1337:ProduceOutputBufferList: (0x14f81b840) Error decoding packet 7: err = -1, packet length: 0
答案 0(得分:4)
您的尝试存在一些问题:
首先,在把数据转换回 AVAudioCompressedBuffer 时,您没有设置多个数据包描述。由于AAC数据包的大小是可变的,您需要创建这些描述:可以从原始AAC缓冲区中复制它们,也可以手动解析(相当麻烦),或者使用 AudioFileStream API 从数据中解析出来。
其次,您一次又一次地重新创建 AVAudioConverter——对每个缓冲区都新建一次,丢弃了它们的内部状态。例如,AAC编码器出于自身原因需要先附加2112帧静音才能开始重现音频,因此重新创建转换器会给您带来大量静音。
第三,您一遍又一遍地向 AVAudioConverter 的输入块提供同一个缓冲区。每个缓冲区只应提供一次。
32000的比特率不起作用(对我来说)
这就是我现在能想到的。请尝试对您的代码进行以下修改,现在您将其称为:
(请注意,我将某些单声道更改为立体声,所以我可以在Mac上播放往返缓冲区,因为其麦克风输入是立体声,您可能需要将其改回)
(再注:显然这里是在尝试某种往返的序列化/反序列化,但您最终到底想做什么?如果是想把AAC音频从一台设备传输到另一台设备,那么让 AVPlayer 之类的更高层API直接播放结果流,可能比自己处理数据包要容易得多。)
// Round-trip demo: encode the captured PCM `buffer` (from AVAudioInputNode)
// to AAC, serialize it to Data plus its packet descriptions, rebuild the
// compressed buffer from those, then decode back to PCM.
let aacBuffer = AudioBufferConverter.convertToAAC(from: buffer, error: nil)!
let data = Data(bytes: aacBuffer.data, count: Int(aacBuffer.byteLength))
// The per-packet sizes must travel with the bytes: AAC packets are
// variable-sized, so raw data alone cannot be re-packetized.
let packetDescriptions = Array(UnsafeBufferPointer(start: aacBuffer.packetDescriptions, count: Int(aacBuffer.packetCount)))
let aacReverseBuffer = AudioBufferConverter.convertToAAC(from: data, packetDescriptions: packetDescriptions)!
// was aacBuffer2
let pcmReverseBuffer = AudioBufferConverter.convertToPCM(from: aacReverseBuffer, error: nil)
/// Builds the source/destination formats shared by encoder and decoder.
class AudioBufferFormatHelper {

    /// Linear PCM: mono, 44.1 kHz, Float32, non-interleaved.
    static func PCMFormat() -> AVAudioFormat? {
        return AVAudioFormat(commonFormat: .pcmFormatFloat32,
                             sampleRate: 44100,
                             channels: 1,
                             interleaved: false)
    }

    /// MPEG-4 AAC: mono, 44.1 kHz. Size-related fields stay zero —
    /// the compressed codec determines packet/frame sizes on its own.
    static func AACFormat() -> AVAudioFormat? {
        var asbd = AudioStreamBasicDescription(
            mSampleRate: 44100,
            mFormatID: kAudioFormatMPEG4AAC,
            mFormatFlags: 0,
            mBytesPerPacket: 0,
            mFramesPerPacket: 0,
            mBytesPerFrame: 0,
            mChannelsPerFrame: 1,
            mBitsPerChannel: 0,
            mReserved: 0)
        return AVAudioFormat(streamDescription: &asbd)
    }
}
/// AAC <-> LPCM conversion built on AVAudioConverter.
/// Converters are created once and reused: the AAC codec is stateful
/// (e.g. the encoder's priming frames), so recreating one per buffer
/// discards that state and corrupts the stream.
class AudioBufferConverter {

    static var lpcmToAACConverter: AVAudioConverter! = nil

    /// Encodes one LPCM buffer to AAC (up to 8 packets).
    /// - Parameters:
    ///   - buffer: the source LPCM buffer; its format seeds the converter.
    ///   - outError: receives any AVAudioConverter error.
    static func convertToAAC(from buffer: AVAudioBuffer, error outError: NSErrorPointer) -> AVAudioCompressedBuffer? {
        let outputFormat = AudioBufferFormatHelper.AACFormat()
        let outBuffer = AVAudioCompressedBuffer(format: outputFormat!, packetCapacity: 8, maximumPacketSize: 768)
        // Lazily create the encoder on first use so its internal state
        // persists across successive buffers.
        if lpcmToAACConverter == nil {
            let inputFormat = buffer.format
            lpcmToAACConverter = AVAudioConverter(from: inputFormat, to: outputFormat!)
            // print("available rates \(lpcmToAACConverter.applicableEncodeBitRates)")
            // lpcmToAACConverter!.bitRate = 96000
            lpcmToAACConverter.bitRate = 32000 // have end of stream problems with this, not sure why
        }
        self.convert(withConverter: lpcmToAACConverter, from: buffer, to: outBuffer, error: outError)
        return outBuffer
    }

    static var aacToLPCMConverter: AVAudioConverter! = nil

    /// Decodes one AAC buffer back to LPCM (up to 4410 frames,
    /// i.e. 100 ms at 44.1 kHz). Returns nil if the PCM buffer
    /// cannot be allocated.
    static func convertToPCM(from buffer: AVAudioBuffer, error outError: NSErrorPointer) -> AVAudioPCMBuffer? {
        let outputFormat = AudioBufferFormatHelper.PCMFormat()
        guard let outBuffer = AVAudioPCMBuffer(pcmFormat: outputFormat!, frameCapacity: 4410) else {
            return nil
        }
        // The decoder is stateful too — create once, reuse for every buffer.
        if aacToLPCMConverter == nil {
            let inputFormat = buffer.format
            aacToLPCMConverter = AVAudioConverter(from: inputFormat, to: outputFormat!)
        }
        self.convert(withConverter: aacToLPCMConverter, from: buffer, to: outBuffer, error: outError)
        return outBuffer
    }

    /// Rebuilds an AVAudioCompressedBuffer from raw AAC bytes plus the packet
    /// descriptions captured at encode time. The descriptions are mandatory:
    /// AAC packets are variable-sized, so the raw bytes alone cannot be
    /// re-packetized. Returns nil when `packetDescriptions` is empty.
    static func convertToAAC(from data: Data, packetDescriptions: [AudioStreamPacketDescription]) -> AVAudioCompressedBuffer? {
        // Guard: `.max()!` would crash on an empty description list.
        guard let maximumPacketSize = packetDescriptions.map({ Int($0.mDataByteSize) }).max() else {
            return nil
        }
        let nsData = NSData(data: data)
        let inputFormat = AudioBufferFormatHelper.AACFormat()
        let buffer = AVAudioCompressedBuffer(format: inputFormat!,
                                             packetCapacity: AVAudioPacketCount(packetDescriptions.count),
                                             maximumPacketSize: maximumPacketSize)
        buffer.byteLength = UInt32(data.count)
        buffer.packetCount = AVAudioPacketCount(packetDescriptions.count)
        buffer.data.copyMemory(from: nsData.bytes, byteCount: nsData.length)
        // Copy ALL packet descriptions. (A previous revision also wrote the
        // whole data length into the first description's mDataByteSize first;
        // that was dead code — initialize(from:count:) overwrites it.)
        buffer.packetDescriptions!.initialize(from: packetDescriptions, count: packetDescriptions.count)
        return buffer
    }

    /// Drives one conversion pass: feeds `sourceBuffer` exactly once, then
    /// reports .noDataNow so the converter does not re-consume the same data.
    private static func convert(withConverter: AVAudioConverter, from sourceBuffer: AVAudioBuffer, to destinationBuffer: AVAudioBuffer, error outError: NSErrorPointer) {
        // input each buffer only once
        var newBufferAvailable = true
        let inputBlock: AVAudioConverterInputBlock = { inNumPackets, outStatus in
            if newBufferAvailable {
                outStatus.pointee = .haveData
                newBufferAvailable = false
                return sourceBuffer
            } else {
                outStatus.pointee = .noDataNow
                return nil
            }
        }
        let status = withConverter.convert(to: destinationBuffer, error: outError, withInputFrom: inputBlock)
        print("status: \(status.rawValue)")
    }
}