I've been struggling with this for a few days now and can't seem to find anyone who has run into the same problem or a way around it.
When using the AVAudioSessionCategoryPlayAndRecord category, the audio output spontaneously changes volume while playing higher-frequency content (above roughly 16 kHz). The shift causes clicks/distortion in the sound. A screenshot of a line-in capture of this happening is attached: Line-in capture
This can be observed with AVAudioPlayer, Audio Queue Services, and an AudioUnit, and it only happens with AVAudioSessionCategoryPlayAndRecord; AVAudioSessionCategoryPlayback works as expected.
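For reference, the session category is the only thing that differs between the clean and the distorted case. Below is a minimal sketch of that A/B comparison, assuming the iOS 10+ setCategory(_:mode:options:) overload and the same mode/option as the sample app further down; configureSession is just an illustrative helper, not part of the sample app:

import AVFoundation

func configureSession(playAndRecord: Bool) throws {
    let session = AVAudioSession.sharedInstance()
    if playAndRecord {
        // Configuration that shows the volume shifts
        // (.defaultToSpeaker is only valid with PlayAndRecord).
        try session.setCategory(AVAudioSessionCategoryPlayAndRecord,
                                mode: AVAudioSessionModeMeasurement,
                                options: .defaultToSpeaker)
    } else {
        // Configuration that plays the same tone as expected.
        try session.setCategory(AVAudioSessionCategoryPlayback,
                                mode: AVAudioSessionModeMeasurement,
                                options: [])
    }
    try session.setActive(true)
}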
The behavior has been observed on an iPhone 7, iPhone 8, and iPhone X; tests on an iPhone 6 did not show the issue.
Below is the code from a minimal sample app that uses Audio Queue Services to play a 19.5 kHz sine wave at a 48 kHz sample rate.
Any suggestions would be greatly appreciated.
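For reference, the phase increment used below follows directly from those two numbers: 2π × 19500 / 48000 ≈ 2.55 radians per sample, or 487.5 cycles per 1200-sample buffer.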
import UIKit
import AVFoundation
import AudioToolbox

// Oscillator state shared with the audio queue's output callback.
struct outputData
{
    // Phase increment per sample for a 19.5 kHz tone at a 48 kHz sample rate.
    var phaseDiff: Double = (2.0 * Double.pi * 19500.0) / 48000.0
    var phase: Double = 0
}

class ViewController: UIViewController {

    // Assumed members, not shown in the original excerpt: a storyboard label
    // and a flag toggled elsewhere to avoid restarting playback.
    @IBOutlet weak var statusLabel: UILabel!
    var continuePlaying = false

    var buffers: [AudioQueueBufferRef] = []
    var queue: AudioQueueRef? = nil
    var data = outputData()
    override func viewDidLoad() {
        super.viewDidLoad()
        do {
            // Play-and-record session in measurement mode, routed to the built-in speaker.
            if #available(iOS 10.0, *) {
                try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryPlayAndRecord, mode: AVAudioSessionModeMeasurement, options: .defaultToSpeaker)
            } else {
                // Fallback on earlier versions
                try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryPlayAndRecord, with: .defaultToSpeaker)
            }
            try AVAudioSession.sharedInstance().setActive(true)
        } catch let error as NSError {
            NSLog("Error setting up AVAudioSession: \(error)")
        }
        statusLabel.text = "Idle"
        setupAudioQueues()
    }
    // C-style output callback: fills the buffer with the sine wave and re-enqueues it.
    var outputCallback: AudioQueueOutputCallback = { (
        inUserData: UnsafeMutableRawPointer?,
        inAQ: AudioQueueRef,
        inBuffer: AudioQueueBufferRef) -> Void in

        let dataPtr = inUserData!.assumingMemoryBound(to: outputData.self)
        var data = dataPtr.pointee

        // Clear the buffer and mark it as completely filled.
        memset(inBuffer.pointee.mAudioData, 0, Int(inBuffer.pointee.mAudioDataBytesCapacity))
        inBuffer.pointee.mAudioDataByteSize = inBuffer.pointee.mAudioDataBytesCapacity

        // Write out the sine wave, one Float sample at a time.
        let sampleCount = Int(inBuffer.pointee.mAudioDataBytesCapacity) / MemoryLayout<Float>.size
        let floatPtr = inBuffer.pointee.mAudioData.bindMemory(to: Float.self, capacity: sampleCount)
        for i in 0 ..< sampleCount {
            floatPtr[i] = Float(sin(data.phase) * 0.2)
            data.phase = fmod(data.phase + data.phaseDiff, 2.0 * Double.pi)
        }

        // Write the advanced phase back so the next buffer continues where this one
        // left off. (The original version only mutated a local copy, which restarts
        // the phase on every buffer and adds a discontinuity at each buffer boundary.)
        dataPtr.pointee = data

        let status = AudioQueueEnqueueBuffer(inAQ, inBuffer, 0, nil)
        if status != 0 {
            NSLog("Failed to enqueue buffer: \(status)")
        }
    }
    func setupAudioQueues()
    {
        var queue: AudioQueueRef? = nil
        // 48 kHz, mono, packed 32-bit float linear PCM.
        var streamDesc = AudioStreamBasicDescription(mSampleRate: 48000.0,
                                                     mFormatID: kAudioFormatLinearPCM,
                                                     mFormatFlags: kAudioFormatFlagIsFloat | kAudioFormatFlagIsPacked,
                                                     mBytesPerPacket: 4,
                                                     mFramesPerPacket: 1,
                                                     mBytesPerFrame: 4,
                                                     mChannelsPerFrame: 1,
                                                     mBitsPerChannel: 32,
                                                     mReserved: 0)
        // Note: passing &data relies on the stored property keeping a stable
        // address for the lifetime of the queue.
        let status = AudioQueueNewOutput(&streamDesc, outputCallback, &data, nil, nil, 0, &queue)
        if status != 0
        {
            NSLog("Failed to open new AudioQueueOutput: \(status)")
            return
        }
        // Allocate and prime three 4800-byte buffers (1200 samples, 25 ms each).
        for _ in 0..<3 {
            var buffer: AudioQueueBufferRef?
            AudioQueueAllocateBuffer(queue!, 4800, &buffer)
            outputCallback(&data, queue!, buffer!)
            if let buffer: AudioQueueBufferRef = buffer {
                buffers.append(buffer)
            }
        }
        self.queue = queue
    }
    @IBAction func playSineWave(_ sender: AnyObject) {
        DispatchQueue.main.async {
            self.statusLabel.text = "Playing Sine Wave"
        }
        if continuePlaying {
            return
        }
        AudioQueueStart(self.queue!, nil)
    }
}
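For completeness, here is a sketch of how the AVAudioPlayer path mentioned above can be exercised. This is not the exact code from the original test, and makeSineFileURL is only an illustrative helper name; it renders one second of the same 19.5 kHz tone to a temporary CAF file so it can be played back under the same audio session:

import AVFoundation

// Render one second of a 19.5 kHz sine at 48 kHz into a CAF file and return its URL.
func makeSineFileURL() throws -> URL {
    let sampleRate = 48000.0
    let frameCount = AVAudioFrameCount(sampleRate) // one second of audio
    let format = AVAudioFormat(standardFormatWithSampleRate: sampleRate, channels: 1)!
    let buffer = AVAudioPCMBuffer(pcmFormat: format, frameCapacity: frameCount)!
    buffer.frameLength = frameCount

    let phaseDiff = 2.0 * Double.pi * 19500.0 / sampleRate
    var phase = 0.0
    let samples = buffer.floatChannelData![0]
    for i in 0 ..< Int(frameCount) {
        samples[i] = Float(sin(phase) * 0.2)
        phase = fmod(phase + phaseDiff, 2.0 * Double.pi)
    }

    let settings: [String: Any] = [AVFormatIDKey: kAudioFormatLinearPCM,
                                   AVSampleRateKey: sampleRate,
                                   AVNumberOfChannelsKey: 1,
                                   AVLinearPCMBitDepthKey: 32,
                                   AVLinearPCMIsFloatKey: true]
    let url = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent("sine19500.caf")
    let file = try AVAudioFile(forWriting: url, settings: settings)
    try file.write(from: buffer)
    return url
}

// Usage, with the session already configured as in viewDidLoad above
// (keep a strong reference to the player, e.g. in a property):
// player = try AVAudioPlayer(contentsOf: makeSineFileURL())
// player.play()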