I have an app that uses AVAudioSession to record audio and analyze it to determine frequencies for tuning. The problem is that the sample rate reported by AVAudioSession.sharedInstance().sampleRate does not match the rate at which the data is actually recorded.
On some devices (iPhone SE and iPad Mini 2), the reported sample rate is 44,100 and matches the actual data. But on certain other devices (iPhone 6S, iPhone 8), the system reports a sample rate of 48,000, yet the returned samples only make sense when interpreted as if they were recorded at 44,100 samples per second. For example, a 440 Hz reference tone analyzed as 48 kHz data would read as roughly 440 × 48,000 / 44,100 ≈ 479 Hz.
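In case it helps with diagnosis, I can also read a rate straight out of the buffers the capture session delivers. This is just a minimal sketch (the delegate method mirrors the one in my code below), and I don't know whether this rate is any more trustworthy, but it compares each buffer's format description against what the session reports:

func captureOutput(_ captureOutput: AVCaptureOutput,
                   didOutput sampleBuffer: CMSampleBuffer,
                   from connection: AVCaptureConnection) {
    // Pull the AudioStreamBasicDescription out of the buffer's format description.
    if let desc = CMSampleBufferGetFormatDescription(sampleBuffer),
       let asbd = CMAudioFormatDescriptionGetStreamBasicDescription(desc)?.pointee {
        print("buffer rate:", asbd.mSampleRate,
              "session rate:", AVAudioSession.sharedInstance().sampleRate)
    }
}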
I tried to control the sample rate with AVAudioSession.sharedInstance().setPreferredSampleRate(), but it appears to have no effect at all.
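For reference, that attempt looked roughly like this (simplified, with error handling collapsed):

do {
    let session = AVAudioSession.sharedInstance()
    try session.setPreferredSampleRate(44_100)
    try session.setActive(true)
    // On the affected devices, sampleRate still comes back as 48,000.
    print("preferred:", session.preferredSampleRate, "actual:", session.sampleRate)
} catch {
    print("audio session error:", error)
}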
Why doesn't the reported sample rate match the actual data?
Here is some code:
import AVFoundation

class Myrecorder: NSObject, AVCaptureAudioDataOutputSampleBufferDelegate {

    let session: AVCaptureSession

    override init() {
        session = AVCaptureSession()
        super.init()
    }

    enum SetupResult {
        case OK, noInput, noPermission, internalError, waiting
    }

    var isSetup: Bool = false
    // Configures the audio session and the capture pipeline.
    // The callback is invoked after the user answers a permission prompt.
    func setup(callback: @escaping (() -> Bool)) -> SetupResult {
        do {
            let sharedInstance = AVAudioSession.sharedInstance()
            try sharedInstance.setCategory(AVAudioSessionCategoryPlayAndRecord)
        } catch {
            return SetupResult.internalError
        }
        guard let device = AVCaptureDevice.default(for: AVMediaType.audio) else {
            return SetupResult.noInput
        }
        let status = AVCaptureDevice.authorizationStatus(for: AVMediaType.audio)
        guard status != .denied else {
            return SetupResult.noPermission
        }
        guard status != .notDetermined else {
            AVCaptureDevice.requestAccess(for: AVMediaType.audio) { granted in
                _ = callback()
            }
            return SetupResult.waiting
        }
        let input: AVCaptureDeviceInput
        do {
            try device.lockForConfiguration()
            input = try AVCaptureDeviceInput(device: device)
            device.unlockForConfiguration()
        } catch {
            device.unlockForConfiguration()
            return SetupResult.internalError
        }
        let output = AVCaptureAudioDataOutput()
        output.setSampleBufferDelegate(self, queue: MyApp.realTimeQueue)
        session.beginConfiguration()
        guard session.canAddInput(input) else {
            session.commitConfiguration()
            return SetupResult.internalError
        }
        session.addInput(input)
        guard session.canAddOutput(output) else {
            session.commitConfiguration()
            return SetupResult.internalError
        }
        session.addOutput(output)
        session.commitConfiguration()
        isSetup = true
        return SetupResult.OK
    }
    // The rate the analysis code assumes; on some devices this does not
    // match the data actually delivered.
    static func getSampleRate() -> Double {
        return AVAudioSession.sharedInstance().sampleRate
    }
    // Delegate callback: extract the 16-bit samples from each buffer and
    // hand them to the analysis code.
    func captureOutput(_ captureOutput: AVCaptureOutput,
                       didOutput sampleBuffer: CMSampleBuffer,
                       from connection: AVCaptureConnection) {
        var buffer: CMBlockBuffer? = nil
        var audioBufferList = AudioBufferList(
            mNumberBuffers: 1,
            mBuffers: AudioBuffer(mNumberChannels: 1, mDataByteSize: 0, mData: nil)
        )
        // Fill audioBufferList, retaining the backing block buffer so the
        // data pointers stay valid while we read them.
        CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(
            sampleBuffer,
            nil,
            &audioBufferList,
            MemoryLayout<AudioBufferList>.size,
            nil,
            nil,
            UInt32(kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment),
            &buffer
        )
        let abl = UnsafeMutableAudioBufferListPointer(&audioBufferList)
        for buff in abl {
            guard let data = buff.mData else { continue }
            let samples = UnsafeBufferPointer<Int16>(
                start: data.assumingMemoryBound(to: Int16.self),
                count: Int(buff.mDataByteSize) / MemoryLayout<Int16>.size)
            MyApp.putSamples_A(samples: [Int16](samples))
        }
    }
    func record() {
        // startRunning() blocks, so dispatch it off the main thread.
        MyApp.realTimeQueue.async {
            self.session.startRunning()
        }
    }

    func pause() {
        session.stopRunning()
    }
}
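For context, here is roughly how the analysis side consumes the rate (illustrative only, not my exact code): an FFT peak bin is converted to a frequency, so an inflated rate inflates every pitch reading by the same factor.

// Illustrative sketch - this helper is not my real analysis code.
func frequency(ofPeakBin peakBin: Int, fftSize: Int) -> Double {
    // If getSampleRate() returns 48,000 while the data was really captured
    // at 44,100, every result reads high by a factor of 48000/44100 ≈ 1.088.
    return Double(peakBin) * Myrecorder.getSampleRate() / Double(fftSize)
}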