I tried to use AVCaptureDevice and friends to implement a volume level meter. It compiles and runs, but the values seem random, and I keep getting overflow errors.

Edit:

Is it also normal for the RMS to range from 0 to about 20000?
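(For reference: signed 16-bit samples peak at 32767, and a full-scale sine wave has an RMS of 32767/√2 ≈ 23170, so raw Int16 RMS readings approaching 20000 are plausible for loud input.)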
if let audioCaptureDevice: AVCaptureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeAudio) {
    try audioCaptureDevice.lockForConfiguration()
    let audioInput = try AVCaptureDeviceInput(device: audioCaptureDevice)
    audioCaptureDevice.unlockForConfiguration()

    if captureSession.canAddInput(audioInput) {
        captureSession.addInput(audioInput)
        print("added input")
    }

    let audioOutput = AVCaptureAudioDataOutput()
    // GlobalUserInitiatedQueue: presumably a global-queue helper,
    // e.g. dispatch_get_global_queue(QOS_CLASS_USER_INITIATED, 0)
    audioOutput.setSampleBufferDelegate(self, queue: GlobalUserInitiatedQueue)

    if captureSession.canAddOutput(audioOutput) {
        captureSession.addOutput(audioOutput)
        print("added output")
    }

    // supposed to start the session off the UI queue because it takes a while
    dispatch_async(GlobalUserInitiatedQueue) {
        print("starting captureSession")
        self.captureSession.startRunning()
    }
}
...
func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
    // Needs to be initialized somehow, even if we take only the address
    var audioBufferList = AudioBufferList(mNumberBuffers: 1,
        mBuffers: AudioBuffer(mNumberChannels: 1, mDataByteSize: 0, mData: nil))
    // this needs to be declared in the method, otherwise it only runs 125 times?
    var blockBuffer: CMBlockBuffer?

    CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(
        sampleBuffer,
        nil,
        &audioBufferList,
        sizeof(audioBufferList.dynamicType),
        nil,
        nil,
        UInt32(kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment),
        &blockBuffer
    )

    let abl = UnsafeMutableAudioBufferListPointer(&audioBufferList)
    for buffer in abl {
        let samples = UnsafeMutableBufferPointer<Int16>(start: UnsafeMutablePointer(buffer.mData),
            count: Int(buffer.mDataByteSize) / sizeof(Int16))
        var sum: Int = 0
        for sample in samples {
            sum = sum + Int(sample * sample)
        }
        let rms = sqrt(Double(sum) / Double(samples.count))
    }
}
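(Aside, not part of the original question: if the values still look random after the overflow is dealt with, it is worth confirming that the capture output really delivers 16-bit integer PCM before the bytes are reinterpreted as Int16. A minimal check, written in current Swift syntax; the helper name is made up:)

import AVFoundation

// Logs the actual sample format so the Int16 reinterpretation can be verified.
func logSampleFormat(_ sampleBuffer: CMSampleBuffer) {
    if let desc = CMSampleBufferGetFormatDescription(sampleBuffer),
       let asbd = CMAudioFormatDescriptionGetStreamBasicDescription(desc)?.pointee {
        let isSignedInteger = (asbd.mFormatFlags & kAudioFormatFlagIsSignedInteger) != 0
        print("bits per channel: \(asbd.mBitsPerChannel), signed integer: \(isSignedInteger)")
    }
}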
Answer 0 (score: 3)
Hey, I don't fully understand it, but here is a working Swift 5 version:
func captureOutput(_ output: AVCaptureOutput,
                   didOutput sampleBuffer: CMSampleBuffer,
                   from connection: AVCaptureConnection) {

    var buffer: CMBlockBuffer? = nil

    // Needs to be initialized somehow, even if we take only the address
    let convenienceBuffer = AudioBuffer(mNumberChannels: 1, mDataByteSize: 0, mData: nil)
    var audioBufferList = AudioBufferList(mNumberBuffers: 1,
                                          mBuffers: convenienceBuffer)

    CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(
        sampleBuffer,
        bufferListSizeNeededOut: nil,
        bufferListOut: &audioBufferList,
        bufferListSize: MemoryLayout<AudioBufferList>.size(ofValue: audioBufferList),
        blockBufferAllocator: nil,
        blockBufferMemoryAllocator: nil,
        flags: UInt32(kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment),
        blockBufferOut: &buffer
    )

    let abl = UnsafeMutableAudioBufferListPointer(&audioBufferList)

    for buffer in abl {
        let originRawPtr = buffer.mData
        let ptrDataSize = Int(buffer.mDataByteSize)
        // mDataByteSize is in bytes; each Int16 sample occupies MemoryLayout<Int16>.size (2) bytes
        let sampleCount = ptrDataSize / MemoryLayout<Int16>.size

        // From raw pointer to typed Int16 pointer
        let buffPtrInt16 = originRawPtr?.bindMemory(to: Int16.self, capacity: sampleCount)

        // Wrap it in a buffer pointer so we can iterate over it simply
        let samples = UnsafeMutableBufferPointer<Int16>(start: buffPtrInt16,
                                                        count: sampleCount)

        // Root mean square: average of the squared samples, then square root
        let sumOfSquaredSamples = samples.map(Float.init).reduce(0) { $0 + $1 * $1 }
        let rms = sqrt(sumOfSquaredSamples / Float(samples.count))

        DispatchQueue.main.async {
            print("RMS: \(rms)")
        }
    }
}
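If you want the familiar decibel scale instead of a raw RMS, a small follow-up sketch (not part of the original answer; it assumes the Int16 full scale, 32767, as the 0 dBFS reference):

import Foundation

// Converts a raw RMS value (0...32767 for Int16 samples) to dBFS.
func decibels(fromRMS rms: Float) -> Float {
    guard rms > 0 else { return -160 } // finite floor for silence instead of -infinity
    return 20 * log10(rms / 32767)
}

// A full-scale sine wave (RMS ≈ 23170) comes out at about -3 dBFS.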
Answer 1 (score: 0)
Looks like I've got it working. I cast each sample to Int64 before doing any arithmetic.
for buffer in abl {
    let samples = UnsafeMutableBufferPointer<Int16>(start: UnsafeMutablePointer(buffer.mData),
        count: Int(buffer.mDataByteSize) / sizeof(Int16))
    var sum: Int64 = 0
    for sample in samples {
        let s = Int64(sample)
        sum += s * s
    }
    dispatch_async(dispatch_get_main_queue()) {
        self.volLevel.text = String(sqrt(Float(sum / Int64(samples.count))))
    }
}
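For context on why the cast matters: in the question's code, sample * sample is evaluated in Int16 before the conversion to Int ever happens, and Int16 tops out at 32767. A tiny illustration (the values are mine):

let sample: Int16 = 20000
// sample * sample would trap at runtime: 400_000_000 does not fit in Int16
let squared = Int64(sample) * Int64(sample) // 400000000, fits comfortably in Int64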
Answer 2 (score: 0)
I played around with your example. Here is a complete Swift 2 snippet:
// also define a variable in class scope, otherwise captureOutput will not be called
var session: AVCaptureSession!

func startCapture() {
    if let device: AVCaptureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeAudio) {
        do {
            self.session = AVCaptureSession()
            try device.lockForConfiguration()
            let audioInput = try AVCaptureDeviceInput(device: device)
            device.unlockForConfiguration()

            if self.session.canAddInput(audioInput) {
                self.session.addInput(audioInput)
                print("added input")
            }

            let audioOutput = AVCaptureAudioDataOutput()
            audioOutput.setSampleBufferDelegate(self, queue: dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0))

            if self.session.canAddOutput(audioOutput) {
                self.session.addOutput(audioOutput)
                print("added output")
            }

            // supposed to start the session off the UI queue because it takes a while
            dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0)) {
                print("starting captureSession")
                self.session.startRunning()
            }
        } catch {
            // configuration locking or input creation failed
        }
    }
}
func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
    var buffer: CMBlockBuffer? = nil

    // Needs to be initialized somehow, even if we take only the address
    var audioBufferList = AudioBufferList(mNumberBuffers: 1,
        mBuffers: AudioBuffer(mNumberChannels: 1, mDataByteSize: 0, mData: nil))

    CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(
        sampleBuffer,
        nil,
        &audioBufferList,
        sizeof(audioBufferList.dynamicType),
        nil,
        nil,
        UInt32(kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment),
        &buffer
    )

    let abl = UnsafeMutableAudioBufferListPointer(&audioBufferList)

    for buffer in abl {
        let samples = UnsafeMutableBufferPointer<Int16>(start: UnsafeMutablePointer(buffer.mData),
            count: Int(buffer.mDataByteSize) / sizeof(Int16))
        var sum: Int64 = 0
        for sample in samples {
            let s = Int64(sample)
            sum = sum + s * s // widen before squaring so the multiply cannot overflow
        }
        dispatch_async(dispatch_get_main_queue()) {
            print(String(sqrt(Float(sum / Int64(samples.count)))))
        }
    }
}
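To wire this up (my guess at the surrounding scaffolding, not part of the original answer), the containing class just has to adopt the delegate protocol and kick things off; the class name here is hypothetical:

import UIKit
import AVFoundation

// Hypothetical host class: adopts the sample-buffer delegate protocol so
// captureOutput(_:didOutputSampleBuffer:fromConnection:) above gets called.
class LevelMeterViewController: UIViewController, AVCaptureAudioDataOutputSampleBufferDelegate {
    var session: AVCaptureSession!

    override func viewDidLoad() {
        super.viewDidLoad()
        startCapture() // from the snippet above; RMS values then print per buffer
    }

    // startCapture() and captureOutput(...) as defined above go here
}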