iOS: Audio recording level metering without creating a file

Date: 2016-06-10 09:16:09

Tags: ios audio avcapturesession avaudiorecorder

I want to meter the audio input level without creating a file. I have read various posts and found these approaches:

  1. Use AVAudioRecorder and write to /dev/null (this does sound like a good idea, but it does not work for me on iOS 8/9; see the sketch after the code below).
  2. Use AVCaptureSession, which does work for me, but I do not know what implications it might have for App Store submission (first question). Another downside is that it seems to use about 10% CPU continuously on an iPad 2 (second question).
  3. Here is my code, inspired by similar SO posts:

    import AVFoundation
    import CoreMedia

    // self.session is an AVCaptureSession property on the enclosing class,
    // which also adopts AVCaptureAudioDataOutputSampleBufferDelegate.
    func startCapture() {
        if let device: AVCaptureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeAudio) {

            do {
                self.session = AVCaptureSession()
                try device.lockForConfiguration()

                let audioInput = try AVCaptureDeviceInput(device: device)
                device.unlockForConfiguration()

                if self.session.canAddInput(audioInput) {
                    self.session.addInput(audioInput)
                    print("added input")
                }

                let audioOutput = AVCaptureAudioDataOutput()

                // Deliver sample buffers to the delegate on a background queue
                audioOutput.setSampleBufferDelegate(self, queue: dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0))

                if self.session.canAddOutput(audioOutput) {
                    self.session.addOutput(audioOutput)
                    print("added output")
                }

                // startRunning() blocks, so start the session off the main queue
                dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0)) {
                    print("starting captureSession")
                    self.session.startRunning()
                }

            } catch {
                // Don't swallow configuration errors silently
                print("capture setup failed: \(error)")
            }
        }
    }
    
    func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {

        var buffer: CMBlockBuffer? = nil

        // Needs to be initialized somehow, even if we only take its address
        var audioBufferList = AudioBufferList(mNumberBuffers: 1,
                                              mBuffers: AudioBuffer(mNumberChannels: 1, mDataByteSize: 0, mData: nil))

        // Have Core Media fill the AudioBufferList with the sample buffer's PCM data
        CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(
            sampleBuffer,
            nil,
            &audioBufferList,
            sizeof(AudioBufferList),
            nil,
            nil,
            UInt32(kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment),
            &buffer
        )

        let abl = UnsafeMutableAudioBufferListPointer(&audioBufferList)

        for buffer in abl {
            // Treat the raw bytes as 16-bit signed PCM samples
            let samples = UnsafeMutableBufferPointer<Int16>(start: UnsafeMutablePointer(buffer.mData),
                                                            count: Int(buffer.mDataByteSize) / sizeof(Int16))

            // Avoid a division by zero on an empty buffer
            guard samples.count > 0 else { continue }

            var sum: Int64 = 0

            for sample in samples {
                let s = Int64(sample)
                sum += s * s
            }

            // Root mean square in linear Int16 units; 20 * log10(rms / 32767) would give dBFS
            let rms = sqrt(Float(sum / Int64(samples.count)))

            dispatch_async(dispatch_get_main_queue()) {
                print(String(rms))
            }
        }
    }
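
For reference, here is a minimal sketch of approach 1: an AVAudioRecorder that records into /dev/null with metering enabled, the variant that reportedly stopped working for me on iOS 8/9. The class name NullRecorderMeter, the settings dictionary, and the 0.1 s polling interval are illustrative assumptions, not values taken from the question.

    import AVFoundation
    import AudioToolbox

    // Hypothetical helper class; names, settings, and timer interval are illustrative only.
    class NullRecorderMeter: NSObject {

        var recorder: AVAudioRecorder?
        var meterTimer: NSTimer?

        func start() {
            do {
                let audioSession = AVAudioSession.sharedInstance()
                try audioSession.setCategory(AVAudioSessionCategoryRecord)
                try audioSession.setActive(true)

                // Record into /dev/null so nothing accumulates on disk
                let url = NSURL(fileURLWithPath: "/dev/null")
                let settings: [String: AnyObject] = [
                    AVFormatIDKey: Int(kAudioFormatAppleLossless),
                    AVSampleRateKey: 44100.0,
                    AVNumberOfChannelsKey: 1
                ]

                let recorder = try AVAudioRecorder(URL: url, settings: settings)
                recorder.meteringEnabled = true
                recorder.record()
                self.recorder = recorder

                // Poll the meters periodically (0.1 s is an arbitrary choice)
                self.meterTimer = NSTimer.scheduledTimerWithTimeInterval(0.1, target: self,
                    selector: #selector(updateMeter), userInfo: nil, repeats: true)
            } catch {
                print("metering setup failed: \(error)")
            }
        }

        func updateMeter() {
            guard let recorder = self.recorder else { return }
            recorder.updateMeters()
            // averagePowerForChannel returns dBFS: 0 dB at full scale, more negative when quieter
            print(recorder.averagePowerForChannel(0))
        }

        func stop() {
            meterTimer?.invalidate()
            recorder?.stop()
        }
    }

The usual reasoning behind this trick is that AVAudioRecorder requires a writable URL, and /dev/null simply discards whatever is written to it, so the input levels can be read via updateMeters() and averagePowerForChannel() without a real file being created.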
    

0 Answers:

No answers yet.