How can I monitor the device's audio output so I can tell when sound is actually coming out of the speaker/headphone jack?

Asked: 2015-11-17 08:49:03

Tags: ios swift audio swift2

I need to kick off an action the moment the device starts outputting audio. I am using AVPlayer and streaming audio files from Parse. Other approaches that wait on (AVPlayer.currentTime() != nil) and (AVPlayer.rate > 0) are not accurate enough; I need to know exactly when audio is coming out of the device. I have tried using AVAudioEngine and attaching an AVAudioNode, which has an AVAudioNodeBus, but I could not get it to work. Any suggestions or tips would be great, thanks!
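For context, here is roughly what the rate-based check looks like (a minimal sketch using Swift 2 era APIs; the wrapper class is purely illustrative). The trouble is that rate changes as soon as playback is requested, which can be well before anything is actually audible:

import AVFoundation

class PlaybackObserver: NSObject {
    let player = AVPlayer()

    func startObserving() {
        player.addObserver(self, forKeyPath: "rate", options: [.New], context: nil)
    }

    override func observeValueForKeyPath(keyPath: String?, ofObject object: AnyObject?,
        change: [String : AnyObject]?, context: UnsafeMutablePointer<Void>) {
        if keyPath == "rate", let rate = change?[NSKeyValueChangeNewKey] as? Float where rate > 0 {
            // The player has *started*, but buffers may not have reached the hardware yet.
            print("AVPlayer.rate > 0")
        }
    }
}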

Here is my AudioEngine code. I instantiate the AudioEngine at the instance level. When creating the standard format, I don't know what to pass for standardFormatWithSampleRate or for channels. When I try to install a tap on the bus, I don't know what to use for the block, so I pass nil, but that also triggers an error. Any help would be appreciated; I am new to iOS development, have read Apple's documentation several times, and still can't figure it out, and I can't find any recent examples online.
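For reference, a sketch of what a non-nil tap block can look like (assumed values: bus 0 and 1024-frame buffers; the format is read from the node instead of being guessed). Note that an AVAudioEngine tap only observes nodes attached to that engine, so it will never see audio played by a separate AVPlayer:

import AVFoundation

let engine = AVAudioEngine()
let playerNode = AVAudioPlayerNode()
engine.attachNode(playerNode)
engine.connect(playerNode, to: engine.mainMixerNode, format: nil)

// Ask the node for its format rather than guessing sample rate/channels.
let format = playerNode.outputFormatForBus(0)

// installTapOnBus requires a non-nil block; passing nil is what raises the error.
playerNode.installTapOnBus(0, bufferSize: 1024, format: format) { buffer, time in
    print("tap received \(buffer.frameLength) frames at \(time)")
}

do {
    try engine.start()
} catch {
    print("engine failed to start: \(error)")
}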

import UIKit
import AVFoundation
import Parse

// File-scope declarations referenced below but omitted from the original post (assumed):
var AudioPlayer = AVPlayer()
var AudioEngine = AVAudioEngine()
var SelectedSongNumber = Int()

class TableViewController: UITableViewController, AVAudioPlayerDelegate {

var iDArray = [String]()
var NameArray = [String]()


var durationInSeconds = Double()

var currentSong = String()




override func viewDidLoad() {
    super.viewDidLoad()



    let ObjectIDQuery = PFQuery(className: "Songs")
    ObjectIDQuery.findObjectsInBackgroundWithBlock {
        (objectsArray: [PFObject]?, error: NSError?) -> Void in

        // Guard against a nil result before indexing into it.
        guard let objectIDs = objectsArray else { return }

        for object in objectIDs {
            self.iDArray.append(object.valueForKey("objectId") as! String)
            self.NameArray.append(object.valueForKey("SongName") as! String)
        }
        // Reload once, after all rows have been appended.
        self.tableView.reloadData()

    }

    do {
        try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryPlayback)
        print("AVAudioSession Category Playback OK")
        do {
            try AVAudioSession.sharedInstance().setActive(true)
            print("AVAudioSession is Active")
        } catch let error as NSError {
            print(error.localizedDescription)
        }
    } catch let error as NSError {
        print(error.localizedDescription)
    }



}

func grabSong () {


    let songQuery = PFQuery(className: "Songs")
    songQuery.getObjectInBackgroundWithId(iDArray[SelectedSongNumber], block: {
        (object: PFObject?, error : NSError?) -> Void in


        if let audioFile = object?["SongFile"] as? PFFile {
            let audioFileUrlString: String = audioFile.url!
            let audioFileUrl = NSURL(string: audioFileUrlString)!

            AudioPlayer = AVPlayer(URL: audioFileUrl)
            AudioPlayer.play()
        }
    })

}

func audioFunction() {

    var audioPlayerNode = AVAudioNode()
    var audioBus = AVAudioNodeBus()


    var standardFormat = AVAudioFormat(standardFormatWithSampleRate: <#T##Double#>, channels: <#T##AVAudioChannelCount#>)


    AudioEngine.attachNode(audioPlayerNode)

    audioPlayerNode.outputFormatForBus(audioBus)

    audioPlayerNode.installTapOnBus(audioBus, bufferSize: 100, format: standardFormat, block: nil)

    if AudioEngine.running == true {
        print("the audio engine is running")
    } else {
        print("the audio engine is NOTTT running")
    }

}


func attachNode(audioNode : AVAudioNode) {
    AudioEngine.attachNode(audioNode)

    // Accessing outputNode makes the engine instantiate its output unit.
    print(AudioEngine.outputNode.description)

    if AudioEngine.running == true {
        print("the audio engine is running")
    } else {
        print("the audio engine is NOTTT running")
    }
}

override func tableView(tableView: UITableView, numberOfRowsInSection section: Int) -> Int {

    return iDArray.count
}


override func tableView(tableView: UITableView, cellForRowAtIndexPath indexPath: NSIndexPath) -> UITableViewCell {
    let cell = tableView.dequeueReusableCellWithIdentifier("Cell")
    cell?.textLabel!.text = NameArray[indexPath.row]

    return cell!
}



override func tableView(tableView: UITableView, didSelectRowAtIndexPath indexPath: NSIndexPath) {

    SelectedSongNumber = indexPath.row
    grabSong()
}

}

Should I be using AVAudioSession? Or AVCaptureSession?

1 Answer:

Answer 0 (score: 1)

I would use an audio tap on the AVPlayer's item to know when audio is actually playing / about to play. Basically, you get the audio tap callback just before the audio goes out the speaker/headphone jack.

One complication: I'm not sure how to get at the AVAsset tracks for some streaming types (read: podcasts), but remote (and local) mp3 files work fine.

import AVFoundation
import MediaToolbox

var player: AVPlayer?

func doit() {
    let url = NSURL(string: "URL TO YOUR POSSIBLY REMOTE AUDIO FILE")!
    let asset = AVURLAsset(URL:url)
    let playerItem = AVPlayerItem(asset: asset)

    let tapProcess: @convention(c) (MTAudioProcessingTap, CMItemCount, MTAudioProcessingTapFlags, UnsafeMutablePointer<AudioBufferList>, UnsafeMutablePointer<CMItemCount>, UnsafeMutablePointer<MTAudioProcessingTapFlags>) -> Void = {
        (tap, numberFrames, flags, bufferListInOut, numberFramesOut, flagsOut) -> Void in

        // Audio coming out!
        let status = MTAudioProcessingTapGetSourceAudio(tap, numberFrames, bufferListInOut, flagsOut, nil, numberFramesOut)
        print("get audio: \(status)\n")
    }

    var callbacks = MTAudioProcessingTapCallbacks(
        version: kMTAudioProcessingTapCallbacksVersion_0,
        clientInfo: UnsafeMutablePointer(Unmanaged.passUnretained(self).toOpaque()),
        `init`: nil,
        finalize: nil,
        prepare: nil,
        unprepare: nil,
        process: tapProcess)

    // Create the tap; PostEffects means the callback sees audio after any inserted effects.
    var tap: Unmanaged<MTAudioProcessingTap>?
    let err = MTAudioProcessingTapCreate(kCFAllocatorDefault, &callbacks, kMTAudioProcessingTapCreationFlag_PostEffects, &tap)

    if err != noErr {
        // TODO: something
    }

    // Attach the tap to the item's audio track through an AVAudioMix.
    let audioTrack = playerItem.asset.tracksWithMediaType(AVMediaTypeAudio).first!
    let inputParams = AVMutableAudioMixInputParameters(track: audioTrack)
    inputParams.audioTapProcessor = tap?.takeUnretainedValue()

    let audioMix = AVMutableAudioMix()
    audioMix.inputParameters = [inputParams]

    playerItem.audioMix = audioMix

    player = AVPlayer(playerItem: playerItem)
    player?.play()
}
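If the goal is to kick off an action exactly once when audio first comes through, the process callback above is the place to do it. Since @convention(c) closures cannot capture surrounding variables, this sketch (a drop-in replacement for tapProcess above) uses a file-scope flag and a made-up notification name, "AudioPlaybackBegan":

// File-scope flag: C-convention closures may reference globals, just not local captures.
var didStartAudio = false

let tapProcess: @convention(c) (MTAudioProcessingTap, CMItemCount, MTAudioProcessingTapFlags, UnsafeMutablePointer<AudioBufferList>, UnsafeMutablePointer<CMItemCount>, UnsafeMutablePointer<MTAudioProcessingTapFlags>) -> Void = {
    (tap, numberFrames, flags, bufferListInOut, numberFramesOut, flagsOut) -> Void in

    MTAudioProcessingTapGetSourceAudio(tap, numberFrames, bufferListInOut, flagsOut, nil, numberFramesOut)

    if !didStartAudio {
        didStartAudio = true
        // The tap runs on an audio thread; hop to main before touching app state or UI.
        dispatch_async(dispatch_get_main_queue()) {
            NSNotificationCenter.defaultCenter().postNotificationName("AudioPlaybackBegan", object: nil)
        }
    }
}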