将AVCaptureSession输出到iPhone扬声器

时间:2018-09-26 03:10:55

标签: ios swift audio

我对Swift编程还很陌生,正在尝试制作一个简单的麦克风应用:对着麦克风说话时,声音能通过手机自带扬声器或已连接的蓝牙扬声器实时播放出来。我认为输入部分已经正常工作,但无论是手机自带扬声器还是已连接的蓝牙扬声器,我都无法让音频输出到任何设备上播放。

这是我的代码:

import UIKit
import AVFoundation

/// A minimal "live microphone" screen: captures audio with an
/// `AVCaptureSession` and is meant to play it back through the phone's
/// speaker or a connected Bluetooth speaker.
///
/// NOTE(review): an `AVCaptureSession` only *captures* audio — it never
/// renders it. The delegate below receives raw sample buffers; to actually
/// hear the microphone, those buffers must be fed into an
/// `AVAudioEngine`/`AVAudioPlayerNode` (or the whole class switched to
/// `AVAudioEngine`, whose input node can be connected directly to the
/// output node). Configuring `AVAudioSession` (done in `onPressed`) is
/// still required so any playback is routed to the speaker/Bluetooth
/// instead of the earpiece receiver.
class MicVC: UIViewController, AVCaptureAudioDataOutputSampleBufferDelegate {

    @IBOutlet weak var offButton: UIButton!
    @IBOutlet weak var onButton: UIButton!

    /// Deep red shown on the "off" button while capture is running.
    let onColor = UIColor(red: 142/255, green: 17/255, blue: 7/255, alpha: 1.0)

    /// Desired recording format: mono AAC at 44.1 kHz.
    /// FIX: a heterogeneous dictionary literal needs the explicit
    /// `[String: Any]` annotation — without it this does not compile.
    let settings: [String: Any] = [
        AVFormatIDKey: kAudioFormatMPEG4AAC,
        AVNumberOfChannelsKey: 1,
        AVSampleRateKey: 44100]

    let captureSession = AVCaptureSession()

    /// Serial queue on which sample-buffer callbacks are delivered.
    let queue = DispatchQueue(label: "AudioSessionQueue", attributes: [])
    let captureDevice = AVCaptureDevice.default(for: AVMediaType.audio)
    var audioInput: AVCaptureDeviceInput? = nil
    var audioOutput: AVCaptureAudioDataOutput? = nil

    override func viewDidLoad() {
        super.viewDidLoad()
        onButton.backgroundColor = .orange
        offButton.backgroundColor = .gray

        onButton.layer.cornerRadius = 10
        offButton.layer.cornerRadius = 10
    }

    /// Delegate callback — invoked once per captured audio buffer.
    ///
    /// FIX: the original body called
    /// `output.connection(with: AVMediaType(rawValue: AVAudioSessionPortBuiltInSpeaker))`
    /// — a made-up media type — and discarded the result, which had no
    /// effect at all. Removed. This is where each `sampleBuffer` should be
    /// scheduled on an `AVAudioPlayerNode` to make the mic audible.
    func captureOutput(_ output: AVCaptureOutput,
                       didOutput sampleBuffer: CMSampleBuffer,
                       from connection: AVCaptureConnection) {
        print("Audio data received")
    }

    @IBAction func stopPressed(_ sender: UIButton) {
        onButton.backgroundColor = .orange
        offButton.backgroundColor = .gray

        captureSession.stopRunning()
        print("stop")
    }

    @IBAction func onPressed(_ sender: UIButton) {
        onButton.backgroundColor = .white
        offButton.backgroundColor = onColor

        // FIX: route audio to the built-in speaker or a Bluetooth device.
        // Without this, `.playAndRecord` defaults to the quiet earpiece
        // receiver and Bluetooth A2DP speakers are not eligible outputs.
        do {
            let session = AVAudioSession.sharedInstance()
            try session.setCategory(.playAndRecord,
                                    mode: .default,
                                    options: [.defaultToSpeaker,
                                              .allowBluetooth,
                                              .allowBluetoothA2DP])
            try session.setActive(true)
        } catch {
            print("Audio session could not be configured")
            print(error.localizedDescription)
        }

        // FIX: no force unwrap — bail out cleanly if there is no mic.
        guard let device = captureDevice else {
            print("No audio capture device available")
            return
        }

        // FIX: dropped the lockForConfiguration()/unlockForConfiguration()
        // pair — the lock is only required when mutating device properties
        // (focus, exposure, …), not when creating an input.
        let input: AVCaptureDeviceInput
        do {
            input = try AVCaptureDeviceInput(device: device)
        } catch {
            print("Capture devices could not be set")
            print(error.localizedDescription)
            return
        }
        audioInput = input

        let output = AVCaptureAudioDataOutput()
        output.setSampleBufferDelegate(self, queue: queue)
        audioOutput = output

        captureSession.beginConfiguration()
        if captureSession.canAddInput(input) {
            captureSession.addInput(input)
        } else {
            print("cannot add input")
        }
        if captureSession.canAddOutput(output) {
            captureSession.addOutput(output)
        } else {
            print("cannot add output")
        }
        captureSession.commitConfiguration()

        print("Starting capture session")
        captureSession.startRunning()
    }
}

0 个答案:

没有答案