MCSessionDelegate - 接收流时获取字节数

时间:2017-06-06 02:45:34

标签: ios swift audio streaming multipeer-connectivity

我对Swift / iOS很陌生,几天前刚刚开始学习。我正在使用Swift 3并希望开发两个iPhone应用程序,可以使用多对等连接将音频流从麦克风发送到其他iPhone设备。第一个应用程序将是发言人的应用程序,另一个应用程序将是聆听者的应用程序。

之前,我学习了如何通过此useful tutorial

宣传,浏览和邀请同行

我学会了如何从麦克风获取音频数据并将其转换为此answer和此answer的字节数。非常感谢Rhythmic Fistman

所以,我的代码是这些文章包含的内容的组合。

这是侦听器应用程序的ViewController

import UIKit
import MultipeerConnectivity
import AVFoundation


class ColorSwitchViewController: UIViewController {

    @IBOutlet weak var connectionsLabel: UILabel!

    let colorService = ColorServiceManager()
    var engine = AVAudioEngine()
    let player = AVAudioPlayerNode()

    // Somewhere, schedule the stream in the mainRunLoop, set the delegate and open it. Choose the peer that you want to connect
    var inputStream = InputStream()
    var inputStreamIsSet: Bool!
    var outputStreamIsSet: Bool!
    public let peerID = MCPeerID(displayName: UIDevice.current.name)

    //MARK: Private Functions
    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
    }

    func copyAudioBufferBytes(_ audioBuffer: AVAudioPCMBuffer) -> [UInt8] {
        let srcLeft = audioBuffer.floatChannelData![0]
        let bytesPerFrame = audioBuffer.format.streamDescription.pointee.mBytesPerFrame
        let numBytes = Int(bytesPerFrame * audioBuffer.frameLength)

        // initialize bytes to 0 (how to avoid?)
        var audioByteArray = [UInt8] (repeating: 0, count: numBytes)

        // copy data from buffer
        srcLeft.withMemoryRebound(to: UInt8.self, capacity: numBytes) { srcByteData in
            audioByteArray.withUnsafeMutableBufferPointer {
                $0.baseAddress!.initialize(from: srcByteData, count: numBytes)
            }
        }

        return audioByteArray
    }

    func bytesToAudioBuffer(_ buf: [UInt8]) -> AVAudioPCMBuffer {
        let fmt = AVAudioFormat(commonFormat: .pcmFormatFloat32, sampleRate: 44100, channels: 1, interleaved: true)
        let frameLength = UInt32(buf.count) / fmt.streamDescription.pointee.mBytesPerFrame

        let audioBuffer = AVAudioPCMBuffer(pcmFormat: fmt, frameCapacity: frameLength)
        audioBuffer.frameLength = frameLength

        let dstLeft = audioBuffer.floatChannelData![0]

        buf.withUnsafeBufferPointer {
            let src = UnsafeRawPointer($0.baseAddress!).bindMemory(to: Float.self, capacity: Int(frameLength))
            dstLeft.initialize(from: src, count: Int(frameLength))
        }

        return audioBuffer
    }

    override func viewDidLoad() {
        super.viewDidLoad()
        colorService.delegate = self
    }

    @IBAction func redTapped() {
        self.change(color: .red)
        colorService.send(colorName: "red")

    }

    @IBAction func yellowTapped() {
        self.change(color: .yellow)
        colorService.send(colorName: "yellow")
    }

    func change(color : UIColor) {
        UIView.animate(withDuration: 0.2) {
            self.view.backgroundColor = color
        }
    }

}

extension ColorSwitchViewController : ColorServiceManagerDelegate {

    func connectedDevicesChanged(manager: ColorServiceManager, connectedDevices: [String]) {
        OperationQueue.main.addOperation {
            self.connectionsLabel.text = "Connections: \(connectedDevices)"
        }
    }

    func colorChanged(manager: ColorServiceManager, colorString: String) {
        OperationQueue.main.addOperation {
            switch colorString {
            case "red":
                self.change(color: .red)
            case "yellow":
                self.change(color: .yellow)
            default:
                NSLog("%@", "Unknown color value received: \(colorString)")
            }
        }
    }

    func streamReceived(manager: ColorServiceManager, stream: InputStream, streamName: String, fromPeer: MCPeerID) {
        NSLog("%@", "name " + fromPeer.displayName)
        if streamName == "stream" && fromPeer != peerID {

            NSLog("%@", "voice received")

            stream.schedule(in: RunLoop.current, forMode: .defaultRunLoopMode)
            stream.open()

            var bytes = [UInt8](repeating: 0, count: 17640)

            if (stream.hasBytesAvailable == true) {
                NSLog("%@", "has bytes available...")
            } else {
                NSLog("%@", "has NO byte ...")
            }

            let result = stream.read(&bytes, maxLength: bytes.count)
            if result == 0 {
                print("Stream at capacity")
            } else if result == -1 {
                print("Operation failed: \(String(describing: stream.streamError))")
            } else {
                print("The number of bytes read is \(result)")
            }

            let audioBuffer = self.bytesToAudioBuffer(bytes) //Here is where the app crashes

            engine.attach(player)
            let outputFormat = AVAudioFormat(commonFormat: .pcmFormatFloat32, sampleRate: 44100, channels: 1, interleaved: true)
            engine.connect(player, to: engine.mainMixerNode, format: outputFormat)


            do {
                try engine.start()

                player.scheduleBuffer(audioBuffer, completionHandler: nil)
                player.play()
            } catch let error {
                print(error.localizedDescription)

            }
        }
    }

}

扬声器应用程序的ViewController类似,只是它包含用于发送流的代码,并且不包含用于接收的代码

// ....

    override func viewDidLoad() {
        super.viewDidLoad()
        colorService.delegate = self

    }


    func startStream() {
        let input = engine.inputNode!
        engine.attach(player)

        let bus = 0
        let inputFormat = input.inputFormat(forBus: bus)
        engine.connect(player, to: engine.mainMixerNode, format: inputFormat)

        NSLog("%@", "sendStream: to \(self.colorService.session.connectedPeers.count) peers")

        if self.colorService.session.connectedPeers.count > 0 {
            do {
                let outputStream = try self.colorService.session.startStream(withName: "stream", toPeer: self.colorService.session.connectedPeers.first!)
                outputStream.schedule(in: RunLoop.main, forMode:RunLoopMode.defaultRunLoopMode)
                outputStream.open()

                let bus = 0
                let inputFormat = input.inputFormat(forBus: bus)
                input.installTap(onBus: bus, bufferSize: 2048, format: inputFormat, block: {
                    (buffer: AVAudioPCMBuffer!, time: AVAudioTime!) -> Void in
                    self.player.scheduleBuffer(buffer)
                    let audioBuffer = self.copyAudioBufferBytes(buffer)
//                    NSLog("%@", "speaking...")
                    let result = outputStream.write(audioBuffer, maxLength: audioBuffer.count)
                    if result == 0 {
                        print("Stream at capacity")
                    } else if result == -1 {
                        print("Operation failed: \(String(describing: outputStream.streamError))")
                    } else {
                        print("The number of bytes written is \(result)")
                    }
                })


                try! engine.start()
                player.play()


            }
            catch let error {
                NSLog("%@", "Error for sending: \(error)")
            }
        }

    }

    func stopStream() {
        engine.inputNode?.removeTap(onBus: 0)
        player.stop()
    }

    @IBAction func redTapped() {
        self.change(color: .red)
        colorService.send(colorName: "red")
        self.startStream()
    }

    @IBAction func yellowTapped() {
        self.change(color: .yellow)
        colorService.send(colorName: "yellow")
        self.stopStream()
    }

    // ...

不幸的是,在侦听器方面,我得到的应用程序接收没有可用字节的流。 NSLog("%@", "has NO byte ...")被召唤了。我想知道听众应用程序是否真的收到了音频流。

那么,我的错误是什么?任何帮助将不胜感激。先感谢您。

0 个答案:

没有答案