Quick iOS screen sharing with WebRTC (ReplayKit)

Date: 2020-07-07 12:25:03

Tags: swift webrtc screensharing replaykit

I have implemented a WebRTC SDK for video calling, and it works correctly. During a video call, a user can share their screen with the other users. I am using ReplayKit for the screen sharing.
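For context, the broadcast itself is started from the host app via the system broadcast picker; a minimal sketch of that step is below. The extension bundle identifier is a placeholder, not something from the original post.

import ReplayKit
import UIKit

// Minimal sketch (host app side): present the system broadcast picker so the
// user can start the broadcast upload extension that contains SampleHandler.
func addBroadcastPicker(to view: UIView) {
    let picker = RPSystemBroadcastPickerView(frame: CGRect(x: 0, y: 0, width: 60, height: 60))
    // Placeholder: use your broadcast upload extension's bundle identifier.
    picker.preferredExtension = "com.example.app.BroadcastExtension"
    picker.showsMicrophoneButton = false
    view.addSubview(picker)
}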

Here is my code:

import ReplayKit
import WebRTC

class SampleHandler: RPBroadcastSampleHandler {

    var localVideoSource: RTCVideoSource?
    var videoCapturer: RTCVideoCapturer?
    var peerConnection: RTCPeerConnection?
    var localVideoTrack: RTCVideoTrack?

    var disconnectSemaphore: DispatchSemaphore?
    var videoDelegate: VideoViewExtensionDelegate?
    var signalClient: SignalingClient?
    let config = Config.default

    let peerConnectionFactory: RTCPeerConnectionFactory = {
        RTCInitializeSSL()
        let videoEncoderFactory = RTCDefaultVideoEncoderFactory()
        let videoDecoderFactory = RTCDefaultVideoDecoderFactory()
        return RTCPeerConnectionFactory(encoderFactory: videoEncoderFactory, decoderFactory: videoDecoderFactory)
    }()

    private let mediaConstraints = [kRTCMediaConstraintsOfferToReceiveAudio: kRTCMediaConstraintsValueFalse,
                                    kRTCMediaConstraintsOfferToReceiveVideo: kRTCMediaConstraintsValueTrue]

    // Only microphone audio samples are handled in processSampleBuffer below.
    static let kAudioSampleType = RPSampleBufferType.audioMic

    override func broadcastStarted(withSetupInfo setupInfo: [String: NSObject]?) {
        self.setupVideo()
    }
    override func broadcastPaused() {
        // User has requested to pause the broadcast. Samples will stop being delivered.
//        self.audioTrack?.isEnabled = false
//        self.screenTrack?.isEnabled = false
    }

    override func broadcastResumed() {
        // User has requested to resume the broadcast. Samples delivery will resume.
//        self.audioTrack?.isEnabled = true
//        self.screenTrack?.isEnabled = true
    }

    override func broadcastFinished() {
        // User has requested to finish the broadcast.
    }
    
    func setupVideo() {

        if #available(iOS 13.0, *) {
            let webSocketProvider: WebSocketProvider = NativeWebSocket(url: self.config.signalingServerUrl)
            self.signalClient = SignalingClient(webSocket: webSocketProvider)

            let config = RTCConfiguration()
            // config.iceServers = [RTCIceServer(urlStrings: iceServers)]
            config.iceServers = [RTCIceServer(urlStrings: ["// TURN & STUN server URL"],
                                              username: "// username",
                                              credential: "// password")]

            // Unified Plan is preferred over Plan B.
            // config.sdpSemantics = .unifiedPlan

            // gatherContinually lets WebRTC listen for network changes and send
            // any new candidates to the other client.
            config.continualGatheringPolicy = .gatherContinually

            let screenShareFactory = self.peerConnectionFactory

            let constraints = RTCMediaConstraints(mandatoryConstraints: nil,
                                                  optionalConstraints: ["DtlsSrtpKeyAgreement": kRTCMediaConstraintsValueTrue])
            self.peerConnection = screenShareFactory.peerConnection(with: config, constraints: constraints, delegate: nil)
            self.peerConnection?.delegate = self

            self.localVideoSource = screenShareFactory.videoSource()
            self.videoCapturer = RTCVideoCapturer(delegate: self.localVideoSource!)
            self.localVideoTrack = screenShareFactory.videoTrack(with: self.localVideoSource!, trackId: "video0")

            // let videoSender = newpeerConnection.sender(withKind: kRTCMediaStreamTrackKindVideo, streamId: "stream")
            // videoSender.track = videoTrack

            let mediaStream: RTCMediaStream = screenShareFactory.mediaStream(withStreamId: "1")
            mediaStream.addVideoTrack(self.localVideoTrack!)
            self.peerConnection?.add(mediaStream)

            self.offer(peerConnection: self.peerConnection!) { (sdp) in
                self.signalClient?.send(sdp: sdp)
            }
        }
    }
    
    
    func offer(peerConnection: RTCPeerConnection, completion: @escaping (_ sdp: RTCSessionDescription) -> Void) {

        let constraints = RTCMediaConstraints(mandatoryConstraints: self.mediaConstraints,
                                              optionalConstraints: nil)
        peerConnection.offer(for: constraints) { (sdp, error) in
            guard let sdp = sdp else {
                return
            }

            peerConnection.setLocalDescription(sdp, completionHandler: { (error) in
                completion(sdp)
            })
        }
    }

    override func processSampleBuffer(_ sampleBuffer: CMSampleBuffer, with sampleBufferType: RPSampleBufferType) {

        switch sampleBufferType {
        case RPSampleBufferType.video:
            guard let imageBuffer: CVImageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
                break
            }
            let rtcPixelBuffer = RTCCVPixelBuffer(pixelBuffer: imageBuffer)
            let timeStampNs = Int64(CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) * 1_000_000_000)
            let videoFrame = RTCVideoFrame(buffer: rtcPixelBuffer, rotation: RTCVideoRotation._0, timeStampNs: timeStampNs)
            print(videoFrame)
            // Hand the frame to the video source via the capturer delegate API.
            self.localVideoSource?.capturer(self.videoCapturer!, didCapture: videoFrame)

        case RPSampleBufferType.audioApp:
            if SampleHandler.kAudioSampleType == RPSampleBufferType.audioApp {
                // ExampleCoreAudioDeviceCapturerCallback(audioDevice, sampleBuffer)
            }

        case RPSampleBufferType.audioMic:
            if SampleHandler.kAudioSampleType == RPSampleBufferType.audioMic {
                // Microphone audio is not forwarded yet.
            }

        @unknown default:
            return
        }
    }
}

extension SampleHandler: RTCPeerConnectionDelegate {
    
    func peerConnection(_ peerConnection: RTCPeerConnection, didChange stateChanged: RTCSignalingState) {
        debugPrint("peerConnection new signaling state: \(stateChanged)")
    }
    
    func peerConnection(_ peerConnection: RTCPeerConnection, didAdd stream: RTCMediaStream) {
        debugPrint("peerConnection did add stream")
    }
    
    func peerConnection(_ peerConnection: RTCPeerConnection, didRemove stream: RTCMediaStream) {
        debugPrint("peerConnection did remote stream")
    }
    
    func peerConnectionShouldNegotiate(_ peerConnection: RTCPeerConnection) {
        debugPrint("peerConnection should negotiate")
    }
    
    func peerConnection(_ peerConnection: RTCPeerConnection, didChange newState: RTCIceConnectionState) {
        debugPrint("peerConnection new connection state: \(newState)")
       
    }
    
    func peerConnection(_ peerConnection: RTCPeerConnection, didChange newState: RTCIceGatheringState) {
        debugPrint("peerConnection new gathering state: \(newState)")
    }
    
    func peerConnection(_ peerConnection: RTCPeerConnection, didGenerate candidate: RTCIceCandidate) {
        debugPrint("peerConnection did Generate")

    }
    
    func peerConnection(_ peerConnection: RTCPeerConnection, didRemove candidates: [RTCIceCandidate]) {
        debugPrint("peerConnection did remove candidate(s)")
    }
    
    func peerConnection(_ peerConnection: RTCPeerConnection, didOpen dataChannel: RTCDataChannel) {
        debugPrint("peerConnection did open data channel")
       // self.remoteDataChannel = dataChannel
    }
}


extension SampleHandler: RTCDataChannelDelegate {
    func dataChannelDidChangeState(_ dataChannel: RTCDataChannel) {
        debugPrint("dataChannel did change state: \(dataChannel.readyState)")
    }
    
    func dataChannel(_ dataChannel: RTCDataChannel, didReceiveMessageWith buffer: RTCDataBuffer) {
       
    }
}

I am using this WebRTC project: https://github.com/stasel/WebRTC-iOS. I am getting the CMSampleBuffer data and the RTCVideoFrame, and passing them on correctly. A CMSampleBuffer dump is included below for reference.

CMSampleBuffer 0x100918370 retainCount: 5 allocator: 0x1e32175e0
    invalid = NO
    dataReady = YES
    makeDataReadyCallback = 0x0
    makeDataReadyRefcon = 0x0
    formatDescription = <CMAudioFormatDescription 0x282bf0e60 [0x1e32175e0]> {
    mediaType:'soun' 
    mediaSubType:'lpcm' 
    mediaSpecific: {
        ASBD: {
            mSampleRate: 44100.000000 
            mFormatID: 'lpcm' 
            mFormatFlags: 0xe 
            mBytesPerPacket: 4 
            mFramesPerPacket: 1 
            mBytesPerFrame: 4 
            mChannelsPerFrame: 2 
            mBitsPerChannel: 16     } 
        cookie: {(null)} 
        ACL: {(null)}
        FormatList Array: {
            Index: 0 
            ChannelLayoutTag: 0x650002 
            ASBD: {
            mSampleRate: 44100.000000 
            mFormatID: 'lpcm' 
            mFormatFlags: 0xe 
            mBytesPerPacket: 4 
            mFramesPerPacket: 1 
            mBytesPerFrame: 4 
            mChannelsPerFrame: 2 
            mBitsPerChannel: 16     }} 
    } 
    extensions: {(null)}
}
    sbufToTrackReadiness = 0x0
    numSamples = 1024
    outputPTS = {190371138262458/1000000000 = 190371.138}(based on cachedOutputPresentationTimeStamp)
    sampleTimingArray[1] = {
        {PTS = {190371138262458/1000000000 = 190371.138}, DTS = {INVALID}, duration = {1/44100 = 0.000}},
    }
    dataBuffer = 0x2828f1050

I am stuck here and do not know what is wrong with my code. Any help is greatly appreciated.

1 answer:

Answer 0 (score: 0):

WebRTC is a peer-to-peer connection. If you want to share your screen with another device, you have to create a CVPixelBuffer from the screen (using a custom capturer class, i.e. a subclass of RTCVideoCapturer) and create a WebRTC client to connect to the other device. (For a simpler setup, just split the WebRTC client out of the sample handler.)
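A minimal sketch of such a custom capturer, assuming the WebRTC iOS API used in the question (the class name ScreenCapturer and the push method are illustrative):

import ReplayKit
import WebRTC

// Minimal sketch: wrap each ReplayKit video CMSampleBuffer in an
// RTCCVPixelBuffer and forward it to the RTCVideoSource, which is the
// RTCVideoCapturerDelegate passed to RTCVideoCapturer(delegate:).
final class ScreenCapturer: RTCVideoCapturer {
    func push(_ sampleBuffer: CMSampleBuffer) {
        guard CMSampleBufferIsValid(sampleBuffer),
              let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
        let rtcBuffer = RTCCVPixelBuffer(pixelBuffer: pixelBuffer)
        let timeStampNs = Int64(CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) * 1_000_000_000)
        let frame = RTCVideoFrame(buffer: rtcBuffer, rotation: ._0, timeStampNs: timeStampNs)
        delegate?.capturer(self, didCapture: frame)
    }
}

In processSampleBuffer you would then call screenCapturer.push(sampleBuffer) in the .video case instead of building the frame inline.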

You cannot connect three devices through a single peer connection.
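In other words, a call between three or more devices needs one RTCPeerConnection per remote peer (a mesh). A minimal sketch, assuming a factory, configuration, and constraints like the ones in the question; peerId and the dictionary are illustrative:

import WebRTC

// Minimal sketch: keep one RTCPeerConnection per remote peer, keyed by a
// peerId handed out by the signaling server.
var peerConnections: [String: RTCPeerConnection] = [:]

func connection(for peerId: String,
                factory: RTCPeerConnectionFactory,
                config: RTCConfiguration,
                constraints: RTCMediaConstraints) -> RTCPeerConnection {
    if let existing = peerConnections[peerId] {
        return existing
    }
    // Each peer gets its own connection; the offer/answer exchange and ICE
    // candidates must also be handled per peer over the signaling channel.
    let pc = factory.peerConnection(with: config, constraints: constraints, delegate: nil)
    peerConnections[peerId] = pc
    return pc
}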