与WebRTC共享实时网络摄像头流

时间:2019-06-20 14:31:02

标签: javascript web webrtc openwebrtc

我正在制作一个简单的登录/注册系统:登录到自己帐户的用户USER1可以发布公开的实时流,而登录到自己帐户的另一位用户USER2可以观看由USER1发布的该流。我尝试使用WebRTC实现此功能。USER1可以共享流,但是USER2登录后无法获取该流。

HTML代码:

<video id="video1" playsinline autoplay muted></video>
<video id="video2" playsinline autoplay></video>
<div>
    <button id="startButton">Start Camera</button>
    <button id="callButton">Go Live</button>
    <button id="hangupButton">Stop Stream</button>
</div>

WebRTC代码:

<script>
    'use strict';

    // Demo controls.
    const startButton = document.querySelector('#startButton');
    const callButton = document.querySelector('#callButton');
    const hangupButton = document.querySelector('#hangupButton');

    // Video sinks: local preview plus one per receiving peer connection.
    // NOTE(review): no #video3 element appears in the HTML above, so
    // `video3` may be null here — confirm the page really contains it.
    const video1 = document.getElementById('video1');
    const video2 = document.getElementById('video2');
    const video3 = document.getElementById('video3');

    // Only "Start Camera" is usable until a local stream exists.
    callButton.disabled = true;
    hangupButton.disabled = true;
    startButton.onclick = start;
    callButton.onclick = call;
    hangupButton.onclick = hangup;

    // One sending ("local") and one receiving ("remote") connection per
    // simulated viewer — both pairs live in this same page.
    let pc1Local;
    let pc1Remote;
    let pc2Local;
    let pc2Remote;

    // Legacy offer flags asking the remote side for both media kinds.
    const offerOptions = {
        offerToReceiveAudio: 1,
        offerToReceiveVideo: 1
    };

    // Success callback for getUserMedia: stash the capture for call()
    // to reuse, preview it locally, and unlock the "Go Live" button.
    function gotStream(mediaStream) {
        console.log('Received local stream');
        window.localStream = mediaStream;
        video1.srcObject = mediaStream;
        callButton.disabled = false;
    }

    // "Start Camera" handler: request audio+video capture; gotStream
    // runs on success, failures are only logged.
    function start() {
        console.log('Requesting local stream');
        startButton.disabled = true;
        const constraints = { audio: true, video: true };
        navigator.mediaDevices.getUserMedia(constraints)
            .then(gotStream)
            .catch(err => console.log('getUserMedia() error: ', err));
    }

    // "Go Live" handler: builds two loopback connection pairs on this
    // page, feeds the captured tracks into both senders, and starts the
    // offer/answer exchange for each pair.
    function call() {
        callButton.disabled = true;
        hangupButton.disabled = false;
        console.log('Starting calls');

        // Report which capture devices ended up in the local stream.
        const [audioTrack] = window.localStream.getAudioTracks();
        const [videoTrack] = window.localStream.getVideoTracks();
        if (audioTrack) {
            console.log(`Using audio device: ${audioTrack.label}`);
        }
        if (videoTrack) {
            console.log(`Using video device: ${videoTrack.label}`);
        }

        // Create an RTCPeerConnection via the polyfill.
        // No ICE servers: both ends of each pair live in this page.
        const servers = null;

        pc1Local = new RTCPeerConnection(servers);
        pc1Remote = new RTCPeerConnection(servers);
        pc1Remote.ontrack = gotRemoteStream1;
        pc1Local.onicecandidate = iceCallback1Local;
        pc1Remote.onicecandidate = iceCallback1Remote;
        console.log('pc1: created local and remote peer connection objects');

        pc2Local = new RTCPeerConnection(servers);
        pc2Remote = new RTCPeerConnection(servers);
        pc2Remote.ontrack = gotRemoteStream2;
        pc2Local.onicecandidate = iceCallback2Local;
        pc2Remote.onicecandidate = iceCallback2Remote;
        console.log('pc2: created local and remote peer connection objects');

        // Tracks must be added before the offer so they appear in the SDP.
        for (const track of window.localStream.getTracks()) {
            pc1Local.addTrack(track, window.localStream);
        }
        console.log('Adding local stream to pc1Local');
        pc1Local.createOffer(offerOptions)
            .then(gotDescription1Local, onCreateSessionDescriptionError);

        for (const track of window.localStream.getTracks()) {
            pc2Local.addTrack(track, window.localStream);
        }
        console.log('Adding local stream to pc2Local');
        pc2Local.createOffer(offerOptions)
            .then(gotDescription2Local, onCreateSessionDescriptionError);
    }

  // Logs SDP negotiation failures; the call is not retried.
  function onCreateSessionDescriptionError(error) {
    const reason = error.toString();
    console.log(`Failed to create session description: ${reason}`);
  }

  // pc1Local produced an offer: install it on both ends of the pair and
  // kick off the answer.
  // Fixes two defects in the original: the setLocalDescription /
  // setRemoteDescription promises were floating (failures surfaced only
  // as unhandled rejections), and createAnswer was fired without waiting
  // for setRemoteDescription to resolve.
  function gotDescription1Local(desc) {
    pc1Local.setLocalDescription(desc)
      .catch(onCreateSessionDescriptionError);
    console.log(`Offer from pc1Local\n${desc.sdp}`);
    // Since the 'remote' side has no media stream, the offer itself
    // (offerToReceiveAudio/Video) already asks for both media kinds, so
    // the answer needs no extra constraints.
    pc1Remote.setRemoteDescription(desc)
      .then(() => pc1Remote.createAnswer())
      .then(gotDescription1Remote, onCreateSessionDescriptionError);
  }

  // pc1Remote produced an answer: install it on both ends to complete
  // the pc1 offer/answer exchange.
  // Fixes the original's floating promises — SDP failures are now
  // reported instead of becoming unhandled rejections.
  function gotDescription1Remote(desc) {
    pc1Remote.setLocalDescription(desc)
      .catch(onCreateSessionDescriptionError);
    console.log(`Answer from pc1Remote\n${desc.sdp}`);
    pc1Local.setRemoteDescription(desc)
      .catch(onCreateSessionDescriptionError);
  }

  // pc2Local produced an offer: install it on both ends of the pair and
  // kick off the answer.
  // Fixes two defects in the original: the setLocalDescription /
  // setRemoteDescription promises were floating (failures surfaced only
  // as unhandled rejections), and createAnswer was fired without waiting
  // for setRemoteDescription to resolve.
  function gotDescription2Local(desc) {
    pc2Local.setLocalDescription(desc)
      .catch(onCreateSessionDescriptionError);
    console.log(`Offer from pc2Local\n${desc.sdp}`);
    // Since the 'remote' side has no media stream, the offer itself
    // (offerToReceiveAudio/Video) already asks for both media kinds, so
    // the answer needs no extra constraints.
    pc2Remote.setRemoteDescription(desc)
      .then(() => pc2Remote.createAnswer())
      .then(gotDescription2Remote, onCreateSessionDescriptionError);
  }

  // pc2Remote produced an answer: install it on both ends to complete
  // the pc2 offer/answer exchange.
  // Fixes the original's floating promises — SDP failures are now
  // reported instead of becoming unhandled rejections.
  function gotDescription2Remote(desc) {
    pc2Remote.setLocalDescription(desc)
      .catch(onCreateSessionDescriptionError);
    console.log(`Answer from pc2Remote\n${desc.sdp}`);
    pc2Local.setRemoteDescription(desc)
      .catch(onCreateSessionDescriptionError);
  }

  // "Stop Stream" handler: tears down all four connections, drops the
  // references, and restores the buttons so the call can be restarted.
  function hangup() {
    console.log('Ending calls');
    for (const pc of [pc1Local, pc1Remote, pc2Local, pc2Remote]) {
      pc.close();
    }
    pc1Local = pc1Remote = null;
    pc2Local = pc2Remote = null;
    hangupButton.disabled = true;
    callButton.disabled = false;
  }

  // ontrack handler for pc1Remote: attach the incoming stream to its
  // video sink, but only when it actually changes.
  function gotRemoteStream1(e) {
    const [stream] = e.streams;
    if (video2.srcObject !== stream) {
      video2.srcObject = stream;
      console.log('pc1: received remote stream');
    }
  }

  // ontrack handler for pc2Remote: attach the incoming stream to its
  // video sink, but only when it actually changes.
  // NOTE(review): the HTML shown above has no #video3 element, so
  // `video3` may be null and this would throw — confirm the markup.
  function gotRemoteStream2(e) {
    const [stream] = e.streams;
    if (video3.srcObject !== stream) {
      video3.srcObject = stream;
      console.log('pc2: received remote stream');
    }
  }

  // ICE plumbing: each candidate gathered on one side of a pair is fed
  // directly into the other side — there is no signaling channel in
  // this single-page demo.
  function iceCallback1Local(e) {
    handleCandidate(e.candidate, pc1Remote, 'pc1: ', 'local');
  }

  function iceCallback1Remote(e) {
    handleCandidate(e.candidate, pc1Local, 'pc1: ', 'remote');
  }

  function iceCallback2Local(e) {
    handleCandidate(e.candidate, pc2Remote, 'pc2: ', 'local');
  }

  function iceCallback2Remote(e) {
    handleCandidate(e.candidate, pc2Local, 'pc2: ', 'remote');
  }

  // Delivers an ICE candidate to the destination connection and logs it.
  // A null candidate signals the end of gathering.
  function handleCandidate(candidate, dest, prefix, type) {
    const outcome = dest.addIceCandidate(candidate);
    outcome.then(onAddIceCandidateSuccess, onAddIceCandidateError);
    const text = candidate ? candidate.candidate : '(null)';
    console.log(`${prefix}New ${type} ICE candidate: ${text}`);
  }

  // Confirmation log for a successfully delivered ICE candidate.
  function onAddIceCandidateSuccess() {
    console.log('AddIceCandidate success.');
  }

  // Logs a failed ICE candidate delivery; the candidate is not retried.
  function onAddIceCandidateError(error) {
    const reason = error.toString();
    console.log(`Failed to add ICE candidate: ${reason}`);
  }
</script>

<script>
'use strict';
  // NOTE(review): this is a Nightwatch end-to-end UI test (it drives a
  // `browser` object and uses ES-module `export default`). It cannot run
  // inside a plain <script> tag on this page — `export` is only valid in
  // a module context — and appears to have been pasted in from the
  // demo's test suite. Keep it in a separate test file.
  export default {
    // Walks the demo UI: load the page, check the initial button state,
    // start the camera, place the call, and verify all three <video>
    // elements reach playback readiness.
    'Video and buttons state change during multiple peer connection setup': (browser) => {
      // Demo page under test, loaded from the local checkout via file://.
      const path = '/src/content/peerconnection/multiple/index.html';
      const url = 'file://' + process.cwd() + path;

      browser.url(url).waitForElementVisible('#startButton', 1000, 'Check that the start button is visible');
      // readyState 0 (HAVE_NOTHING): no media attached yet.
      browser.waitForReadyState('#video1', 0, 1000);
      browser.waitForReadyState('#video2', 0, 1000);
      browser.waitForReadyState('#video3', 0, 1000);
      browser.expect.element('#callButton').to.not.be.enabled.before(1000);
      browser.click('#startButton');
      // readyState 4 (HAVE_ENOUGH_DATA): local preview is playing.
      browser.waitForReadyState('#video1', 4, 1000);
      browser.expect.element('#callButton').to.be.enabled.before(1000);
      browser.expect.element('#hangupButton').to.not.be.enabled.before(1000);
      browser.click('#callButton');
      browser.waitForReadyState('#video2', 4, 1000);
      browser.waitForReadyState('#video3', 4, 1000);
      browser.expect.element('#hangupButton').to.be.enabled.before(1000);
      browser.end();
    }
  };
</script>

0 个答案:

没有答案