WebRTC: renegotiating a peer connection to switch streams

Date: 2015-07-01 15:15:30

Tags: javascript node.js video webrtc

I have a script where two users can chat using WebRTC. When two users enter a chat room, a text chat starts automatically. I want to add a button that turns on video chat. For example, with two users, user_1 and user_2: when they enter the room the text chat starts and they can send each other messages; when user_1 clicks the video icon, user_2 should see user_1, and likewise when user_2 clicks the video icon. Here is the code I'm using now, but it doesn't work correctly: when I'm chatting with someone and click the video icon, I can see myself, but the other user can't see me. I haven't posted all of the code because it's over 300 lines, but I hope you can help me modify this to make it work. Thanks in advance, everyone.

var pc_config = webrtcDetectedBrowser === 'firefox' ?
{'iceServers':[{'url':'stun:23.21.150.121'}]} : // IP address
{'iceServers': [{'url': 'stun:stun.l.google.com:19302'}]};
var pc_constraints = {
  'optional': [
    {'DtlsSrtpKeyAgreement': true},
    {'RtpDataChannels': true}
  ]};
var sdpConstraints = {'mandatory': {
  'OfferToReceiveAudio':true,
  'OfferToReceiveVideo':true }};
var constraints = {video: true, audio: true};
var v_on_off = false;
v_call.on('click', function(){
  if (!v_on_off) {
    navigator.getUserMedia(constraints, handleUserMedia, handleUserMediaError);
    if (isInitiator) {
      maybeStart();
    }
    v_on_off = true;
  } else {
    // stop stream
  }
});
function handleUserMedia(stream) {
        localStream = stream;
        attachMediaStream(localVideo, stream);
        sendMessage('got user media');
}
var socket = io.connect();
if (room !== '') {
  socket.emit('create or join', room);
}
socket.on('created', function (room){
  isInitiator = true;
});
socket.on('join', function (room){
  isChannelReady = true;
});
socket.on('joined', function (room){
  isChannelReady = true;
});
function sendMessage(message){
  socket.emit('message', message);
}
// this will start a text chat between two peers
sendMessage('got user media');
if (isInitiator) {
  maybeStart();
}
socket.on('message', function (message){
  console.log('Received message:', message);
  if (message === 'got user media') {
    maybeStart();
  } else if (message.type === 'offer') {
    if (!isInitiator && !isStarted) {
      maybeStart();
    }
    pc.setRemoteDescription(new RTCSessionDescription(message));
    doAnswer();
  } else if (message.type === 'answer' && isStarted) {
    pc.setRemoteDescription(new RTCSessionDescription(message));
  } else if (message.type === 'candidate' && isStarted) {
    var candidate = new RTCIceCandidate({sdpMLineIndex:message.label,
      candidate:message.candidate});
    pc.addIceCandidate(candidate);
  } else if (message === 'bye' && isStarted) {
    handleRemoteHangup();
  }
});
function maybeStart() {
  if (!isStarted && isChannelReady) {
    createPeerConnection();
    isStarted = true;
    if (isInitiator) {
      doCall();
    }
  }
}
function createPeerConnection() {
  try {
    pc = new RTCPeerConnection(pc_config, pc_constraints);
    if (typeof localStream != 'undefined') {
        pc.addStream(localStream);
    }
    pc.onicecandidate = handleIceCandidate;
  } catch (e) {
    alert('Cannot create RTCPeerConnection object.');
      return;
  }
  pc.onaddstream = handleRemoteStreamAdded;
  pc.onremovestream = handleRemoteStreamRemoved;
  if (isInitiator) {
    try {
      // Reliable Data Channels not yet supported in Chrome
      sendChannel = pc.createDataChannel("sendDataChannel",
        {reliable: false});
      sendChannel.onmessage = handleMessage;
      trace('Created send data channel');
    } catch (e) {
      alert('Failed to create data channel. ' +
            'You need Chrome M25 or later with RtpDataChannel enabled');
      trace('createDataChannel() failed with exception: ' + e.message);
    }
    sendChannel.onopen = handleSendChannelStateChange;
    sendChannel.onclose = handleSendChannelStateChange;
  } else {
    pc.ondatachannel = gotReceiveChannel;
  }
}
function gotReceiveChannel(event) {
  trace('Receive Channel Callback');
  sendChannel = event.channel;
  sendChannel.onmessage = handleMessage;
  sendChannel.onopen = handleReceiveChannelStateChange;
  sendChannel.onclose = handleReceiveChannelStateChange;
}
function handleSendChannelStateChange() {
  var readyState = sendChannel.readyState;
  trace('Send channel state is: ' + readyState);
  enableMessageInterface(readyState == "open");
}
function handleReceiveChannelStateChange() {
  var readyState = sendChannel.readyState;
  trace('Receive channel state is: ' + readyState);
  enableMessageInterface(readyState == "open");
}
function handleIceCandidate(event) {
  console.log('handleIceCandidate event: ', event);
  if (event.candidate) {
    sendMessage({
      type: 'candidate',
      label: event.candidate.sdpMLineIndex,
      id: event.candidate.sdpMid,
      candidate: event.candidate.candidate});
  } else {
    console.log('End of candidates.');
  }
}
function doCall() {
  var constraints = {'optional': [], 'mandatory': {'MozDontOfferDataChannel': true}};
  // temporary measure to remove Moz* constraints in Chrome
  if (webrtcDetectedBrowser === 'chrome') {
    for (var prop in constraints.mandatory) {
      if (prop.indexOf('Moz') !== -1) {
        delete constraints.mandatory[prop];
      }
     }
   }
  constraints = mergeConstraints(constraints, sdpConstraints);
  console.log('Sending offer to peer, with constraints: \n' +
    '  \'' + JSON.stringify(constraints) + '\'.');
  pc.createOffer(setLocalAndSendMessage, null, constraints);
}
function doAnswer() {
  console.log('Sending answer to peer.');
  pc.createAnswer(setLocalAndSendMessage, null, sdpConstraints);
}
function handleRemoteStreamAdded(event) {
  console.log('Remote stream added.');
  attachMediaStream(remoteVideo, event.stream);
  remoteStream = event.stream;
}  

Here is the server-side code:

socket.on('message', function (message) {
    // channel-only broadcast...
    socket.broadcast.to(message.channel).emit('message', message);
});
// Handle 'create or join' messages
socket.on('create or join', function (room) {
    var numClients = io.sockets.clients(room).length;
    // First client joining...
    if (numClients == 0){
        socket.join(room);
        socket.emit('created', room);
    } else if (numClients == 1) {
        io.sockets.in(room).emit('join', room);
        socket.join(room);
        socket.emit('joined', room);
    } else { 
    socket.emit('full', room);
    }
});

2 Answers:

Answer 0: (score: 6)

Renegotiation

In short, to add video or audio to an existing connection, you need to renegotiate the connection every time the media changes. Basically, you register a listener:

pc.onnegotiationneeded = function() {
  pc.createOffer(setLocalAndSendMessage, null);
};

which kicks off another round trip of the offer/answer exchange, just like the one that established the connection in the first place.

Once that is in place, the negotiationneeded event will be fired by whatever operation requires renegotiation. For example, an "Add Video" button:

AddVideoButton.onclick = function() {
  navigator.getUserMedia(constraints, handleUserMedia, handleUserMediaError);
};
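In your current code, handleUserMedia only attaches the stream to the local video element and signals 'got user media', and maybeStart() bails out because the connection is already started; as far as I can tell, that is why you see yourself while the other peer never receives anything. Below is only a rough sketch of how handleUserMedia could instead add the newly acquired stream to the already-established connection, which is what triggers negotiationneeded. It reuses pc, localStream, localVideo, attachMediaStream and sendMessage from your own code and is untested against it:

// Sketch only: add the camera stream to the existing connection so that
// negotiationneeded fires and a fresh offer is sent to the other peer.
function handleUserMedia(stream) {
  localStream = stream;
  attachMediaStream(localVideo, stream);
  if (typeof pc !== 'undefined' && pc) {
    pc.addStream(stream);          // fires onnegotiationneeded on this side
  } else {
    sendMessage('got user media'); // no connection yet: fall back to the initial flow
  }
}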

Once both sides have made this update, you should be in business.

For a complete example, see my answer to a similar question (Firefox only because of arrow functions, sorry).

The standard:

It looks like you are using the adapter.js cross-browser WebRTC polyfill to deal with browser differences, which is great! However, other parts of your example are Chrome-specific or outdated and won't work in other browsers unless you follow the standard. You haven't tagged your question as Chrome-specific, so if you don't mind:

The browser detection in pc_config hasn't been needed since Firefox 32 (a year ago). Instead, I would use (note the plural urls):

var config = { iceServers: [{urls: 'stun:stun.l.google.com:19302'}] };

pc_constraints (early Chrome) and sdpConstraints are non-standard. createOffer now takes RTCOfferOptions, a plain dictionary, rather than constraints (also note the lowercase 'o'):

var options = { offerToReceiveAudio: true, offerToReceiveVideo: true };
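With a recent adapter.js, the promise-based form of the standard API would then look roughly like this. This is only a sketch, using sendMessage from the question as the signaling helper, and I haven't run it against your code:

// Sketch: standard promise-based createOffer with RTCOfferOptions.
pc.createOffer(options)
  .then(function (offer) { return pc.setLocalDescription(offer); })
  .then(function () { sendMessage(pc.localDescription); })
  .catch(function (err) { console.log('createOffer failed: ' + err); });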

If you are using the latest version of adapter.js, this should work.

Finally, data channels over RTP are non-standard (and I don't think they're needed anymore?)
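In other words, on current browsers the peer connection and the data channel can be created without any of those legacy constraints, something like this (again just a sketch, reusing the config variable from above):

// Sketch: no legacy constraints; data channels are reliable (SCTP) by default.
var pc = new RTCPeerConnection(config);
var sendChannel = pc.createDataChannel('sendDataChannel');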

Answer 1: (score: 0)

You just have to set DtlsSrtpKeyAgreement to true in pc_constraints and it will work fine.
