I am trying to cast a live MediaStream (eventually from a camera) from peerA to peerB, and I want peerB to receive the live stream in real time and then replay it with an added delay. Unfortunately it isn't possible to simply pause the stream and resume playing, because it jumps forward to the live moment.
So I figured out that I can use MediaRecorder + SourceBuffer to rewatch the live stream: record the stream, append the buffers to MSE (SourceBuffer), and play it back 5 seconds later.
This works on the local device (stream). But when I try to use MediaRecorder on the receiver's MediaStream (from pc.onaddstream), it looks like it gets some data and is able to append the buffers to the SourceBuffer. However, it does not replay. Sometimes I get just one frame.
const [pc1, pc2] = localPeerConnectionLoop()
const canvasStream = canvas.captureStream(200)
videoA.srcObject = canvasStream
videoA.play()

// Note: using two MediaRecorders at the same time seems problematic
// But this one works
// stream2mediaSorce(canvasStream, videoB)
// setTimeout(videoB.play.bind(videoB), 5000)

pc1.addTransceiver(canvasStream.getTracks()[0], {
  streams: [ canvasStream ]
})

pc2.onaddstream = (evt) => {
  videoC.srcObject = evt.stream
  videoC.play()

  // Note: using two MediaRecorders at the same time seems problematic
  // THIS DOES NOT WORK
  stream2mediaSorce(evt.stream, videoD)
  setTimeout(() => videoD.play(), 2000)
}
/**
* Turn a MediaStream into a SourceBuffer
*
* @param {MediaStream} stream Live Stream to record
* @param {HTMLVideoElement} videoElm Video element to play the recorded video in
* @return {undefined}
*/
function stream2mediaSorce (stream, videoElm) {
  const RECORDER_MIME_TYPE = 'video/webm;codecs=vp9'
  const recorder = new MediaRecorder(stream, { mimeType: RECORDER_MIME_TYPE })

  const mediaSource = new MediaSource()
  videoElm.src = URL.createObjectURL(mediaSource)
  mediaSource.onsourceopen = (e) => {
    const sourceBuffer = mediaSource.addSourceBuffer(RECORDER_MIME_TYPE)

    const fr = new FileReader()
    fr.onerror = console.log
    fr.onload = ({ target }) => {
      console.log(target.result)
      sourceBuffer.appendBuffer(target.result)
    }
    recorder.ondataavailable = ({ data }) => {
      console.log(data)
      fr.readAsArrayBuffer(data)
    }
    setInterval(recorder.requestData.bind(recorder), 1000)
  }

  console.log('Recorder created')
  recorder.start()
}
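For what it's worth, one fragile spot in the MSE path above is that SourceBuffer.appendBuffer() throws an InvalidStateError if it is called while a previous append is still being processed (sourceBuffer.updating === true). A small queue drained on the 'updateend' event avoids that; this is only a sketch of that idea (makeAppender is not part of the fiddle), not necessarily a fix for the one-frame symptom:

// Sketch: queue incoming chunks and only append when the SourceBuffer is idle.
function makeAppender (sourceBuffer) {
  const queue = []
  const drain = () => {
    if (sourceBuffer.updating || queue.length === 0) return
    sourceBuffer.appendBuffer(queue.shift())
  }
  // 'updateend' fires after each append has been fully processed.
  sourceBuffer.addEventListener('updateend', drain)
  return (arrayBuffer) => {
    queue.push(arrayBuffer)
    drain()
  }
}

// Usage inside onsourceopen, instead of calling appendBuffer directly:
// const append = makeAppender(sourceBuffer)
// fr.onload = ({ target }) => append(target.result)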
Do you know why it can't play the video?
I have created a fiddle with all the necessary code to try it out; the JavaScript tab contains the same code as above (the HTML is mostly irrelevant and does not need to be changed).
Some people try to reduce the latency, but I actually want to increase it to ~10 seconds, to rewatch what you did wrong in a golf swing or the like, and, if possible, avoid MediaRecorder altogether.
EDIT: I found something called "playout-delay" in some RTC extension
that allows the sender to control the minimum and maximum delay from capture to render time
How can I use it? Will it be of any help to me?
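As far as I can tell, playout-delay is negotiated as an RTP header extension in the SDP and written by the sender per packet, so there does not seem to be a direct JavaScript setter for it. The rough sketch below only checks whether a browser offers the extension at all (the a=extmap line Chrome advertises appears to contain the string "playout-delay"):

// Sketch: check whether the browser offers the playout-delay RTP header extension.
async function offersPlayoutDelay () {
  const pc = new RTCPeerConnection()
  pc.addTransceiver('video')
  const { sdp } = await pc.createOffer()
  pc.close()
  return sdp.includes('playout-delay')
}

offersPlayoutDelay().then(supported => console.log('playout-delay offered:', supported))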
Answer 0 (score: 0)
Update: there is a new feature that enables this, called jitterBufferDelayHint.
Native WebRTC exposes ways to increase the delay of a remote source. Increased delay opens up space for other optimizations and better audio/video quality. We intend to create an origin trial that lets users set this value and read it from the JavaScript layer.
Reference: https://www.chromestatus.com/feature/5685279400656896
Demo: https://jsfiddle.net/brx5n3uh/
I could only set it to a maximum of 10 seconds in my browser, but it is up to the UA vendor to do its best with the available resources.
const [pc1, pc2] = localPeerConnectionLoop()
const canvasStream = canvas.captureStream(200)
videoA.srcObject = canvasStream
videoA.play()

pc1.addTransceiver(canvasStream.getTracks()[0], {
  streams: [ canvasStream ]
})

pc2.onaddstream = (evt) => {
  videoC.srcObject = evt.stream
  videoC.play()
}

$dur.onchange = () => {
  pc2.getReceivers()[0].jitterBufferDelayHint = $dur.valueAsNumber
}
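Note that newer Chrome builds appear to expose this hint on the receiver as playoutDelayHint rather than jitterBufferDelayHint, so a more defensive version of the handler above could try both names (a sketch, assuming the value is still given in seconds):

$dur.onchange = () => {
  const [receiver] = pc2.getReceivers()
  const seconds = $dur.valueAsNumber
  // Newer Chrome uses playoutDelayHint; older experimental builds used jitterBufferDelayHint.
  if ('playoutDelayHint' in receiver) {
    receiver.playoutDelayHint = seconds
  } else {
    receiver.jitterBufferDelayHint = seconds
  }
}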
<h3 style="border-bottom: 1px solid">Original canvas</h3>
<canvas id="canvas" width="100" height="100"></canvas>
<script>
var canvas = document.getElementById("canvas");
var ctx = canvas.getContext("2d");
var radius = canvas.height / 2;
ctx.translate(radius, radius);
radius = radius * 0.90
setInterval(drawClock, 1000);
function drawClock() {
  drawFace(ctx, radius);
  drawNumbers(ctx, radius);
  drawTime(ctx, radius);
}
function drawFace(ctx, radius) {
  var grad;
  ctx.beginPath();
  ctx.arc(0, 0, radius, 0, 2*Math.PI);
  ctx.fillStyle = 'white';
  ctx.fill();
  grad = ctx.createRadialGradient(0,0,radius*0.95, 0,0,radius*1.05);
  grad.addColorStop(0, '#333');
  grad.addColorStop(0.5, 'white');
  grad.addColorStop(1, '#333');
  ctx.strokeStyle = grad;
  ctx.lineWidth = radius*0.1;
  ctx.stroke();
  ctx.beginPath();
  ctx.arc(0, 0, radius*0.1, 0, 2*Math.PI);
  ctx.fillStyle = '#333';
  ctx.fill();
}
function drawNumbers(ctx, radius) {
  var ang;
  var num;
  ctx.font = radius*0.15 + "px arial";
  ctx.textBaseline = "middle";
  ctx.textAlign = "center";
  for (num = 1; num < 13; num++) {
    ang = num * Math.PI / 6;
    ctx.rotate(ang);
    ctx.translate(0, -radius*0.85);
    ctx.rotate(-ang);
    ctx.fillText(num.toString(), 0, 0);
    ctx.rotate(ang);
    ctx.translate(0, radius*0.85);
    ctx.rotate(-ang);
  }
}
function drawTime(ctx, radius) {
  var now = new Date();
  var hour = now.getHours();
  var minute = now.getMinutes();
  var second = now.getSeconds();
  // hour
  hour = hour % 12;
  hour = (hour * Math.PI / 6) +
    (minute * Math.PI / (6 * 60)) +
    (second * Math.PI / (360 * 60));
  drawHand(ctx, hour, radius*0.5, radius*0.07);
  // minute
  minute = (minute * Math.PI / 30) + (second * Math.PI / (30 * 60));
  drawHand(ctx, minute, radius*0.8, radius*0.07);
  // second
  second = second * Math.PI / 30;
  drawHand(ctx, second, radius*0.9, radius*0.02);
}
function drawHand(ctx, pos, length, width) {
  ctx.beginPath();
  ctx.lineWidth = width;
  ctx.lineCap = "round";
  ctx.moveTo(0, 0);
  ctx.rotate(pos);
  ctx.lineTo(0, -length);
  ctx.stroke();
  ctx.rotate(-pos);
}
function localPeerConnectionLoop(cfg = {sdpSemantics: 'unified-plan'}) {
  const setD = (d, a, b) => Promise.all([a.setLocalDescription(d), b.setRemoteDescription(d)]);
  return [0, 1].map(() => new RTCPeerConnection(cfg)).map((pc, i, pcs) => Object.assign(pc, {
    onicecandidate: e => e.candidate && pcs[i ^ 1].addIceCandidate(e.candidate),
    onnegotiationneeded: async e => {
      try {
        await setD(await pc.createOffer(), pc, pcs[i ^ 1]);
        await setD(await pcs[i ^ 1].createAnswer(), pcs[i ^ 1], pc);
      } catch (e) {
        console.log(e);
      }
    }
  }));
}
</script>
<h3 style="border-bottom: 1px solid">Local peer (PC1)</h3>
<video id="videoA" muted width="100" height="100"></video>
<h3 style="border-bottom: 1px solid">Remote peer (PC2)</h3>
<video id="videoC" muted width="100" height="100"></video>
<label> Change jitterBufferDelayHint
<input type="number" value="1" id="$dur">
</label>