I am recording a MediaStream on the client like this:
handleStream(stream) {
  const ws = new WebSocket('ws://localhost:5432/binary');
  const recorder = new MediaRecorder(stream);
  recorder.ondataavailable = function(event) {
    ws.send(event.data);
  };
  recorder.start();
}
The data is received on the server side like this:
const WebSocket = require('ws');
const fs = require('fs');

const wss = new WebSocket.Server({ port: 5432 });
wss.on('connection', function connection(ws) {
  ws.on('message', function incoming(message) {
    writeToDisk(message, 'video.webm');
  });
});

function writeToDisk(dataURL, fileName) {
  const fileBuffer = Buffer.from(dataURL, 'base64');
  fs.writeFileSync(fileName, fileBuffer);
}
It works like a charm, but I want to use a Buffer and have the server side serve a live video stream. Is there any way to do this?
Thanks for your help.
Answer 0 (score: 0)
I have already done this here.
You can use the MediaRecorder class to split the video into chunks and send them to the server for broadcasting.
this._mediaRecorder = new MediaRecorder(this._stream, this._streamOptions);
this._mediaRecorder.ondataavailable = e => this._videoStreamer.pushChunk(e.data);
this._mediaRecorder.start();
...
this._mediaRecorder.requestData()
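requestData() forces an immediate dataavailable event with everything recorded so far. As an alternative (not what the answer above uses), MediaRecorder can also emit chunks on its own if start() is given a timeslice; a minimal sketch:
// Alternative sketch: let the recorder fire ondataavailable automatically,
// roughly every 1000 ms, instead of calling requestData() by hand.
this._mediaRecorder.start(1000);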
Don't forget to restart the recording every so often, so that a newly connected client does not have to download the whole video just to join the stream.
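The restart itself is not shown in the answer. Here is a minimal sketch of one way to do it, reusing this._stream, this._streamOptions and this._videoStreamer from above; the 30-second interval and the pushBlob() method on the streamer are assumptions for illustration:
// Sketch only: restart the recorder at a fixed interval so a new viewer only
// needs the latest header-carrying blob plus the chunks recorded after it.
setInterval(() => {
  this._mediaRecorder.onstop = () => {
    let firstChunk = true;
    this._mediaRecorder = new MediaRecorder(this._stream, this._streamOptions);
    this._mediaRecorder.ondataavailable = e => {
      if (firstChunk) {
        // The first chunk of a new recording starts with a fresh WebM header,
        // so treat it as the new "blob" (hypothetical pushBlob on the streamer).
        this._videoStreamer.pushBlob(e.data);
        firstChunk = false;
      } else {
        this._videoStreamer.pushChunk(e.data);
      }
    };
    this._mediaRecorder.start();
  };
  this._mediaRecorder.stop(); // flushes the last chunk of the old recording first
}, 30 * 1000);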
Also, while switching chunks you should replace the <video> with an <image> or update the video's poster so the transition looks seamless.
async function imageBitmapToBlob(img) {
  return new Promise(res => {
    const canvas = document.createElement('canvas');
    canvas.width = img.width;
    canvas.height = img.height;
    canvas.getContext('2d').drawImage(img, 0, 0);
    canvas.toBlob(res);
  });
}
...
const stream = document.querySelector('video').captureStream();
if (stream.active) {
  const track = stream.getVideoTracks()[0];
  const capturer = new ImageCapture(track);
  const bitmap = await imageBitmapToBlob(await capturer.grabFrame());
  URL.revokeObjectURL(this._oldPosterUrl);
  this._video.poster = this._oldPosterUrl = URL.createObjectURL(bitmap);
  track.stop();
}
You can glue Blob objects together through the Blob constructor. When a new chunk arrives, don't forget to free the memory of the old video with URL.revokeObjectURL() and to restore the video's currentTime after swapping its source:
_updateVideo = async (newBlob = false) => {
  // Grab the current frame and use it as the poster so the source swap is seamless.
  const stream = this._video.captureStream();
  if (stream.active) {
    const track = stream.getVideoTracks()[0];
    const capturer = new ImageCapture(track);
    const bitmap = await imageBitmapToBlob(await capturer.grabFrame());
    URL.revokeObjectURL(this._oldPosterUrl);
    this._video.poster = this._oldPosterUrl = URL.createObjectURL(bitmap);
    track.stop();
  }
  // A new blob (with a fresh header) replaces the accumulated chunks;
  // otherwise all recorded chunks are glued into a single Blob.
  let data = null;
  if (newBlob === true) {
    const index = this._recordedChunks.length - 1;
    data = [this._recordedChunks[index]];
  } else {
    data = this._recordedChunks;
  }
  const blob = new Blob(data, this._options);
  const time = this._video.currentTime;
  URL.revokeObjectURL(this._oldVideoUrl);
  const url = this._oldVideoUrl = URL.createObjectURL(blob);
  if (newBlob === true) {
    this._recordedChunks = [blob];
  }
  this._size = blob.size;
  this._video.src = url;
  this._video.currentTime = time; // restore the playback position after swapping src
}
You should use two WebSockets for broadcasting the video and two for listening. One WebSocket transfers only the video chunks, the second only the new blobs that contain the video header (recording is restarted every so often).
const blobWebSocket = new WebSocket(`ws://127.0.0.1:${blobPort}/`);
blobWebSocket.onmessage = (e) => {
  console.log({ blob: e.data });
  this._videoWorker.pushBlob(e.data);
};
const chunkWebSocket = new WebSocket(`ws://127.0.0.1:${chunkPort}/`);
chunkWebSocket.onmessage = (e) => {
  console.log({ chunk: e.data });
  this._videoWorker.pushChunk(e.data);
};
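pushBlob() and pushChunk() belong to the author's worker and are not shown. A minimal sketch of what they might do, assuming the worker keeps the _recordedChunks array used by _updateVideo() above (the method bodies are assumptions):
// Hypothetical worker methods, not taken from the answer.
pushBlob(blob) {
  // A blob starts a new recording with a fresh header, so _updateVideo(true)
  // keeps only this blob and drops the previously accumulated chunks.
  this._recordedChunks.push(blob);
  this._updateVideo(true);
}
pushChunk(chunk) {
  // Ordinary chunks are appended and glued onto the current blob.
  this._recordedChunks.push(chunk);
  this._updateVideo(false);
}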
After a client connects, the server sends it the entire current video blob and then starts pushing new chunks to it as they arrive.
const WebSocket = require('ws');

const wss = new WebSocket.Server({ port });
let buffer = Buffer.alloc(0);

function chunkHandler(buf, isBlob = false) {
  console.log({ buf, isBlob });
  if (isBlob === true) {
    // A new blob (with a fresh header) resets the accumulated buffer.
    //broadcast(wss, buf);
    buffer = buf;
  } else {
    const totalLength = buffer.length + buf.length;
    buffer = Buffer.concat([buffer, buf], totalLength);
    broadcast(wss, buf);
  }
}

wss.on('connection', function connection(ws) {
  // Send everything recorded so far so the new client can start playback.
  if (buffer.length !== 0) {
    ws.send(buffer);
  }
});
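The broadcast() helper is not included in the answer. A minimal sketch, assuming the standard ws API:
// Send data to every client that is still connected to this server.
function broadcast(wss, data) {
  wss.clients.forEach(client => {
    if (client.readyState === WebSocket.OPEN) {
      client.send(data);
    }
  });
}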