Edit: I need to do this live. I can't wait until the stream ends.
I have a MediaStream object that I obtained from Electron's desktopCapturer:
navigator.mediaDevices.getUserMedia({
  audio: false,
  video: {
    mandatory: {
      chromeMediaSource: 'desktop',
      chromeMediaSourceId: source.id,
      minWidth: 800,
      maxWidth: 800,
      minHeight: 800,
      maxHeight: 800,
    },
  },
})
.then((stream) => {
Inside the .then callback I am trying to use ImageCapture to grab still frames as Node Buffers:
  const track = stream.getVideoTracks()[0];
  const capturedImage = new ImageCapture(track);
  capturedImage
    .takePhoto() // This takes 200ms for 1000x1000
    .then(blob => {
      // toBuffer is presumably the 'blob-to-buffer' npm module
      toBuffer(blob, function (err, buffer) { // 1.5 ms
        if (err) throw err;
        // TODO: Do some opencv magic with node buffer
      });
    })
    .catch(error => console.error('takePhoto() error:', error));
});
But takePhoto takes far too long. Is it possible to make this faster? Can I somehow access the MediaStream directly from Node.js?
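For reference, the source.id in the constraints above comes from Electron's desktopCapturer; my snippet does not show that part, so here is a minimal sketch of how the source might be picked (older Electron exposes desktopCapturer in the renderer and newer versions move it to the main process, so treat this as an assumption about the setup):

// Sketch: picking a capture source. Assumes desktopCapturer is reachable from this process.
const { desktopCapturer } = require('electron');

desktopCapturer.getSources({ types: ['screen'] }).then((sources) => {
  const source = sources[0]; // first screen; source.id feeds chromeMediaSourceId above
  // ...then call navigator.mediaDevices.getUserMedia with the constraints shown above
});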
Answer 0 (score: 2)
This is what I ended up doing. It is actually very performant.
}).then((stream) => {
  const video = document.createElement('video');
  video.srcObject = stream;
  video.onloadedmetadata = () => {
    video.play();
    setInterval(() => {
      const canvas = document.createElement('canvas');
      canvas.width = 800;  // match the capture size so the frame isn't clipped
      canvas.height = 800;
      canvas.getContext('2d').drawImage(video, 0, 0, 800, 800);
      canvas.toBlob(blob => {
        toBuffer(blob, function (err, buffer) {
          if (err) throw err;
          // do some magic with buffer
        });
      });
    }, 40); // ~25 frames per second
  };
});
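A possible refinement (not what the answer above does): if the buffer ultimately goes to OpenCV, it may be cheaper to reuse one canvas and read raw RGBA pixels with getImageData instead of encoding a blob on every tick. A minimal sketch, assuming an Electron renderer with Node integration so Buffer is available, and reusing the video element from the answer above:

// Sketch: reuse a single canvas and hand raw RGBA bytes to Node, skipping blob encoding.
const canvas = document.createElement('canvas');
canvas.width = 800;
canvas.height = 800;
const ctx = canvas.getContext('2d');

setInterval(() => {
  ctx.drawImage(video, 0, 0, 800, 800);               // 'video' is the element created above
  const { data } = ctx.getImageData(0, 0, 800, 800);  // Uint8ClampedArray of RGBA bytes
  const buffer = Buffer.from(data.buffer);            // Buffer view over the same memory
  // do some magic with buffer (e.g. pass raw RGBA to an opencv binding)
}, 40);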