In MediaRecorder-examples/record-canvas-to-video.js:

Software requirements:

- Firefox 45. This is a Firefox technology demo, so if your browser does not implement what is being demonstrated, it may not work. At the time of writing (January 2016), you need to download Firefox Developer Edition or Firefox Nightly.

```javascript
window.onload = function () {
  var video = document.getElementById('video');
  var canvas = document.getElementById('canvas');
  var width = canvas.width;
  var height = canvas.height;
  var capturing = false;

  video.width = width;
  video.height = height;

  // We need the 2D context to individually manipulate pixel data
  var ctx = canvas.getContext('2d');

  // Start with a black background
  ctx.fillStyle = '#000';
  ctx.fillRect(0, 0, width, height);

  // Since we're continuously accessing and overwriting the pixels
  // object, we'll request it once and reuse it across calls to draw()
  // for best performance (we don't need to create ImageData objects
  // on every frame)
  var pixels = ctx.getImageData(0, 0, width, height);
  var data = pixels.data;
  var numPixels = data.length;

  var stream = canvas.captureStream(15);
  var recorder = new MediaRecorder(stream);
  recorder.addEventListener('dataavailable', finishCapturing);

  startCapturing();
  recorder.start();

  setTimeout(function() {
    recorder.stop();
  }, 2000);

  function startCapturing() {
    capturing = true;
    draw();
  }

  function finishCapturing(e) {
    capturing = false;
    var videoData = [ e.data ];
    var blob = new Blob(videoData, { 'type': 'video/webm' });
    var videoURL = URL.createObjectURL(blob);
    video.src = videoURL;
    video.play();
  }

  function draw() {
    // We don't want to render again if we're not capturing
    if (capturing) {
      requestAnimationFrame(draw);
    }
    drawWhiteNoise();
  }

  function drawWhiteNoise() {
    var offset = 0;

    for (var i = 0; i < numPixels; i++) {
      var grey = Math.round(Math.random() * 255);

      // The data array has pixel values in RGBA order
      // (Red, Green, Blue and Alpha for transparency)
      // We will make R, G and B have the same value ('grey'),
      // then skip the Alpha value by increasing the offset,
      // as we're happy with the opaque value we set when painting
      // the background black at the beginning
      data[offset++] = grey;
      data[offset++] = grey;
      data[offset++] = grey;
      offset++; // skip the alpha component
    }

    // And tell the context to draw the updated pixels in the canvas
    ctx.putImageData(pixels, 0, 0);
  }
};
```
At Chrome 55 this produces the errors

```
Uncaught (in promise) DOMException: The play() request was interrupted by a new load request.
Failed to load resource: the server responded with a status of 416 (Requested Range Not Satisfiable)
```

while Firefox 52 returns the expected result.
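For reference, a diagnostic sketch, not part of the original example: in Chrome, `video.play()` returns a promise, and the `DOMException` above is the rejection produced when a new load (for example, reassigning `video.src`) interrupts a pending `play()`. Handling that promise at least makes the failure explicit instead of an uncaught rejection:

```javascript
// Diagnostic sketch only: catch the rejection that Chrome reports when a
// new load request interrupts a pending play() call.
var playPromise = video.play();
if (playPromise !== undefined) {
  playPromise.catch(function (err) {
    console.error('playback was interrupted:', err);
  });
}
```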
Using javascript, I was able to push the `Blob` from each `MediaRecorder` `dataavailable` event into an array, then concatenate them at the `stop` event, which renders the stream recorded from `canvas.captureStream()` in a way similar to Firefox. However, with that adjustment, playback of the video in both Firefox and Chrome shows a minimal, yet noticeable, delay between consecutive blobs.
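A minimal sketch of that workaround, assuming the `recorder` and `video` variables from the example above and replacing the single-Blob `finishCapturing` handler; the handler bodies are illustrative, not taken from the original post:

```javascript
// Collect every chunk the recorder delivers (there may be more than one),
// then assemble a single Blob once recording stops.
var chunks = [];

recorder.addEventListener('dataavailable', function (e) {
  if (e.data && e.data.size > 0) {
    chunks.push(e.data);
  }
});

recorder.addEventListener('stop', function () {
  var blob = new Blob(chunks, { type: 'video/webm' });
  video.src = URL.createObjectURL(blob);
  video.play();
});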
How can the recording made with `MediaRecorder()` be rendered in the `<video>` element in Chrome with the same visual playback as in Firefox?