<video>使用来自<canvas>的MediaRecorder()使用canvas.captureStream()回放录制的流,在firefox,chrome中呈现不同的颜色

时间:2016-10-08 22:09:12

标签: javascript firefox chromium capture mediarecorder-api

MediaRecorder-examples/record-canvas-to-video.js

使用原始javascript
  

软件要求

     
      
  • Firefox 45.这是一个Firefox技术演示。因此,如果它没有实现我们正在演示的内容,它可能无法在您的浏览器上运行。在   写作时间(2016年1月),您需要下载Firefox   Developer Edition或Firefox Nightly。
  •   
window.onload = function () {
  var video = document.getElementById('video');
  var canvas = document.getElementById('canvas');
  var width = canvas.width;
  var height = canvas.height;
  var capturing = false;

  video.width = width;
  video.height = height;

  // We need the 2D context to individually manipulate pixel data
  var ctx = canvas.getContext('2d');

  // Start with a black background
  ctx.fillStyle = '#000';
  ctx.fillRect(0, 0, width, height);

  // Since we're continuously accessing and overwriting the pixels
  // object, we'll request it once and reuse it across calls to draw()
  // for best performance (we don't need to create ImageData objects
  // on every frame)
  var pixels = ctx.getImageData(0, 0, width, height);
  var data = pixels.data;
  var numPixels = data.length;

  var stream = canvas.captureStream(15);
  var recorder = new MediaRecorder(stream);

  recorder.addEventListener('dataavailable', finishCapturing);

  startCapturing();
  recorder.start();

  setTimeout(function() {
    recorder.stop();
  }, 2000);


  function startCapturing() {
    capturing = true;
    draw();
  }


  function finishCapturing(e) {
    capturing = false;
    var videoData = [ e.data ];
    var blob = new Blob(videoData, { 'type': 'video/webm' });
    var videoURL = URL.createObjectURL(blob);
    video.src = videoURL;
    video.play();
  }


  function draw() {
    // We don't want to render again if we're not capturing
    if(capturing) {
      requestAnimationFrame(draw);
    }
    drawWhiteNoise();
  }


  function drawWhiteNoise() {
    var offset = 0;

    for(var i = 0; i < numPixels; i++) {
      var grey = Math.round(Math.random() * 255);

      // The data array has pixel values in RGBA order
      // (Red, Green, Blue and Alpha for transparency)
      // We will make R, G and B have the same value ('grey'),
      // then skip the Alpha value by increasing the offset,
      // as we're happy with the opaque value we set when painting
      // the background black at the beginning
      data[offset++] = grey;
      data[offset++] = grey;
      data[offset++] = grey;
      offset++; // skip the alpha component
    }

    // And tell the context to draw the updated pixels in the canvas
    ctx.putImageData(pixels, 0, 0);
  }

};

在铬55处产生错误

Uncaught (in promise) DOMException: The play() request was interrupted by a new load request.

Failed to load resource: the server responded with a status of 416 (Requested Range Not Satisfiable)

虽然在firefox 52返回预期的结果。

通过javascript Blob事件dataavailableMediaRecorder推送到数组,然后在stop事件

window.onload = function () { var blobs = []; var video = document.getElementById('video'); var canvas = document.getElementById('canvas'); var width = canvas.width; var height = canvas.height; var capturing = false; video.width = width; video.height = height; // We need the 2D context to individually manipulate pixel data var ctx = canvas.getContext('2d'); // Start with a black background ctx.fillStyle = '#000'; ctx.fillRect(0, 0, width, height); // Since we're continuously accessing and overwriting the pixels // object, we'll request it once and reuse it across calls to draw() // for best performance (we don't need to create ImageData objects // on every frame) var pixels = ctx.getImageData(0, 0, width, height); var data = pixels.data; var numPixels = data.length; var stream = canvas.captureStream(15); var recorder = new MediaRecorder(stream); recorder.addEventListener('dataavailable', finishCapturing); recorder.addEventListener('stop', function(e) { video.oncanplay = video.play; video.src = URL.createObjectURL(new Blob(blobs, {type:"video/webm"})); }); startCapturing(); recorder.start(); setTimeout(function() { capturing = false; recorder.stop(); }, 2000); function startCapturing() { capturing = true; draw(); } function finishCapturing(e) { blobs.push(e.data); } function draw() { // We don't want to render again if we're not capturing if(capturing) { requestAnimationFrame(draw); } drawWhiteNoise(); } function drawWhiteNoise() { var offset = 0; for(var i = 0; i < numPixels; i++) { var grey = Math.round(Math.random() * 255); // The data array has pixel values in RGBA order // (Red, Green, Blue and Alpha for transparency) // We will make R, G and B have the same value ('grey'), // then skip the Alpha value by increasing the offset, // as we're happy with the opaque value we set when painting // the background black at the beginning data[offset++] = grey; data[offset++] = grey; data[offset++] = grey; offset++; // skip the alpha component } // And tell the context to draw the updated pixels in the canvas ctx.putImageData(pixels, 0, 0); } }; 以用于铬>
canvas.captureStream()

以类似于firefox的方式呈现录制的流。

然而,在firefox和chrome上播放视频时所做的调整显示出最小的,但是连续的blob之间有明显的延迟。

如何在MediaRecorder()元素下使用<video>呈现// lets hope nobody really uses this address sSave(sPSP,cpm_entry,RealMake(0xDEAD,0xFFFF)); 录制的相同视觉播放?

plnkr http://plnkr.co/edit/KgGpkCJRvPG2T2Jy4wyH?p=preview

1 个答案:

答案 0 :(得分:1)

你在这里从主JS线程驱动动画,所以其他主要的线程JS活动 - 比如ondataavailable回调触发 - 可能会破坏时间到足以引起注意。

尝试从canvas.captureStream()调用中省略(60)帧速率。

MDN说:“如果没有设置,每次画布更改时都会捕获一个新帧;如果设置为0,则会捕获一个帧。”

这应该有希望使输出更加不受这种中断的影响,但代价是缩短其长度。

您还可以使用 start 方法指定timeslice,例如recorder.start(2000)限制何时触发dataavailable事件以避免中断。