Extracting frames from a video in JS

Date: 2017-06-01 01:38:48

Tags: javascript video canvas frame

I'm trying to build a function that extracts frames from a video in JavaScript. Here is the code I've come up with. The function takes a source and a callback. From there, I create a video element with that source, and I want to draw the video frames onto a canvas at a set interval.

Unfortunately, the frames that come back are all transparent images.

I've tried a few different things, but I can't get it to work. Can anyone help? Thanks.

const extractFramesFromVideo = function(src, callback) {
  var video = document.createElement('video');
  video.src = src;

  video.addEventListener('loadeddata', function() {
    var canvas = document.createElement('canvas');
    var context = canvas.getContext('2d');
    canvas.setAttribute('width', video.videoWidth);
    canvas.setAttribute('height', video.videoHeight);

    var frames = [];
    var fps = 1; // Frames per second to extract
    var interval = 1 / fps; // Frame interval
    var maxDuration = 10; // 10 seconds max duration
    var currentTime = 0; // Start at 0

    while (currentTime < maxDuration) {
      video.currentTime = currentTime; 
      context.drawImage(video, 0, 0, video.videoWidth, video.videoHeight);
      var base64ImageData = canvas.toDataURL();
      frames.push(base64ImageData);

      currentTime += interval;

      if (currentTime >= maxDuration) {
        console.log(frames);
        callback(frames);
      }
    }
  });
}

export default extractFramesFromVideo;

2 Answers:

Answer 0 (score: 1)

After adjusting the code to wait for the seeked event (setting video.currentTime only requests a seek, so drawing before the seek completes produces blank frames) and fixing a few bits and pieces, it seems to work correctly:

async function extractFramesFromVideo(videoUrl, fps=25) {
  return new Promise(async (resolve) => {

    // fully download it first (no buffering):
    let videoBlob = await fetch(videoUrl).then(r => r.blob());
    let videoObjectUrl = URL.createObjectURL(videoBlob);
    let video = document.createElement("video");

    let seekResolve;
    video.addEventListener('seeked', async function() {
      if(seekResolve) seekResolve();
    });

    video.addEventListener('loadeddata', async function() {
      let canvas = document.createElement('canvas');
      let context = canvas.getContext('2d');
      let [w, h] = [video.videoWidth, video.videoHeight];
      canvas.width = w;
      canvas.height = h;

      let frames = [];
      let interval = 1 / fps;
      let currentTime = 0;
      let duration = video.duration;

      while(currentTime < duration) {
        video.currentTime = currentTime;
        await new Promise(r => seekResolve=r);

        context.drawImage(video, 0, 0, w, h);
        let base64ImageData = canvas.toDataURL();
        frames.push(base64ImageData);

        currentTime += interval;
      }
      resolve(frames);
    });

    // set video src *after* listening to events in case it loads so fast
    // that the events occur before we were listening.
    video.src = videoObjectUrl; 

  });
}

Usage:

let frames = await extractFramesFromVideo("https://example.com/video.webm");
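
Each entry in the resolved frames array is a base64 data URL, so one way to preview the result (a minimal sketch; the frameContainer element is just an assumption for this example) is to append the frames as images:

// Assumes an element like <div id="frameContainer"></div> exists in the page.
const container = document.getElementById("frameContainer");
for (const dataUrl of frames) {
  const img = document.createElement("img");
  img.src = dataUrl; // each frame is a data URL produced by canvas.toDataURL()
  container.appendChild(img);
}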

Answer 1 (score: 0)

currentComponent.refs.videopreview is the <video ref="videopreview" autoPlay></video> element in the HTML page. Below is the code to extract a frame from the video.

const getFrame = () => {
  const video = currentComponent.refs.videopreview;
  if (!video.srcObject) {
    return null;
  }
  // canvasWidth and canvasHeight are assumed to be defined elsewhere in the component
  const canvas = document.createElement('canvas');
  canvas.width = canvasWidth;
  canvas.height = canvasHeight;
  // Draw the current video frame onto the canvas
  canvas.getContext('2d').drawImage(video, 0, 0);
  const data = canvas.toDataURL('image/jpeg');
  return data; // frame data
}

The getFrame function can be called at whatever interval is required.
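
For example, a minimal sketch of capturing one frame per second (the 1000 ms interval and the frames array are just illustrative assumptions):

// Collect a frame every second; stop with clearInterval(timer) when done.
const frames = [];
const timer = setInterval(() => {
  const frame = getFrame();
  if (frame) {
    frames.push(frame); // JPEG data URL returned by getFrame()
  }
}, 1000);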