How do I append two video files' data to a source buffer using the Media Source API?

Date: 2013-01-01 06:56:27

Tags: javascript html5 api video streaming

I have two videos named v11.webm and v12.webm.

I want these two videos to play back seamlessly, one after the other.

I am following the Media Source API approach of appending data to a source buffer.

I am referring to the demo at this link.

I modified that example, removed the part that chunks the video, and tried to append each file's data to the source buffer instead.

My code is as follows:

<script>

var video = document.querySelector('video');

window.MediaSource = window.MediaSource || window.WebKitMediaSource;
if (!!!window.MediaSource) {
  alert('MediaSource API is not available');
}

var mediaSource = new MediaSource();

video.src = window.URL.createObjectURL(mediaSource);

mediaSource.addEventListener('webkitsourceopen', function(e) {

    var sourceBuffer = mediaSource.addSourceBuffer('video/webm; codecs="vorbis,vp8"');  

    for (var i = 1; i <= 2; i++)
    {
        (function(i){

          GET('v1'+i+'.webm', function(uInt8Array) {
              var file = new Blob([uInt8Array], {type: 'video/webm'});

              var reader = new FileReader();
              reader.onload = function(e) {
                sourceBuffer.append(new Uint8Array(e.target.result));            
              };
              reader.readAsArrayBuffer(file);

          });
        })(i);
    }

}, false);

mediaSource.addEventListener('webkitsourceended', function(e) {
  console.log('mediaSource readyState: ' + this.readyState);
}, false);

function GET(url, callback) {
 // alert(url);
  var xhr = new XMLHttpRequest();
  xhr.open('GET', url, true);
  xhr.responseType = 'arraybuffer';
  xhr.send();

  xhr.onload = function(e) {
    if (xhr.status != 200) {
      alert("Unexpected status code " + xhr.status + " for " + url);
      return false;
    }
    callback(new Uint8Array(xhr.response));
  };
}
</script>

Right now the code does not work properly.

The data from v11.webm and v12.webm gets mixed together inconsistently.

It does not play seamlessly.

6 Answers:

Answer 0 (score: 10):

Maybe a bit late, but I was able to figure this out. Your new video overwrites the old one because they both start at time 0. You have to specify that the new video starts at time X before appending it, so your 'webkitsourceopen' event handler should be:

/* forget the sourcebuffer variable, we'll just manipulate mediaSource */
mediaSource.addSourceBuffer('video/webm; codecs="vorbis,vp8"');

/* it seems ok to set initial duration 0 */
var duration = 0;
var totalVideos = 2;

/* use this type of loop to ensure that a single video
   is downloaded and appended before moving on to the next video;
   mediasource seems picky about these being in order */
var i = 0;
(function readChunk_(i){

    /* once every video has been appended, signal the end of the stream */
    if(i == totalVideos) {
        mediaSource.endOfStream();
        return;
    }

    /* the GET function already returns a Uint8Array.
       the demo you linked reads it with a FileReader in order to manipulate it;
       here you just want to append it immediately */
    GET('v1' + (i + 1) + '.webm', function(uint8Array){

        /* assuming your videos are put together correctly
           (i.e. duration is correct), set the timestamp offset
           to the length of the total video appended so far */
        mediaSource.sourceBuffers[0].timestampOffset = duration;

        mediaSource.sourceBuffers[0].append(uint8Array);

        /* set new total length */
        duration = mediaSource.duration;

        readChunk_(++i);
    });
})(i);

Now, if only MediaSource were less picky about the structure of the videos it accepts. I still haven't found a sample .webm that works the same way as the one used in Eric Bidelman's Demo that you linked.

EDIT: After more testing, the way I set the duration may be incorrect. If you seem to get exponential duration growth after every append, try setting timestampOffset to 0 and leaving it unchanged. I don't know why that seems to fix it; it may be an issue with how I generated my webm files.
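For reference, a minimal sketch of that workaround (not from the original answer) would be the same sequential loop as above, but with timestampOffset left at its default of 0, relying on the timestamps already encoded in each webm file; this assumes the files were generated with continuous timestamps.

/* sketch: append each file in order without touching timestampOffset */
var i = 0;
(function readChunk_(i) {
    if (i == totalVideos) {
        mediaSource.endOfStream();
        return;
    }
    GET('v1' + (i + 1) + '.webm', function(uint8Array) {
        /* no timestampOffset update here; timing comes from the files themselves */
        mediaSource.sourceBuffers[0].append(uint8Array);
        readChunk_(++i);
    });
})(i);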

Answer 1 (score: 1):

What I see missing in your code is: mediaSource.endOfStream();

Could you elaborate on the inconsistent mixing problem?
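For context, here is a minimal sketch of where that call fits when using the unprefixed, standardized API (appendBuffer plus the 'updateend' event rather than the old webkit append); the names segments and appendedCount are illustrative, not from the original code.

// Sketch: append the files one at a time and call endOfStream() after the last one.
var segments = [/* Uint8Array for v11.webm, Uint8Array for v12.webm, in order */];
var appendedCount = 0;

sourceBuffer.addEventListener('updateend', function () {
  appendedCount++;
  if (appendedCount === segments.length) {
    mediaSource.endOfStream();                            // no more data coming; finalize duration
  } else {
    sourceBuffer.appendBuffer(segments[appendedCount]);   // append the next file
  }
});

sourceBuffer.appendBuffer(segments[0]);                   // kick off the first append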

Answer 2 (score: 1):

Just set the sourceBuffer's mode to 'sequence' (the default appears to be 'segments').

From the docs: https://developer.mozilla.org/en-US/docs/Web/API/SourceBuffer/mode

sequence: The order in which media segments are appended to the SourceBuffer determines the order in which they are played back. Segment timestamps are generated automatically for segments that follow this order.

In my app, I simply set it right after adding the source buffer to the media source:

// Create media source for the stream, and create the source buffer when ready
let self = this;
this._mediaSource = new MediaSource();
this._mediaSource.addEventListener('sourceopen', function () {
  self._sourceBuffer = self._mediaSource.addSourceBuffer(environment.recordingMimeType);
  self._sourceBuffer.mode = 'sequence'; // This is the relevant part
  self._sourceBuffer.addEventListener('error', function (ev) {
    console.error("Source buffer error ??");
    console.error(ev);
  });
});

Answer 3 (score: 0):

The spec states that the gap between the pieces during playback should not be larger than the smallest audio frame; are you meeting that? Unfortunately, I don't think it says what to do when there is no audio.
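As a quick diagnostic (not part of this answer), you could inspect the source buffer's buffered ranges after both appends finish; a seamless join should produce a single contiguous range, and any gap between ranges shows up directly:

// Sketch: log gaps between buffered ranges in a SourceBuffer.
function logBufferGaps(sourceBuffer) {
  var ranges = sourceBuffer.buffered;                     // TimeRanges object
  for (var i = 1; i < ranges.length; i++) {
    console.log('gap of ' + (ranges.start(i) - ranges.end(i - 1)) +
                's between range ' + (i - 1) + ' and ' + i);
  }
  if (ranges.length <= 1) {
    console.log('single contiguous range, no gaps');
  }
}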

Answer 4 (score: 0):

A bit late to ask, but did you ever succeed in appending multiple mp4 files to a media source's source buffer?

Answer 5 (score: 0):

I have a working example that handles this in a simple way…

I am using three static files here, but you can also append data coming from a socket or any API.

<!DOCTYPE html>
<html>

<head>
</head>

<body>
  <br>
  <video controls="true" autoplay="true"></video>

  <script>
    (async() => {


      const mediaSource = new MediaSource();

      const video = document.querySelector("video");

      // video.oncanplay = e => video.play();

      const urls = ["https://nickdesaulniers.github.io/netfix/demo/frag_bunny.mp4", "https://raw.githubusercontent.com/w3c/web-platform-tests/master/media-source/mp4/test.mp4","https://nickdesaulniers.github.io/netfix/demo/frag_bunny.mp4"];

      const request = url => fetch(url).then(response => response.arrayBuffer());

      // `urls.reverse()` stops at `.currentTime` : `9`
      const files = await Promise.all(urls.map(request));

      /*
       `.webm` files
       Uncaught DOMException: Failed to execute 'appendBuffer' on 'SourceBuffer': This SourceBuffer has been removed from the parent media source.
       Uncaught DOMException: Failed to set the 'timestampOffset' property on 'SourceBuffer': This SourceBuffer has been removed from the parent media source.
      */
      // const mimeCodec = "video/webm; codecs=opus";
      // https://stackoverflow.com/questions/14108536/how-do-i-append-two-video-files-data-to-a-source-buffer-using-media-source-api/
      const mimeCodec = "video/mp4; codecs=avc1.42E01E, mp4a.40.2";


      const media = await Promise.all(files.map(file => {
        return new Promise(resolve => {
          let media = document.createElement("video");
          let blobURL = URL.createObjectURL(new Blob([file]));
          media.onloadedmetadata = async e => {
            resolve({
              mediaDuration: media.duration,
              mediaBuffer: file
            })
          }
          media.src = blobURL;
        })
      }));

      console.log(media);

      mediaSource.addEventListener("sourceopen", sourceOpen);

      video.src = URL.createObjectURL(mediaSource);

      async function sourceOpen(event) {

        if (MediaSource.isTypeSupported(mimeCodec)) {
          const sourceBuffer = mediaSource.addSourceBuffer(mimeCodec);

          for (let chunk of media) {
            await new Promise(resolve => {
              sourceBuffer.appendBuffer(chunk.mediaBuffer);
              sourceBuffer.onupdateend = e => {
                sourceBuffer.onupdateend = null;
                sourceBuffer.timestampOffset += chunk.mediaDuration;
                console.log(mediaSource.duration);
                resolve()
              }
            })

          }

          mediaSource.endOfStream();

        }  
        else {
          console.warn(mimeCodec + " not supported");
        }
      };

    })()
  </script>


</body>

</html>

All credit goes to the person who posted this information: https://github.com/guest271314