Following further development of the code at the OP of How to use Blob URL, MediaSource or other methods to play concatenated Blobs of media fragments?, the requirement of recording discrete media fragments with MediaRecorder has been met, using ts-ebml to add cues to the resulting webm file, and using MediaSource at Chromium and Firefox browsers with the .mode of SourceBuffer set to "sequence", to record the discrete media fragments as a single media file.
The Chromium issue Monitor and potentially deprecate support for multitrack SourceBuffer support of 'sequence' AppendMode discusses that "sequence" mode is being considered for deprecation for multiple-track SourceBuffer objects. When asked how the OP's code could be implemented using the "segments" .mode (the default AppendMode of a SourceBuffer), the response was essentially that "segments" mode also supports multiple-track input at a SourceBuffer.
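To illustrate the distinction between the two append modes, here is a minimal sketch, assuming recordedBuffers is an array of ArrayBuffer objects containing recorded webm fragments with cues set and durations holds each fragment's length in seconds; both names are illustrative stand-ins, not part of the demo below:

const video = document.querySelector("video");
const mediaSource = new MediaSource();
video.src = URL.createObjectURL(mediaSource);
mediaSource.addEventListener("sourceopen", async () => {
  const sourceBuffer = mediaSource.addSourceBuffer("video/webm;codecs=vp8,opus");
  // "sequence": the user agent places each appended buffer directly after
  // the previous one and manages `.timestampOffset` internally;
  // "segments" (the default): buffers are placed by their own timestamps,
  // so `.timestampOffset` has to be advanced manually per fragment
  sourceBuffer.mode = "sequence";
  for (let i = 0; i < recordedBuffers.length; i++) {
    await new Promise(resolve => {
      sourceBuffer.addEventListener("updateend", resolve, { once: true });
      sourceBuffer.appendBuffer(recordedBuffers[i]);
    });
    if (sourceBuffer.mode === "segments") {
      // hypothetical per-fragment duration
      sourceBuffer.timestampOffset += durations[i];
    }
  }
  mediaSource.endOfStream();
});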
However, when the code is tried with the .mode of SourceBuffer set to "segments", Chromium 60 plays only approximately one second, the first of the multiple appended buffers, of an expected ten seconds of playback of the recorded media fragments, where cues are set at the webm file, which is converted to an ArrayBuffer and passed to .appendBuffer(). Firefox, by contrast, renders the same result whether .mode is set to "sequence" or "segments".
With .mode set to "sequence", the current code renders the expected result at both Chromium and Firefox. Note that Firefox does not play the .mp4 files at multipleUrls at the <video> element, though Firefox does support playing .mp4 at MediaSource when the proper media codecs are set.
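For reference, support for a given container and codec combination at MediaSource can be checked before appending; a minimal sketch, where the .mp4 codec string is the common H.264/AAC pair used in MSE demos and is shown for illustration only:

const webmCodec = "video/webm;codecs=vp8,opus";
const mp4Codec = 'video/mp4; codecs="avc1.42E01E, mp4a.40.2"';
// pick the first container and codec combination the browser supports
const supportedCodec = [webmCodec, mp4Codec]
  .find(type => MediaSource.isTypeSupported(type));
if (!supportedCodec) {
  console.warn("no supported media codec for MediaSource");
}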
<!DOCTYPE html>
<html>
<!-- recordMediaFragments.js demo https://github.com/guest271314/recordMediaFragments/tree/master/demos 2017 guest271314 -->
<head>
<!-- https://github.com/guest271314/recordMediaFragments/ts-ebml -->
</head>
<body>
<video width="320" height="280" controls="true"></video>
<script>
(async() => {
let request = await fetch("https://raw.githubusercontent.com/guest271314/recordMediaFragments/master/ts-ebml/ts-ebml-min.js");
let blob = await request.blob();
const script = document.createElement("script");
document.head.appendChild(script);
script.src = URL.createObjectURL(blob);
script.onload = () => {
const tsebml = require("ts-ebml");
const video = document.querySelector("video");
const videoStream = document.createElement("video");
// `MediaSource`
const mediaSource = new MediaSource();
// for firefox
// see https://bugzilla.mozilla.org/show_bug.cgi?id=1259788
const hasCaptureStream = HTMLMediaElement.prototype.hasOwnProperty("captureStream");
// handle firefox and chromium
const captureStream = mediaElement =>
!!mediaElement.mozCaptureStream
? mediaElement.mozCaptureStream()
: mediaElement.captureStream();
let currentFragmentURL, currentBlobURL, fragments;
videoStream.width = video.width;
videoStream.height = video.height;
const mimeCodec = "video/webm;codecs=vp8,opus";
// set to `.currentTime` of `videoStream` at `pause`
// to set next media fragment starting `.currentTime`
// if URL to be set at `.src` has same origin and pathname
let cursor = 0;
// https://gist.github.com/jsturgis/3b19447b304616f18657
// https://www.w3.org/2010/05/video/mediaevents.html
const multipleUrls = [
"https://media.w3.org/2010/05/sintel/trailer.mp4#t=0,5",
"https://nickdesaulniers.github.io/netfix/demo/frag_bunny.mp4#t=55,60",
"https://raw.githubusercontent.com/w3c/web-platform-tests/master/media-source/mp4/test.mp4#t=0,5",
"https://commondatastorage.googleapis.com/gtv-videos-bucket/sample/ForBiggerBlazes.mp4#t=0,5",
"https://commondatastorage.googleapis.com/gtv-videos-bucket/sample/ForBiggerJoyrides.mp4#t=0,5",
"https://commondatastorage.googleapis.com/gtv-videos-bucket/sample/ForBiggerMeltdowns.mp4#t=0,6",
"https://media.w3.org/2010/05/video/movie_300.mp4#t=30,36"
];
const singleUrl = [
"https://nickdesaulniers.github.io/netfix/demo/frag_bunny.mp4#t=0,1",
"https://nickdesaulniers.github.io/netfix/demo/frag_bunny.mp4#t=1,2",
"https://nickdesaulniers.github.io/netfix/demo/frag_bunny.mp4#t=2,3",
"https://nickdesaulniers.github.io/netfix/demo/frag_bunny.mp4#t=3,4",
"https://nickdesaulniers.github.io/netfix/demo/frag_bunny.mp4#t=4,5",
"https://nickdesaulniers.github.io/netfix/demo/frag_bunny.mp4#t=5,6",
"https://nickdesaulniers.github.io/netfix/demo/frag_bunny.mp4#t=6,7",
"https://nickdesaulniers.github.io/netfix/demo/frag_bunny.mp4#t=7,8",
"https://nickdesaulniers.github.io/netfix/demo/frag_bunny.mp4#t=8,9",
"https://nickdesaulniers.github.io/netfix/demo/frag_bunny.mp4#t=9,10"
];
const geckoUrl = [
"https://mirrors.creativecommons.org/movingimages/webm/ScienceCommonsJesseDylan_240p.webm#t=10,11",
"https://mirrors.creativecommons.org/movingimages/webm/ScienceCommonsJesseDylan_240p.webm#t=11,12",
"https://mirrors.creativecommons.org/movingimages/webm/ScienceCommonsJesseDylan_240p.webm#t=12,13",
"https://mirrors.creativecommons.org/movingimages/webm/ScienceCommonsJesseDylan_240p.webm#t=13,14",
"https://mirrors.creativecommons.org/movingimages/webm/ScienceCommonsJesseDylan_240p.webm#t=14,15",
"https://mirrors.creativecommons.org/movingimages/webm/ScienceCommonsJesseDylan_240p.webm#t=15,16",
"https://mirrors.creativecommons.org/movingimages/webm/ScienceCommonsJesseDylan_240p.webm#t=16,17",
"https://mirrors.creativecommons.org/movingimages/webm/ScienceCommonsJesseDylan_240p.webm#t=17,18",
"https://mirrors.creativecommons.org/movingimages/webm/ScienceCommonsJesseDylan_240p.webm#t=18,19",
"https://mirrors.creativecommons.org/movingimages/webm/ScienceCommonsJesseDylan_240p.webm#t=19,20"
];
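// record each URL as a discrete media fragment, then record
// the `MediaSource` playback of the concatenated fragments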
const mediaFragmentRecorder = async(urls) => {
// `ts-ebml`
const tsebmlTools = async() => ({
decoder: new tsebml.Decoder(),
encoder: new tsebml.Encoder(),
reader: new tsebml.Reader(),
tools: tsebml.tools
});
// create `ArrayBuffer` from `Blob`
const readAsArrayBuffer = (blob) => {
return new Promise((resolve, reject) => {
const fr = new FileReader();
fr.readAsArrayBuffer(blob);
fr.onloadend = () => {
resolve(fr.result);
};
fr.onerror = () => {
reject(fr.error);
};
});
}
// `urls`: string or array of URLs
// record each media fragment
const recordMediaFragments = async(video, mimeCodec, decoder, encoder, reader, tools, ...urls) => {
urls = [].concat(...urls);
const media = [];
for (let url of urls) {
await new Promise(async(resolve) => {
let mediaStream, recorder;
videoStream.onprogress = e => {
videoStream.onprogress = null;
console.log("loading " + url)
}
videoStream.oncanplay = async(e) => {
videoStream.oncanplay = null;
videoStream.play();
mediaStream = captureStream(videoStream);
console.log(mediaStream);
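// record the captured `MediaStream` of the playing media fragment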
recorder = new MediaRecorder(mediaStream, {
mimeType: mimeCodec
});
recorder.ondataavailable = async(e) => {
// set metadata of recorded media fragment `Blob`
const mediaBlob = await setMediaMetadata(e.data);
// create `ArrayBuffer` of `Blob` of recorded media fragment
const mediaBuffer = await readAsArrayBuffer(mediaBlob);
const mediaDuration = videoStream.played.end(0) - videoStream.played.start(0);
const mediaFragmentId = currentFragmentURL || new URL(url);
const mediaFileName = mediaFragmentId.pathname.split("/").pop() + mediaFragmentId.hash;
const mediaFragmentType = "singleMediaFragment";
if (currentBlobURL) {
URL.revokeObjectURL(currentBlobURL);
}
media.push({
mediaBlob,
mediaBuffer,
mediaDuration,
mediaFragmentType,
mediaFileName
});
resolve();
}
recorder.start();
}
videoStream.onpause = e => {
videoStream.onpause = null;
cursor = videoStream.currentTime;
recorder.stop();
// stop `MediaStreamTrack`s
for (let track of mediaStream.getTracks()) {
track.stop();
}
}
currentFragmentURL = new URL(url);
// for firefox to load cross origin media without silence
if (!hasCaptureStream) {
console.log(currentFragmentURL);
request = new Request(currentFragmentURL.href);
blob = await fetch(request).then(response => response.blob());
console.log(blob);
currentBlobURL = URL.createObjectURL(blob);
// set next media fragment URL to `.currentTime` at `pause` event
// of previous media fragment if `url` has same `origin` and `pathname`
if (urls.indexOf(currentFragmentURL.href) > 0
&& new URL(urls[urls.indexOf(currentFragmentURL.href) - 1]).origin === currentFragmentURL.origin
&& new URL(urls[urls.indexOf(currentFragmentURL.href) - 1]).pathname === currentFragmentURL.pathname) {
if (cursor > 0) {
url = currentBlobURL + currentFragmentURL.hash.replace(/=\d+/, "=" + cursor);
console.log(url)
}
} else {
url = currentBlobURL + currentFragmentURL.hash;
}
} else {
if (cursor > 0
&& new URL(urls[urls.indexOf(url) - 1]).origin === currentFragmentURL.origin
&& new URL(urls[urls.indexOf(currentFragmentURL.href) - 1]).pathname === currentFragmentURL.pathname) {
url = url.replace(/=\d+/, "=" + cursor);
console.log(url)
}
}
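// load the media fragment; the `pause` event fired at the end
// of the fragment (`#t=start,end`) stops the recorder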
videoStream.src = url;
}).catch(err => err)
}
return media
}
// set metadata of media `Blob`
// see https://github.com/legokichi/ts-ebml/issues/14#issuecomment-325200151
const setMediaMetadata = async(blob) =>
tsebmlTools()
.then(async({
decoder,
encoder,
tools,
reader
}) => {
let webM = new Blob([], {
type: "video/webm"
});
webM = new Blob([webM, blob], {
type: blob.type
});
const buf = await readAsArrayBuffer(blob);
const elms = decoder.decode(buf);
elms.forEach((elm) => {
reader.read(elm);
});
reader.stop();
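// ts-ebml: rebuild the metadata so the webm is seekable
// (duration and cues written into the header)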
const refinedMetadataBuf = tools.makeMetadataSeekable(reader.metadatas, reader.duration, reader.cues);
const webMBuf = await readAsArrayBuffer(webM);
const body = webMBuf.slice(reader.metadataSize);
const refinedWebM = new Blob([refinedMetadataBuf, body], {
type: webM.type
});
// close Blobs
if (webM.close && blob.close) {
webM.close();
blob.close();
}
return refinedWebM;
})
.catch(err => console.error(err));
let mediaTools = await tsebmlTools();
const {
decoder,
encoder,
reader,
tools
} = mediaTools;
const mediaFragments = await recordMediaFragments(video, mimeCodec, decoder, encoder, reader, tools, urls);
const recordedMedia = await new Promise((resolveAllMedia, rejectAllMedia) => {
console.log(decoder, encoder, tools, reader, mediaFragments);
let mediaStream, recorder;
mediaSource.onsourceended = e => {
console.log(video.buffered.start(0), video.buffered.end(0));
video.currentTime = video.buffered.start(0);
console.log(video.paused, video.readyState);
video.ontimeupdate = e => {
console.log(video.currentTime, mediaSource.duration);
if (video.currentTime >= mediaSource.duration) {
video.ontimeupdate = null;
video.oncanplay = null;
video.onwaiting = null;
if (recorder.state === "recording") {
recorder.stop();
}
console.log(e, recorder);
}
}
}
video.onended = (e) => {
video.onended = null;
console.log(e, video.currentTime,
mediaSource.duration);
}
video.oncanplay = e => {
console.log(e, video.duration, video.buffered.end(0));
video.play()
}
video.onwaiting = e => {
console.log(e, video.currentTime);
}
// record `MediaSource` playback of recorded media fragments
video.onplaying = async(e) => {
console.log(e);
video.onplaying = null;
mediaStream = captureStream(video);
if (!hasCaptureStream) {
videoStream.srcObject = mediaStream;
videoStream.play();
}
recorder = new MediaRecorder(mediaStream, {
mimeType: mimeCodec
});
console.log(recorder);
recorder.ondataavailable = async(e) => {
console.log(e);
const mediaFragmentsRecording = {};
mediaFragmentsRecording.mediaBlob = await setMediaMetadata(e.data);
mediaFragmentsRecording.mediaBuffer = await readAsArrayBuffer(mediaFragmentsRecording.mediaBlob);
mediaFragmentsRecording.mediaFileName = urls.map(url => {
const id = new URL(url);
return id.pathname.split("/").pop() + id.hash
}).join("-");
mediaFragmentsRecording.mediaFragmentType = "multipleMediaFragments";
// `<video>` to play concatenated media fragments
// recorded from playback of `MediaSource`
fragments = document.createElement("video");
fragments.id = "fragments";
fragments.width = video.width;
fragments.height = video.height;
fragments.controls = true;
fragments.onloadedmetadata = () => {
fragments.onloadedmetadata = null;
mediaFragmentsRecording.mediaDuration = fragments.duration;
URL.revokeObjectURL(currentBlobURL);
// stop `MediaStreamTrack`s
for (let track of mediaStream.getTracks()) {
track.stop();
}
resolveAllMedia([
...mediaFragments, mediaFragmentsRecording
]);
}
currentBlobURL = URL.createObjectURL(mediaFragmentsRecording.mediaBlob);
fragments.src = currentBlobURL;
document.body.appendChild(fragments);
}
recorder.start();
}
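// attach `MediaSource` to the `<video>` element via a blob URL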
video.src = URL.createObjectURL(mediaSource);
mediaSource.addEventListener("sourceopen", sourceOpen);
async function sourceOpen(e) {
if (MediaSource.isTypeSupported(mimeCodec)) {
const sourceBuffer = mediaSource.addSourceBuffer(mimeCodec);
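// "segments" is the default `AppendMode`; with `.mode` set to
// "sequence" the code renders the expected result at both browsers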
sourceBuffer.mode = "segments";
for (let {
mediaBuffer,
mediaDuration
} of mediaFragments) {
await new Promise((resolveUpdatedMediaSource) => {
sourceBuffer.onupdateend = async(e) => {
sourceBuffer.onupdateend = null;
console.log(e, mediaDuration, mediaSource.duration
, video.paused, video.ended, video.currentTime
, "media source playing", video.readyState);
// https://bugzilla.mozilla.org/show_bug.cgi?id=1400587
// https://bugs.chromium.org/p/chromium/issues/detail?id=766002&q=label%3AMSEptsdtsCleanup
try {
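// in "segments" mode `.timestampOffset` is advanced manually
// by the duration of each appended fragment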
sourceBuffer.timestampOffset += mediaDuration;
resolveUpdatedMediaSource();
} catch (err) {
console.error(err);
resolveUpdatedMediaSource();
}
}
sourceBuffer.appendBuffer(mediaBuffer);
})
}
mediaSource.endOfStream()
} else {
console.warn(mimeCodec + " not supported");
}
};
})
return recordedMedia
};
mediaFragmentRecorder(geckoUrl)
.then(recordedMediaFragments => {
// do stuff with recorded media fragments
console.log(recordedMediaFragments);
const select = document.createElement("select");
for (let {
mediaFileName,
mediaBlob,
mediaFragmentType
} of Object.values(recordedMediaFragments)) {
const option = new Option(mediaFileName, URL.createObjectURL(mediaBlob));
select.appendChild(option);
}
select.onchange = () => {
document.getElementById("fragments").src = select.value;
}
video.parentNode.insertBefore(select, video);
video.controls = true;
video.currentTime = video.buffered.start(0);
})
.catch(err => console.error(err));
}
})()
</script>
</body>
</html>
Given the possible deprecation of "sequence" mode for multiple tracks, which the current code relies on to meet the requirement at both Chromium and Firefox:

What adjustments need to be made at the MediaSource portion of the code for Chromium to render the same result, using the "segments" .mode, that Firefox currently renders as expected?

Or, alternatively, is there a bug at the Chromium implementation of multiple-track support when the .mode of SourceBuffer is set to "segments"?