I am using a subtitle creation tool and would like to add a feature that displays the audio waveform for the next 5 seconds or so while the video is playing. I believe a visual preview of the upcoming audio would let users place subtitle points more accurately.
Using an analyser I found this method, but it limits the audio samples to 2048 (roughly 1/24 of a second), which is too small to be useful as a preview.
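For reference, here is a minimal sketch of that analyser-based approach (the &lt;video&gt; element id and the graph wiring are my assumptions): getFloatTimeDomainData() only returns fftSize samples per call, so with an fftSize of 2048 at 48 kHz you only ever see about 1/24 of a second at a time.

var audioContext = new AudioContext();
var video = document.getElementById('video');               // assumed <video> element
var source = audioContext.createMediaElementSource(video);
var analyser = audioContext.createAnalyser();
analyser.fftSize = 2048;                                     // 2048 samples ~ 1/24 s at 48 kHz
source.connect(analyser);
analyser.connect(audioContext.destination);

var timeDomain = new Float32Array(analyser.fftSize);
function pollAnalyser() {
    analyser.getFloatTimeDomainData(timeDomain);             // only the most recent fftSize samples
    // ...draw timeDomain onto the preview canvas...
    requestAnimationFrame(pollAnalyser);
}
pollAnalyser();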
I also found this method by capturing the "audioprocess" event, but it is still limited to about 16k samples, or roughly 1/3 of a second. Again, not long enough to be useful.
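Likewise, a minimal sketch of the "audioprocess" approach (element id and wiring again assumed): 16384 is the largest buffer size createScriptProcessor() accepts, which is only about 1/3 of a second at 48 kHz.

var audioContext = new AudioContext();
var video = document.getElementById('video');                   // assumed <video> element
var source = audioContext.createMediaElementSource(video);
var processor = audioContext.createScriptProcessor(16384, 1, 1); // 16384 is the maximum buffer size
processor.onaudioprocess = function (event) {
    var latest = event.inputBuffer.getChannelData(0);           // ~16k samples ~ 1/3 s at 48 kHz
    event.outputBuffer.getChannelData(0).set(latest);           // pass audio through so playback stays audible
    // ...append `latest` to a rolling buffer and redraw...
};
source.connect(processor);
processor.connect(audioContext.destination);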
I put together a quick test to see how long it takes to process 5 seconds' worth of samples, and it averaged around 20 ms. So as long as I can get access to the buffer, I think it can be processed in real time. I may have to lower the frame rate, but even at 15 fps I think it would still be useful to the user.
Is there a way to read the next 240,000 samples (5 seconds of audio) while the video is playing?
<!DOCTYPE html>
<html>
<body id="body">
<canvas id="canvas" width="1000" height="200"></canvas>
<script>
var count = 0;
var samples = new Int32Array(48000 * 5);   // 5 seconds of samples at 48 kHz
var CANVAS_HEIGHT = 200;
var canvas = document.getElementById('canvas');
var ctx = canvas.getContext('2d');

function processSamples() {
    var start = new Date().getTime();
    ctx.clearRect(0, 0, 1000, 200);
    // Fill the buffer with dummy data to stand in for 5 seconds of audio.
    for (var i = 0; i < samples.length; i++) {
        samples[i] = Math.floor(Math.random() * CANVAS_HEIGHT);
    }
    // We can't display each sample on its own line, so average groups of samples:
    // 1000 pixels for a 5 second preview = 200 px per second.
    // 48,000 samples per second / 200 px = 240 samples per pixel.
    var sample;
    for (var i = 0; i < samples.length / 240; i++) {
        sample = 0;
        for (var j = 0; j < 240; j++) {
            sample += samples[i * 240 + j];
        }
        sample = Math.floor(sample / 240);
        ctx.fillRect(i, 0, 1, sample);
    }
    if (count < 10)
        setTimeout(processSamples, 1000);
    count++;
    var end = new Date().getTime();
    console.log('Execution time: ' + (end - start) + ' ms');
}
window.onload = processSamples;
</script>
</body>
</html>
Answer 0 (score: 0)
It does not appear possible at this time to extend the buffer to get a larger sample size during playback. However, I was able to generate a static waveform of the entire file using this method. I modified it here to use the File API.
Load the file into an ArrayBuffer, then pass the buffer to audioContext.decodeAudioData(arrayBuffer, callbackFunction).
var audioContext = new AudioContext();

// Draw one vertical bar per pixel column, spanning the min/max sample values in that slice.
function drawBuffer(width, height, context, buffer) {
    var data = buffer.getChannelData(0);
    var step = Math.ceil(data.length / width);
    var amp = height / 2;
    for (var i = 0; i < width; i++) {
        var min = 1.0;
        var max = -1.0;
        for (var j = 0; j < step; j++) {
            var datum = data[(i * step) + j];
            if (datum < min)
                min = datum;
            if (datum > max)
                max = datum;
        }
        context.fillRect(i, (1 + min) * amp, 1, Math.max(1, (max - min) * amp));
    }
}

function initAudio() {
    var file = document.getElementById('fileItem').files[0];
    var reader = new FileReader();
    reader.onerror = function (e) {
        console.log("Error reading file");
        console.log(e);
    };
    reader.onload = function (e) {
        console.log("loading");
        var arrayBuffer = e.target.result;
        audioContext.decodeAudioData(arrayBuffer,
            function (buffer) {
                console.log("drawing");
                var canvas = document.getElementById("canvas");
                drawBuffer(canvas.width, canvas.height, canvas.getContext('2d'), buffer);
            });
    };
    reader.readAsArrayBuffer(file);
}
body {
    width: 100%;
    height: 100%;
}
#canvas {
    position: absolute;
    top: 0;
    left: 0;
    width: 800px;
    height: 200px;
    background-color: blue;
}
#fileItem {
    position: absolute;
    top: 205px;
    left: 0px;
}
#button {
    position: absolute;
    top: 205px;
    left: 250px;
}
<body>
<canvas id="canvas" width="800" height="200"></canvas>
<input id="fileItem" type="file"/>
<button id="button" onclick="initAudio();">Draw Waveform</button>
</body>