我正在使用Web Audio API创建基于Recorderjs的网络录音机。目前,我的录音机似乎在Firefox上工作得很好。我遇到的问题是,在Chrome上,我的程序似乎记录了音频(或者至少记录了正确的时间),但是当我尝试播放录音时,将其编码为WAV文件后,音频似乎是录音最后一个声音的连续循环,但仍然可以播放正确的时间。
非常感谢任何帮助!
以下是相关代码:
//toggle recording: on the first press starts (or resumes) recording,
//on the second press stops the interval timer and saves the audio
function recordButton() {
  //if not currently recording already
  if (recording === false) {
    //initial recording: seed arrayPos so a recLength of 0 maps to 0 seconds
    if (!somethingRecorded) {
      arrayPos.push(0);
    }
    //check if dictation is being played; if so, pause it before recording
    if (playing) {
      dictation.pause();
      playing = false;
    }
    //if recording over previous audio, drop the buffers past the rewind point
    if (timeBar.value < timeBar.max) {
      console.log("In rewound block");
      seconds = Number(timeBar.value); //convert: timeBar.value is a string
      //arrayPos gets one entry per 0.1 s tick. Round the quotient: floating
      //point makes e.g. 0.3 / 0.1 === 2.9999999999999996, which would index
      //a nonexistent slot and make arrayPos[index] undefined.
      var index = Math.round(timeBar.value / 0.1);
      for (var i = numBuffers; i > arrayPos[index]; i--) {
        recLength -= recBuffersL[i - 1].length;
        recBuffersL.pop();
        recBuffersR.pop();
        numBuffers--;
      }
    }
    console.log("Start Record");
    recording = true;
    timeBar.max = 300;
    intervalID = setInterval(updateTime, 100);
    somethingRecorded = true;
    //disable buttons
    disableButtons([0, 0, 1, 0, 0, 0]);
  }
  else {
    console.log("Stop Record");
    recording = false;
    clearInterval(intervalID);
    //freeze the time bar at the recorded length and show it as m:ss
    timeBar.max = timeBar.value;
    window.document.getElementById("maxtime").innerHTML = Math.floor(timeBar.max / 60) + ":" + ("0" + Math.floor(timeBar.max % 60) ).slice(-2);
    //disable buttons and save audio
    disableButtons([0, 0, 0, 1, 0, 0]);
    saveAudio();
  }
}
//save audio file: flattens the per-callback channel buffers, interleaves
//L/R into one sample stream, encodes it as WAV and wires up playback.
//fileType (optional): MIME type for the Blob, defaults to 'audio/wav'.
function saveAudio(fileType) {
  fileType = fileType || 'audio/wav';
  //flatten out the input channels
  var mergedL = new Float32Array(recLength);
  var mergedR = new Float32Array(recLength);
  //declare offset locally — it was previously an implicit global
  var offset = 0;
  for (var i = 0; i < recBuffersL.length; i++) { //both arrays should have same length
    mergedL.set(recBuffersL[i], offset);
    mergedR.set(recBuffersR[i], offset);
    offset += recBuffersL[i].length;
  }
  //combine two channels of input into one interleaved sample stream (L,R,L,R,…)
  var combined = new Float32Array(mergedL.length + mergedR.length);
  for (var j = 0; j < combined.length; j += 2) {
    combined[j] = mergedL[j / 2];
    combined[j + 1] = mergedR[j / 2];
  }
  console.log(combined);
  //encode sample into WAV format
  var dataview = encodeWAV(combined);
  //final binary blob, then call functions to set up playback and download
  var audioBlob = new Blob([dataview], { type: fileType });
  var blobURL = (window.URL || window.webkitURL).createObjectURL(audioBlob);
  setUpPlayback(blobURL);
  // doneEncoding(audioBlob);
}
//helper: write each UTF-16 code unit of `string` as one byte into `view`,
//starting at byte position `offset` (used for the ASCII WAV header tags)
function writeString(view, offset, string) {
  var pos = string.length;
  while (pos--) {
    view.setUint8(offset + pos, string.charCodeAt(pos));
  }
}
//encode into a 16-bit PCM stereo little-endian WAV file.
//audio: interleaved L/R Float32 samples, nominally in [-1, 1]
//rate (optional): sample rate in Hz; defaults to the global sampleRate
//returns: DataView over the complete RIFF/WAVE byte buffer
function encodeWAV(audio, rate) {
  var sr = rate || sampleRate;
  //44-byte RIFF/WAVE header followed by 2 bytes per sample
  var buffer = new ArrayBuffer(44 + audio.length * 2);
  var view = new DataView(buffer);
  //local ASCII writer so the header code is self-contained
  function putString(pos, str) {
    for (var c = 0; c < str.length; c++) {
      view.setUint8(pos + c, str.charCodeAt(c));
    }
  }
  putString(0, 'RIFF');                           // RIFF identifier
  view.setUint32(4, 32 + audio.length * 2, true); // file length minus first 12 bytes (36 + data - 4)
  putString(8, 'WAVE');                           // RIFF type
  putString(12, 'fmt ');                          // format chunk identifier
  view.setUint32(16, 16, true);                   // format chunk length
  view.setUint16(20, 1, true);                    // sample format: 1 = uncompressed PCM
  view.setUint16(22, 2, true);                    // channel count
  view.setUint32(24, sr, true);                   // sample rate
  view.setUint32(28, sr * 4, true);               // byte rate (sample rate * block align)
  view.setUint16(32, 4, true);                    // block align (channel count * bytes per sample)
  view.setUint16(34, 16, true);                   // bits per sample
  putString(36, 'data');                          // data chunk identifier
  view.setUint32(40, audio.length * 2, true);     // data chunk length
  //write PCM samples. Clamp to [-1, 1] so out-of-range floats cannot
  //overflow the signed 16-bit range and wrap around (audible corruption).
  //Negative samples scale by 0x8000 so -1.0 maps exactly to -32768.
  var index = 44;
  for (var i = 0; i < audio.length; i++) {
    var s = Math.max(-1, Math.min(1, audio[i]));
    view.setInt16(index, s < 0 ? s * 0x8000 : s * 0x7FFF, true);
    index += 2;
  }
  return view;
}
答案 0（得分：1）
万一有人遇到同样的问题,我想出来了。问题实际上是在onaudioprocess回调函数中。由于getChannelData实际上返回的是指向底层缓冲区的引用而不是独立的数组,因此在将数据推入保存缓冲区的数组之前,必须先将数据克隆到新的Float32Array中。