我正在制作一个小小的音乐应用程序。 我希望能够在不依赖麦克风的情况下录制浏览器中发出的声音。 到目前为止,我所看到的关于MediaRecorder api的一切都表明它依赖于麦克风。 理想情况下,我希望在不使用外部库的情况下实现此目标。
作为参考,这是我如何制作一个非常简单的声音。
// Pre-load the conga sample once; the same Audio element is replayed on every click.
var congo = new Audio('http://www.denhaku.com/r_box/sr16/sr16perc/hi conga.wav');
var drumpad = document.getElementById('drumpad');
// Each click plays the sample through the page's audio output —
// nothing here touches the microphone.
drumpad.addEventListener('click', function(){
  congo.play();
});
谢谢
编辑:为了更清楚,我如何记录我所包含的代码片段所产生的声音,而不依赖于计算机内置麦克风。例如,假设用户正在使用drumpad制作声音并且他们戴着耳机,麦克风将毫无用处。即使他们不戴耳机,他们仍然会拾起很多背景噪音。我想将正在录制的声音与用户打开此应用程序的特定浏览器选项卡中的音乐隔离开来。
答案 0(得分:1)
到目前为止,我所看到的关于MediaRecorder api的一切都表明它依赖于麦克风。
不,MediaRecorder API 确实依赖 MediaStream,但这些 MediaStream 不一定是 LocalMediaStream(即不一定来自 getUserMedia / 麦克风):
如果加载的媒体符合同源策略,您可以使用 MediaElement(<audio>、<video>)的 captureStream() 方法获取 MediaStream。
但是这将为每个MediaElement返回一个MediaStream,在您的情况下,它可能不是最好的解决方案。
相反,跳转到Web Audio API,无论如何更适合像鼓垫这样的应用。
Web Audio API 提供了 createMediaStreamDestination() 方法,该方法会返回一个 MediaStreamAudioDestinationNode,其 .stream 属性中包含一个 MediaStream。您连接到此 MediaStreamAudioDestinationNode 的所有其他节点都会被输出到该 MediaStream 中,您就可以用 MediaRecorder 录制它。
让我们回收此drum-kit demo以包含录音机:
(function myFirstDrumKit() {
const db_url = 'https://dl.dropboxusercontent.com/s/'; // all our medias are stored on dropbox
// we'll need to first load all the audios
function initAudios() {
const promises = drum.parts.map(part => {
return fetch(db_url + part.audio_src) // fetch the file
.then(resp => resp.arrayBuffer()) // as an arrayBuffer
.then(buf => drum.a_ctx.decodeAudioData(buf)) // then decode its audio data
.then(AudioBuf => {
part.buf = AudioBuf; // store the audioBuffer (won't change)
return Promise.resolve(part); // done
});
});
return Promise.all(promises); // when all are loaded
}
function initImages() {
// in this version we have only an static image,
// but we could have multiple per parts, with the same logic as for audios
var img = new Image();
img.src = db_url + drum.bg_src;
drum.bg = img;
return new Promise((res, rej) => {
img.onload = res;
img.onerror = rej;
});
}
let general_solo = false;
let part_solo = false;
const drum = {
a_ctx: new AudioContext(),
generate_sound: (part) => {
// called each time we need to play a source
const source = drum.a_ctx.createBufferSource();
source.buffer = part.buf;
source.connect(drum.gain);
// to keep only one playing at a time
// simply store this sourceNode, and stop the previous one
if(general_solo){
// stop all playing sources
drum.parts.forEach(p => (p.source && p.source.stop(0)));
}
else if (part_solo && part.source) {
// stop only the one of this part
part.source.stop(0);
}
// store the source
part.source = source;
source.start(0);
},
parts: [{
name: 'hihat',
x: 90,
y: 116,
w: 160,
h: 70,
audio_src: 'kbgd2jm7ezk3u3x/hihat.mp3'
},
{
name: 'snare',
x: 79,
y: 192,
w: 113,
h: 58,
audio_src: 'h2j6vm17r07jf03/snare.mp3'
},
{
name: 'kick',
x: 80,
y: 250,
w: 200,
h: 230,
audio_src: '1cdwpm3gca9mlo0/kick.mp3'
},
{
name: 'tom',
x: 290,
y: 210,
w: 110,
h: 80,
audio_src: 'h8pvqqol3ovyle8/tom.mp3'
}
],
bg_src: '0jkaeoxls18n3y5/_drumkit.jpg?dl=0',
//////////////////////
/// The recording part
//////////////////////
record: function record(e) {
const btn = document.getElementById('record');
const chunks = [];
// init a new MediaRecorder with our StreamNode's stream
const recorder = new MediaRecorder(drum.streamNode.stream);
// save every chunks
recorder.ondataavailable = e => chunks.push(e.data);
// once we're done recording
recorder.onstop = e => {
// export our recording
const blob = new Blob(chunks);
const url = URL.createObjectURL(blob);
// here in an <audio> element
const a = new Audio(url);
a.controls = true;
document.getElementById('records').appendChild(a);
// reset default click handler
btn.onclick = drum.record;
btn.textContent = 'record';
}
btn.onclick = function () {
recorder.stop();
};
// start recording
recorder.start();
btn.textContent = 'stop recording';
}
};
drum.gain = drum.a_ctx.createGain();
drum.gain.gain.value = .5;
drum.gain.connect(drum.a_ctx.destination);
// for recording
drum.streamNode = drum.a_ctx.createMediaStreamDestination();
drum.gain.connect(drum.streamNode);
document.getElementById('record').onclick = drum.record;
/////////////
//Unrelated to current question
////////////
function initCanvas() {
const c = drum.canvas = document.createElement('canvas');
const ctx = drum.ctx = c.getContext('2d');
c.width = drum.bg.width;
c.height = drum.bg.height;
ctx.drawImage(drum.bg, 0, 0);
document.body.appendChild(c);
addEvents(c);
}
const isHover = (x, y) =>
(drum.parts.filter(p => (p.x < x && p.x + p.w > x && p.y < y && p.y + p.h > y))[0] || false);
function addEvents(canvas) {
let mouse_hovered = false;
canvas.addEventListener('mousemove', e => {
mouse_hovered = isHover(e.pageX - canvas.offsetLeft, e.pageY - canvas.offsetTop)
if (mouse_hovered) {
canvas.style.cursor = 'pointer';
} else {
canvas.style.cursor = 'default';
}
})
canvas.addEventListener('mousedown', e => {
e.preventDefault();
if (mouse_hovered) {
drum.generate_sound(mouse_hovered);
}
});
const checkboxes = document.querySelectorAll('input');
checkboxes[0].onchange = function() {
general_solo = this.checked;
general_solo && (checkboxes[1].checked = part_solo = true);
};
checkboxes[1].onchange = function() {
part_solo = this.checked;
!part_solo && (checkboxes[0].checked = general_solo = false);
};
}
Promise.all([initAudios(), initImages()])
.then(initCanvas);
})()
&#13;
label{float: right}
&#13;
<button id="record">record</button>
<label>general solo<input type="checkbox"></label><br>
<label>part solo<input type="checkbox"></label><br>
<div id="records"></div>
&#13;