我有这个功能代码,可以在不同的时间分别指定两个currentTime来加载两个XMLHttpRequests音频缓冲区。如何将其插入“播放”按钮,并且仍然保留每个音频的预定时间? 我可以将其与mousedown事件侦听器一起使用,但我想将其替换为播放按钮。
我将向您展示我的app.js文件:
"use strict"
//Create the Audio Context, compatible with older Firefox and Chrome browsers
//Return a new AudioContext, trying the standard constructor first and then
//the WebKit- and Mozilla-prefixed fallbacks used by older browsers.
//Throws if no implementation is available at all.
function audioContextCheck(){
if (typeof AudioContext !== "undefined") { return new AudioContext(); }
if (typeof webkitAudioContext !== "undefined") { return new webkitAudioContext(); }
if (typeof mozAudioContext !== "undefined") { return new mozAudioContext(); }
throw new Error('AudioContext not supported');
}
//Create the single Audio Context used by every decode/playback below.
var audioContext = audioContextCheck();
//Fetch the first audio file as raw bytes with an XMLHttpRequest, then decode
//it into an AudioBuffer (audioBuffer2) that the Web Audio API can play.
var audioBuffer2;
var getSound2 = new XMLHttpRequest();
getSound2.open("get","Audio2.mp3", true);
getSound2.responseType = "arraybuffer";
getSound2.onload = function(){
audioContext.decodeAudioData(getSound2.response, function(buffer2) {
audioBuffer2 = buffer2;
}, function(err) {
//Without this error callback a corrupt/unsupported file would fail silently.
console.error("Could not decode Audio2.mp3", err);
});
};
getSound2.onerror = function(){
//Report network-level failures (404, CORS, offline) instead of swallowing them.
console.error("Could not load Audio2.mp3");
};
getSound2.send();
//Fetch the second (remote) track the same way and decode it into audioBuffer.
//Decoding happens once at page load so playback() can start it instantly.
var audioBuffer;
var getSound = new XMLHttpRequest();
getSound.open("get","https://cdn.rawgit.com/devildrey33/devildrey33/ddb01d71/Ejemplos/BannerTest/Canciones/LevenRain_-_ActionMan_Versus_The_CyberParasites.mp3", true);
getSound.responseType = "arraybuffer";
getSound.onload = function(){
audioContext.decodeAudioData(getSound.response, function(buffer) {
audioBuffer = buffer;
}, function(err) {
//Without this error callback a corrupt/unsupported file would fail silently.
console.error("Could not decode remote track", err);
});
};
getSound.onerror = function(){
//Report network-level failures (404, CORS, offline) instead of swallowing them.
console.error("Could not load remote track");
};
getSound.send();
//Wire playback() to the "play" button's click event (this replaces the old
//"mousedown" listener; playback is hoisted, so registering it here is safe).
document.getElementById("play").addEventListener("click", playback);
//Play both decoded buffers, each at its own scheduled offset relative to the
//moment the button is clicked (+2 s for the remote track, +6 s for Audio2).
//AudioBufferSourceNodes are one-shot, so fresh ones are created per click.
function playback(){
//Buffers decode asynchronously at page load; bail out on clicks that arrive
//before both decodes finish instead of starting sources with no buffer.
if (!audioBuffer || !audioBuffer2) {
console.warn("Audio is still loading, try again in a moment");
return;
}
//Browsers may create the context suspended until a user gesture; resume it
//here so the scheduled start times are actually audible on the first click.
if (audioContext.state === "suspended") {
audioContext.resume();
}
var playSound2 = audioContext.createBufferSource();
playSound2.buffer = audioBuffer2;
var playSound = audioContext.createBufferSource();
playSound.buffer = audioBuffer;
//Connect both source nodes to the output of our node graph
playSound2.connect(audioContext.destination);
playSound.connect(audioContext.destination);
//Schedule each source at its original offset from "now"
playSound2.start(audioContext.currentTime+6);
playSound.start(audioContext.currentTime+2);
}