I was assigned a task to create an audio visualizer, but for the life of me I can't figure out how to get the audio to play in code. I copied the code from a previous assignment that has a working audio element, but for some reason it won't play.
Here is the relevant JavaScript from my version:
let canvasElement, drawCtx;
canvasElement = document.querySelector('canvas');
drawCtx = canvasElement.getContext("2d");
let playButton, audioCtx, audioElement, sourceNode, analyserNode, gainNode;
const NUM_SAMPLES = 256;
let audioData = new Uint8Array(NUM_SAMPLES/2);
const SOUND_PATH = Object.freeze({
    sound1: "media/New Adventure Theme.mp3",
    sound2: "media/Peanuts Theme.mp3",
    sound3: "media/The Picard Song.mp3"
});

// main functions
function setup()
{
    setupWebAudio();
    setupUI();
    loop();
}

function setupUI()
{
    addShapeButton.onclick = addShape;
    document.querySelector(".fa-plus").onclick = showHide;
    playButton = document.querySelector("#playButton");
    playButton.onclick = e =>
    {
        console.log(`audioCtx.state = ${audioCtx.state}`);
        // check if context is in suspended state (autoplay policy)
        if (audioCtx.state == "suspended")
        {
            audioCtx.resume();
        }
        if (e.target.dataset.playing == "no")
        {
            audioElement.play();
            e.target.dataset.playing = "yes";
        }
        else if (e.target.dataset.playing == "yes")
        {
            audioElement.pause();
            e.target.dataset.playing = "no";
        }
    }
}

function setupWebAudio()
{
    const AudioContext = window.AudioContext || window.webkitAudioContext;
    audioCtx = new AudioContext();
    audioElement = document.querySelector("audio");
    audioElement.src = SOUND_PATH.sound1;
    sourceNode = audioCtx.createMediaElementSource(audioElement);
    analyserNode = audioCtx.createAnalyser();
    analyserNode.fftSize = NUM_SAMPLES;
    gainNode = audioCtx.createGain();
    gainNode.gain.value = 1;
    sourceNode.connect(analyserNode);
    analyserNode.connect(gainNode);
    gainNode.connect(audioCtx.destination);
    audioElement.src = "media/New Adventure Theme.mp3";
    audioElement.onended = _ => {
        playButton.dataset.playing = "no";
    };
    gainNode.gain.value = 50;
    //audioCtx.resume();
    audioElement.play();
}
Here is the code from the previous, working version:
window.onload = init;

// SCRIPT SCOPED VARIABLES

// 1 - here we are faking an enumeration - we'll look at another way to do this soon
const SOUND_PATH = Object.freeze({
    sound1: "media/New Adventure Theme.mp3",
    sound2: "media/Peanuts Theme.mp3",
    sound3: "media/The Picard Song.mp3"
});

// 2 - elements on the page
let audioElement, canvasElement;
// UI
let playButton;
// 3 - our canvas drawing context
let drawCtx;
// 4 - our WebAudio context
let audioCtx;
// 5 - nodes that are part of our WebAudio audio routing graph
let sourceNode, analyserNode, gainNode;
// 6 - a typed array to hold the audio frequency data
const NUM_SAMPLES = 256;
// create a new array of 8-bit integers (0-255)
let audioData = new Uint8Array(NUM_SAMPLES/2);
let maxRadius = 200;
let invert = false, tintRed = false, noise = false, sepia = false;

// FUNCTIONS
function init(){
    setupWebaudio();
    setupCanvas();
    setupUI();
    update();
}

function setupWebaudio(){
    // 1 - The || is because WebAudio has not been standardized across browsers yet
    const AudioContext = window.AudioContext || window.webkitAudioContext;
    audioCtx = new AudioContext();
    // 2 - get a reference to the <audio> element on the page
    audioElement = document.querySelector("audio");
    audioElement.src = SOUND_PATH.sound3;
    // 3 - create a source node that points at the <audio> element
    sourceNode = audioCtx.createMediaElementSource(audioElement);
    // 4 - create an analyser node
    analyserNode = audioCtx.createAnalyser();
    /*
    We will request NUM_SAMPLES number of samples or "bins" spaced equally
    across the sound spectrum.
    If NUM_SAMPLES (fftSize) is 256, then the first bin is 0 Hz, the second is 172 Hz,
    the third is 344 Hz. Each bin contains a number between 0-255 representing
    the amplitude of that frequency.
    */
    // fft stands for Fast Fourier Transform
    analyserNode.fftSize = NUM_SAMPLES;
    // 5 - create a gain (volume) node
    gainNode = audioCtx.createGain();
    gainNode.gain.value = 1;
    // 6 - connect the nodes - we now have an audio graph
    sourceNode.connect(analyserNode);
    analyserNode.connect(gainNode);
    gainNode.connect(audioCtx.destination);
}

function setupCanvas(){
    canvasElement = document.querySelector('canvas');
    drawCtx = canvasElement.getContext("2d");
}

function setupUI(){
    playButton = document.querySelector("#playButton");
    playButton.onclick = e => {
        console.log(`audioCtx.state = ${audioCtx.state}`);
        // check if context is in suspended state (autoplay policy)
        if (audioCtx.state == "suspended") {
            audioCtx.resume();
        }
        if (e.target.dataset.playing == "no") {
            audioElement.play();
            e.target.dataset.playing = "yes";
        }
        // if track is playing, pause it
        else if (e.target.dataset.playing == "yes") {
            audioElement.pause();
            e.target.dataset.playing = "no";
        }
    };

    let volumeSlider = document.querySelector("#volumeSlider");
    volumeSlider.oninput = e => {
        gainNode.gain.value = e.target.value;
        volumeLabel.innerHTML = Math.round((e.target.value/2 * 100));
    };
    volumeSlider.dispatchEvent(new InputEvent("input"));

    let radiusSlider = document.querySelector("#circleRadiusSlider");
    radiusSlider.oninput = e => {
        maxRadius = e.target.value;
        circleRadiusLabel.innerHTML = Math.round((e.target.value));
    };
    radiusSlider.dispatchEvent(new InputEvent("input"));

    document.querySelector("#trackSelect").onchange = e => {
        audioElement.src = e.target.value;
        // pause the current track if it is playing
        playButton.dispatchEvent(new MouseEvent("click"));
    };

    // if track ends
    audioElement.onended = _ => {
        playButton.dataset.playing = "no";
    };

    document.querySelector("#fsButton").onclick = _ => {
        requestFullscreen(canvasElement);
    };

    document.querySelector("#tintRedCheck").checked = tintRed;
    document.querySelector("#tintRedCheck").onchange = e => {
        tintRed = e.target.checked;
    };
    document.querySelector("#invertCheck").checked = invert;
    document.querySelector("#invertCheck").onchange = e => {
        invert = e.target.checked;
    };
    document.querySelector("#noiseCheck").checked = noise;
    document.querySelector("#noiseCheck").onchange = e => {
        noise = e.target.checked;
    };
    document.querySelector("#sepiaCheck").checked = sepia;
    document.querySelector("#sepiaCheck").onchange = e => {
        sepia = e.target.checked;
    };
}
I can't seem to find any difference between the two, and I don't know much about debugging methods. I apologize if this isn't the kind of question that will be very relevant to people in the future, but any help on the matter would be greatly appreciated.
Answer 0 (score: 0)
To play the media when the media element is passed to .createMediaElementSource(), use fetch() in setupUI to get the resource as a Blob with Body.blob(), then use URL.createObjectURL() in the chained .then() to create a Blob URL for the resource, which avoids the error

MediaElementAudioSource outputs zeroes due to CORS access restrictions for https://path/to/resource
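As a rough illustration of that suggestion (the loadTrack helper name is my own, and this assumes the audioElement and SOUND_PATH variables from the question are in scope):

function loadTrack(url) {
    // request the media file and read the response body as a Blob
    return fetch(url)
        .then(response => response.blob())
        .then(blob => {
            // a Blob URL is same-origin, so the MediaElementAudioSource
            // is not muted by CORS restrictions
            audioElement.src = URL.createObjectURL(blob);
        });
}

// e.g. instead of assigning SOUND_PATH.sound1 directly in setupWebAudio():
loadTrack(SOUND_PATH.sound1);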
Also, move .play() outside of the if statement below, or set e.target.dataset.playing to "no" before the user clicks "Play":

if (e.target.dataset.playing == "no") {}
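For instance, a minimal sketch of the second option, initializing the attribute when the button is looked up in setupUI (everything else is unchanged from the question's handler):

playButton = document.querySelector("#playButton");
// make sure the first click falls into the "no" branch
playButton.dataset.playing = "no";
playButton.onclick = e => {
    // resume the context if the autoplay policy suspended it
    if (audioCtx.state == "suspended") {
        audioCtx.resume();
    }
    if (e.target.dataset.playing == "no") {
        audioElement.play();
        e.target.dataset.playing = "yes";
    } else {
        audioElement.pause();
        e.target.dataset.playing = "no";
    }
};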
Separately, the following line

shapes[myShapeIndex].rot = myRotationList.childNodes.item(1).childNodes.item(0).value;

logs the error

(index):381 Uncaught TypeError: Cannot read property 'childNodes' of null
    at valueLink ((index):381)
    at loop ((index):249)

though the media does play.
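That TypeError means myRotationList is null at the time loop() runs. One possible guard, sketched under the assumption that myRotationList refers to an element that may not have been created yet:

// skip the update until the rotation list element actually exists
if (myRotationList && myRotationList.childNodes.item(1)) {
    shapes[myShapeIndex].rot = myRotationList.childNodes.item(1).childNodes.item(0).value;
}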