I'm trying to create an audio visualizer that pulses along with whatever audio is playing. I've built a 3D model with three.js that currently pulses at random, but I'm having trouble figuring out how to get at the audio file itself.
I'm using this repo as a guide, but I'm stuck on its App.js file. That code takes its input from the user's microphone, and I'm having trouble working through the Web Audio API docs to read from an audio file instead.
How do I point the app at a file rather than the microphone for its audio?
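From my reading of the MDN docs so far, the file-based counterpart to createMediaStreamSource seems to be createMediaElementSource, which wraps an existing audio element. Here's a minimal plain-JS sketch of what I think the wiring looks like (the element id is just a placeholder from my project); I'm not sure this is right, which is partly what I'm asking:

// Sketch based on the MDN Web Audio API docs: analyse an <audio> element
// instead of a microphone stream.
const audioElement = document.getElementById('audio-element');
const audioContext = new (window.AudioContext || window.webkitAudioContext)();

// createMediaElementSource wraps a media element, where
// createMediaStreamSource would wrap a getUserMedia() stream.
const source = audioContext.createMediaElementSource(audioElement);

const analyser = audioContext.createAnalyser();
source.connect(analyser);

// Re-route the sound to the speakers; once an element feeds the graph,
// it no longer plays through the default output on its own.
analyser.connect(audioContext.destination);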
Edit:
audio.js
import React, { Component } from 'react';
import AudioAnalyser from './audioAnalyser';
import song from './Teehee.mp3';

class Audio extends Component {
  constructor(props) {
    super(props);
    this.state = {
      audioStatus: 'PAUSED'
    };
    this.audioEle = null;
    this.songName = 'Blues in A';
  }

  componentDidMount() {
    this.audioEle = document.getElementById('audio-element');
    // Autoplay as soon as the file can play, except on Safari,
    // which blocks playback that isn't user-initiated.
    this.audioEle.oncanplay = (e) => {
      if (this.isSafari()) {
        return;
      }
      this.play();
    };
  }

  isSafari = () => {
    return window.navigator.userAgent.indexOf('Safari') > -1 && window.navigator.userAgent.indexOf('Chrome') === -1;
  }

  pause = () => {
    this.audioEle.pause();
    this.setState({
      audioStatus: 'PAUSED'
    });
  }

  play = () => {
    this.audioEle.play();
    this.setState({
      audioStatus: 'PLAYING'
    });
  }

  toggleMusic = () => {
    if (this.state.audioStatus === 'PLAYING') {
      this.pause();
    } else {
      this.play();
    }
  }

  render() {
    return (
      <div className="AudioPlayer">
        <audio id="audio-element" preload="auto" src={song} crossOrigin="anonymous"></audio>
        <div className="controls">
          <button onClick={this.toggleMusic}>
            {this.state.audioStatus === 'PLAYING' ? 'PLAYING' : 'PAUSED'}
          </button>
        </div>
        {this.state.audioStatus === 'PLAYING' ? <AudioAnalyser audio={this.state.audioStatus} /> : ''}
      </div>
    );
  }
}

export default Audio;
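One thing I suspect is wrong in the file above: I hand AudioAnalyser the string this.state.audioStatus as its audio prop, when the analyser presumably needs the audio element itself. Here's a sketch of how render() might pass the element instead, using a callback ref in place of the getElementById lookup in componentDidMount (untested, just my guess):

render() {
  return (
    <div className="AudioPlayer">
      {/* The callback ref replaces the getElementById call in componentDidMount. */}
      <audio ref={(el) => { this.audioEle = el; }} preload="auto" src={song} crossOrigin="anonymous"></audio>
      <div className="controls">
        <button onClick={this.toggleMusic}>
          {this.state.audioStatus === 'PLAYING' ? 'PLAYING' : 'PAUSED'}
        </button>
      </div>
      {/* play() calls setState, so this re-renders after the ref has been set. */}
      {this.state.audioStatus === 'PLAYING' ? <AudioAnalyser audio={this.audioEle} /> : null}
    </div>
  );
}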
audioAnalyser.js
/*
  This component analyses an audio source using the Web Audio API. More information:
  https://developer.mozilla.org/en-US/docs/Web/API/Web_Audio_API
  https://www.twilio.com/blog/audio-visualisation-web-audio-api--react
*/
import React, { Component } from 'react';
import AudioVisualiser from './audioVisualizer';

class AudioAnalyser extends Component {
  constructor(props) {
    super(props);
    this.state = { audioData: new Uint8Array(0) };
    this.tick = this.tick.bind(this);
  }

  // When the component mounts, set up the Web Audio API objects.
  componentDidMount() {
    // The AudioContext interface represents an audio-processing graph built from audio modules linked together, each represented by an AudioNode.
    this.audioContext = new (window.AudioContext || window.webkitAudioContext)();
    // The AnalyserNode interface represents a node able to provide real-time frequency and time-domain analysis information.
    this.analyser = this.audioContext.createAnalyser();
    // This dataArray will be used to store the waveform data that the AnalyserNode will be creating.
    this.dataArray = new Uint8Array(this.analyser.frequencyBinCount);
    // The source, which should be the audio file (this is the part I can't get right).
    this.source = this.audioContext.createMediaStreamSource(this.props.audio);
    // Connect the source to the analyser.
    this.source.connect(this.analyser);
    // Start the update loop.
    this.rafId = requestAnimationFrame(this.tick);
  }

  // Method that will be called every time requestAnimationFrame runs.
  // It copies the current waveform, as an array of integers, from the AnalyserNode into dataArray,
  // updates the audioData property in the component's state with dataArray,
  // and finally calls requestAnimationFrame again to request the next update.
  tick() {
    // Copy the current time-domain (waveform) data into dataArray.
    this.analyser.getByteTimeDomainData(this.dataArray);
    // Push the new data into state so the visualiser re-renders.
    this.setState({ audioData: this.dataArray });
    this.rafId = requestAnimationFrame(this.tick);
  }

  componentWillUnmount() {
    cancelAnimationFrame(this.rafId);
    this.analyser.disconnect();
    this.source.disconnect();
  }

  render() {
    return <AudioVisualiser audioData={this.state.audioData} />;
  }
}

export default AudioAnalyser;
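And if the audio prop were the audio element itself, I imagine componentDidMount here would swap createMediaStreamSource for createMediaElementSource, roughly like this (again untested):

componentDidMount() {
  this.audioContext = new (window.AudioContext || window.webkitAudioContext)();
  this.analyser = this.audioContext.createAnalyser();
  this.dataArray = new Uint8Array(this.analyser.frequencyBinCount);
  // Assumes this.props.audio is the <audio> element, not a MediaStream.
  this.source = this.audioContext.createMediaElementSource(this.props.audio);
  this.source.connect(this.analyser);
  // Keep the track audible: a media element stops feeding the default
  // output once it is wired into the graph.
  this.analyser.connect(this.audioContext.destination);
  this.rafId = requestAnimationFrame(this.tick);
}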