音乐播放时如何显示频谱?

时间:2015-05-20 09:19:36

标签: html5 html5-canvas web-audio

我需要在播放音乐时显示频谱分析仪。 现在,spectrum(频谱)绘图与音频播放器是分开的。 如果文件就绪(在XHR加载之后)且歌曲正在播放,则同步绘制频谱。

  // Shared audio-graph state; the nodes are created and wired together
  // by setupAudioNodes() below and used by the other functions.
  var audioBuffer;
    var sourceNode;
    var analyser;
    var javascriptNode;
       // Audio context (with webkit fallback), interval id for the
       // "Loading..." ticker, and the demo MP3 to fetch.
       var actx = new(AudioContext || webkitAudioContext)(), tid,
    url = "https://cdn.rawgit.com/epistemex/free-music-for-test-and-demo/master/music/kf_colibris.mp3";

    // 2D context of the spectrum canvas, plus a vertical gradient
    // (white at the top through yellow and red to black at the bottom)
    // used as the fill style for the spectrum bars.
    var ctx = $("#canvas").get()[0].getContext("2d");
    var gradient = ctx.createLinearGradient(0,0,0,300);
    gradient.addColorStop(1,'#000000');
    gradient.addColorStop(0.75,'#ff0000');
    gradient.addColorStop(0.25,'#ffff00');
    gradient.addColorStop(0,'#ffffff');

    // Build the source -> analyser -> script-processor graph up front.
    setupAudioNodes();
    
    // Create the Web Audio nodes and wire the processing graph:
    //   source -> analyser -> script-processor -> destination
    // plus a direct source -> destination path so playback is audible.
    function setupAudioNodes() {
        // Analyser exposing frequency-domain data for the visualiser;
        // fftSize 512 yields 256 frequency bins.
        analyser = actx.createAnalyser();
        analyser.smoothingTimeConstant = 0.3;
        analyser.fftSize = 512;

        // ScriptProcessor whose onaudioprocess drives each repaint.
        javascriptNode = actx.createScriptProcessor(2048, 1, 1);

        // Buffer source feeding both the analyser chain and the speakers.
        sourceNode = actx.createBufferSource();

        // The script processor must reach the destination or the browser
        // never invokes its onaudioprocess callback.
        javascriptNode.connect(actx.destination);
        sourceNode.connect(analyser);
        analyser.connect(javascriptNode);
        sourceNode.connect(actx.destination);
    }

    // Attach the decoded AudioBuffer to the shared source node and begin
    // playback immediately (time offset 0).
    function playSound(buffer) {
        Object.assign(sourceNode, { buffer: buffer }).start(0);
    }

    // Report a failure from any async step (e.g. XHR load or decode) by
    // dumping the error object to the console.
    function onError(err) {
        console.log(err);
    }

    // Fired for every ScriptProcessor block (~2048 frames): sample the
    // analyser's current frequency-domain data and repaint the spectrum.
    javascriptNode.onaudioprocess = function() {
        var bins = new Uint8Array(analyser.frequencyBinCount);
        analyser.getByteFrequencyData(bins);

        // erase the previous frame, then draw with the shared gradient fill
        ctx.clearRect(0, 0, 1000, 325);
        ctx.fillStyle = gradient;
        drawSpectrum(bins);
    }


    // Render one bar per frequency bin: bar i is 3px wide at x = i*5,
    // with its top at y = 325 - value, using the current ctx.fillStyle.
    function drawSpectrum(array) {
        for (var i = 0; i < array.length; i++) {
            var value = array[i];
            // NOTE(review): the 325px bar height overshoots the canvas
            // bottom by up to 325px each frame; the overflow is clipped so
            // the visible result is fine, but a height of `value` would
            // avoid the overdraw.
            ctx.fillRect(i * 5, 325 - value, 3, 325);
        }
    }

// old draw --------------------------------------------------------


// STEP 1: Load audio file using AJAX ----------------------------------
// Kick off the download; decode() receives the raw ArrayBuffer.
loadXHR(url, decode);

// Animate the "Loading." status text with an extra dot every 500 ms
// until the download finishes (the interval is cleared in decode()).
tid = setInterval(function() {document.querySelector("div").innerHTML += "."}, 500);

// Download `url` as an ArrayBuffer and hand the response to `callback`
// on HTTP 200. Network errors, HTTP errors and synchronous setup
// failures are all logged to the console.
function loadXHR(url, callback) {
  try {
    var request = new XMLHttpRequest();
    request.open("GET", url);
    request.responseType = "arraybuffer";
    request.onerror = function() {
      console.log("Network error.");
    };
    request.onload = function() {
      if (request.status !== 200) {
        console.log("Loading error:" + request.statusText);
        return;
      }
      callback(request.response);
    };
    request.send();
  } catch (err) {
    console.log(err.message);
  }
}

// STEP 2: Decode the audio file ---------------------------------------
// Stop the loading ticker, show status, and decode the raw ArrayBuffer;
// on success split() receives the resulting AudioBuffer.
function decode(buffer) {
  clearInterval(tid);
  document.querySelector("div").innerHTML = "Decoding file...";
  // FIX: pass an error callback — without it a corrupt or unsupported
  // file failed silently and the UI hung on "Decoding file...".
  actx.decodeAudioData(buffer, split, onError);
}

// STEP 3: Split the buffer --------------------------------------------
// Wrap (a segment of) the decoded AudioBuffer as a WAV blob, attach it
// to an autoplaying <audio> element, and start spectrum playback.
function split(abuffer) {

  document.querySelector("div").innerHTML = "Splitting...";

  setTimeout(function() {   // to allow DOM to update status-text

    // FIX: the original declaration list was missing a comma after
    // `duration = abuffer.duration`, so ASI ended the `var` statement
    // there and `rate`, `segmentLen`, `count`, `offset` and `block`
    // leaked as implicit globals (a ReferenceError in strict mode).
    var channels = abuffer.numberOfChannels,
        duration = abuffer.duration,
        rate = abuffer.sampleRate,
        segmentLen = 10,
        count = Math.floor(duration / segmentLen),
        offset = 0,
        // block = 10 * rate;            // per-segment length (disabled)
        block = abuffer.length;          // currently: the whole buffer

    // Segment loop is disabled for now — one blob covers the whole buffer.
    // while(count--) {
      var url = URL.createObjectURL(bufferToWave(abuffer, offset, block));
      var audio = new Audio(url);
      audio.controls = true;
      audio.volume = 0.5;
      audio.autoplay = true;
      document.body.appendChild(audio);
      // offset += block;
    // }

    document.querySelector("div").innerHTML = "Ready!";
  }, 60);

  // NOTE(review): this plays the decoded buffer through the Web Audio
  // graph while the autoplaying <audio> element above plays the same
  // data again, so the track is heard twice — likely only one of the
  // two playback paths is intended.
  playSound(abuffer);
}

// Convert an AudioBuffer segment (`len` samples per channel starting at
// `offset`) to a Blob containing a 16-bit PCM RIFF/WAVE file.
//
//   abuffer - source AudioBuffer (any channel count / sample rate)
//   offset  - first source sample index to encode
//   len     - number of samples per channel to encode
//
// Returns a Blob of type "audio/wav".
function bufferToWave(abuffer, offset, len) {

  var numOfChan = abuffer.numberOfChannels,
      length = len * numOfChan * 2 + 44,         // data bytes + 44-byte header
      buffer = new ArrayBuffer(length),
      view = new DataView(buffer),
      channels = [], i, sample,
      pos = 0;                                   // write cursor, shared with helpers

  // write WAVE header (all multi-byte fields little-endian)
  setUint32(0x46464952);                         // "RIFF"
  setUint32(length - 8);                         // file length - 8
  setUint32(0x45564157);                         // "WAVE"

  setUint32(0x20746d66);                         // "fmt " chunk
  setUint32(16);                                 // chunk length = 16
  setUint16(1);                                  // format 1 = PCM (uncompressed)
  setUint16(numOfChan);
  setUint32(abuffer.sampleRate);
  setUint32(abuffer.sampleRate * 2 * numOfChan); // avg. bytes/sec
  setUint16(numOfChan * 2);                      // block-align
  setUint16(16);                                 // 16-bit (hardcoded in this demo)

  setUint32(0x61746164);                         // "data" - chunk
  setUint32(length - pos - 4);                   // data chunk length

  // cache the per-channel Float32 views
  for(i = 0; i < abuffer.numberOfChannels; i++)
    channels.push(abuffer.getChannelData(i));

  // write interleaved 16-bit signed samples
  while(pos < length) {
    for(i = 0; i < numOfChan; i++) {             // interleave channels
      sample = Math.max(-1, Math.min(1, channels[i][offset])); // clamp to [-1, 1]
      // FIX: the original `(0.5 + sample < 0 ? ...)` compared
      // (0.5 + sample) against 0 due to operator precedence, so samples
      // in [-0.5, 0) used the wrong scale factor and the intended
      // rounding never happened. Scale asymmetrically, round to nearest.
      sample = Math.round(sample < 0 ? sample * 32768 : sample * 32767);
      view.setInt16(pos, sample, true);          // update data chunk
      pos += 2;
    }
    offset++;                                    // next source sample
  }

  // wrap the raw bytes in a Blob
  return new Blob([buffer], {type: "audio/wav"});

  // little-endian writers that advance the shared cursor `pos`
  function setUint16(data) {
    view.setUint16(pos, data, true);
    pos += 2;
  }

  function setUint32(data) {
    view.setUint32(pos, data, true);
    pos += 4;
  }
}
<canvas id="canvas" width="1000" height="325" style="display: block;"></canvas>

<div>Loading.</div>

1 个答案:

答案 0 :(得分:0)

不要使用ScriptProcessorNode的onaudioprocess进行可视化更新 - 完全跳过ScriptProcessor,并使用requestAnimationFrame调用getByteFrequencyData并进行可视化更新。