Web Audio API: stopping a sound gracefully

Posted: 2017-01-06 17:59:51

Tags: javascript audio web-audio

The Web Audio API provides the .stop() method to stop a sound. I want my sound to fade down in volume before it stops, so I used a gain node. However, I'm running into some strange issues where some of the sounds don't play, and I can't figure out why.

Here is a dumbed-down version of what I'm doing:

https://jsfiddle.net/01p1t09n/1/

If you remove the line with setTimeout(), you will hear all the sounds play. With the setTimeout, not every sound plays. What puzzles me is that I push and shift accordingly to find the right sound source, yet it seems that a different one stops playing. The only way I can see this happening is if AudioContext.decodeAudioData isn't synchronous. Just try the jsfiddle to understand it better, and definitely wear headphones.

Here is the code from the jsfiddle:

  let url = "https://raw.githubusercontent.com/gleitz/midi-js-soundfonts/gh-pages/MusyngKite/acoustic_guitar_steel-mp3/A4.mp3";
  let soundContainer = {};
  let notesMap = {"A4": [] };
  let _AudioContext_ = AudioContext || webkitAudioContext;
  let audioContext = new _AudioContext_();

  var oReq = new XMLHttpRequest();
  oReq.open("GET", url, true);
  oReq.responseType = "arraybuffer";
  oReq.onload = function (oEvent) {
    var arrayBuffer = oReq.response; 
    makeLoop(arrayBuffer);
  };
  oReq.send(null);

  function makeLoop(arrayBuffer){
    soundContainer["A4"] = arrayBuffer;
    let currentTime = audioContext.currentTime;
    for(let i = 0; i < 10; i++){
      // playing at evenly spaced intervals
      play("A4", currentTime + i * 0.5);
      setTimeout( () => stop("A4"), 500 + i * 500); // remove this line and you will hear all the sounds
    }
  }

  function play(notePlayed, start) {
    audioContext.decodeAudioData(soundContainer[notePlayed], (buffer) => {
      let source = audioContext.createBufferSource();
      let gainNode = audioContext.createGain();
      // pushing the note into the note map
      notesMap[notePlayed].push({ source, gainNode });
      source.buffer = buffer;
      source.connect(gainNode);
      gainNode.connect(audioContext.destination);
      gainNode.gain.value = 1;
      source.start(start);
    });
  }

  function stop(notePlayed){
    let note = notesMap[notePlayed].shift();
    note.source.stop();
  }
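A side note on that last point: decodeAudioData really is asynchronous, so the callbacks in play() are not guaranteed to finish in the order the calls were made. One common pattern that sidesteps this (shown only as a sketch; loadNote and playDecoded are hypothetical helper names, not part of the fiddle) is to decode each ArrayBuffer a single time and reuse the resulting AudioBuffer for every playback:

  let decodedBuffers = {}; // note name -> decoded AudioBuffer

  function loadNote(noteName, arrayBuffer, onReady) {
    // decodeAudioData is asynchronous; decode each sound once, up front
    audioContext.decodeAudioData(arrayBuffer, (audioBuffer) => {
      decodedBuffers[noteName] = audioBuffer;
      onReady();
    });
  }

  function playDecoded(noteName, start) {
    // a decoded AudioBuffer can feed any number of AudioBufferSourceNodes
    let source = audioContext.createBufferSource();
    let gainNode = audioContext.createGain();
    source.buffer = decodedBuffers[noteName];
    source.connect(gainNode);
    gainNode.connect(audioContext.destination);
    notesMap[noteName].push({ source, gainNode });
    source.start(start);
  }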

This next part is only here to explain why I'm doing it this way; you can skip it, it just explains why I don't simply use stop().

The reason I'm doing all of this is that I want to stop the sounds gracefully, so if there is a way to do it without setTimeout, I'd gladly take it.

Basically, at the top I have a map containing my sounds (notes such as A1, A#1, B1, and so on).

soundMap = {"A": [], "lot": [], "of": [], "sounds": []};

In the play() function, I fill the array after playing a sound:

  play(sound) { 
    // sound is just { soundName, velocity, start}   
    let source; 
    let gainNode; 
    // sound container is just a map from soundname to the sound data.
    this.audioContext.decodeAudioData(this.soundContainer[sound.soundName], (buffer) => {
      source = this.audioContext.createBufferSource(); 
      gainNode = this.audioContext.createGain();
      gainNode.gain.value = sound.velocity;
      // pushing sound in sound map
      this.soundMap[sound.soundName].push({ source, gainNode });
      source.buffer = buffer;                   
      source.connect(gainNode);
      gainNode.connect(this.audioContext.destination);
      source.start(sound.start);
     });
  }
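For reference, a call to this play() might look like the following (the note name and velocity are just placeholder values):

  this.play({ soundName: "A4", velocity: 0.8, start: this.audioContext.currentTime });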

Now the part that stops the sound:

  stop(sound){   
    //remember above, soundMap is a map from "soundName" to {gain, source} 
    let dasound = this.soundMap[sound.soundName].shift();
    let gain = dasound.gainNode.gain.value - 0.1;

    // we lower the gain via incremental values to not have the sound stop abruptly
    let i = 0;
    for(; gain > 0; i++, gain -= 0.1){ // watchout funky syntax
      ((gain, i) => {
        setTimeout(() => dasound.gainNode.gain.value = gain, 50 * i );
      })(gain, i)
    }
    // we stop the source after the gain has been stepped down to 0
    // (the setTimeout delay is in ms, while source.stop() itself works in seconds)
    setTimeout(() => dasound.source.stop(), i * 50);
  }
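Note that source.stop() also accepts an optional when argument, given in seconds on the AudioContext clock, so the final stop itself doesn't need a setTimeout. A minimal sketch of just that last step, keeping the fade loop above unchanged:

    // schedule the stop on the audio clock instead of a timer;
    // stop(when) takes seconds, while setTimeout takes milliseconds
    let fadeSeconds = (i * 50) / 1000;
    dasound.source.stop(this.audioContext.currentTime + fadeSeconds);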

1 Answer:

Answer 0 (score: 1)

Ah, yes, yes, yes! I finally found it, after finally bothering to read "everything" in the docs (diagonally). Let me tell you, this API is a rough diamond. Anyway, they actually have the AudioParam I wanted:

The AudioParam interface represents an audio-related parameter, usually a parameter of an AudioNode (such as GainNode.gain). An AudioParam can be set to a specific value or a change in value, and can be scheduled to happen at a specific time and following a specific pattern.

It has a function called linearRampToValueAtTime().

They even have an example of exactly what I was asking about!

// create audio context
var AudioContext = window.AudioContext || window.webkitAudioContext;
var audioCtx = new AudioContext();

// set basic variables for example
var myAudio = document.querySelector('audio');
var pre = document.querySelector('pre');
var myScript = document.querySelector('script');

pre.innerHTML = myScript.innerHTML;

var linearRampPlus = document.querySelector('.linear-ramp-plus');
var linearRampMinus = document.querySelector('.linear-ramp-minus');

// Create a MediaElementAudioSourceNode
// Feed the HTMLMediaElement into it
var source = audioCtx.createMediaElementSource(myAudio);

// Create a gain node and set its initial gain value to 0
var gainNode = audioCtx.createGain();

// connect the MediaElementAudioSourceNode to the gainNode
// and the gainNode to the destination
gainNode.gain.setValueAtTime(0, audioCtx.currentTime);
source.connect(gainNode);
gainNode.connect(audioCtx.destination);

// set buttons to do something onclick
linearRampPlus.onclick = function() {
  gainNode.gain.linearRampToValueAtTime(1.0, audioCtx.currentTime + 2);
}

linearRampMinus.onclick = function() {
  gainNode.gain.linearRampToValueAtTime(0, audioCtx.currentTime + 2);
}

Working example here

They also have different kinds of timing curves, like an exponential instead of a linear ramp, which I guess would fit this case even better.
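Applied to the stop() function from the question, a sketch could look like this (the one-second fade length is arbitrary; note that exponentialRampToValueAtTime cannot ramp all the way down to 0, so the exponential variant targets a tiny value instead):

stop(sound) {
  let dasound = this.soundMap[sound.soundName].shift();
  let now = this.audioContext.currentTime;
  let fade = 1.0; // fade-out length in seconds, arbitrary

  // pin the current gain so the ramp starts from it, then ramp down to 0
  dasound.gainNode.gain.setValueAtTime(dasound.gainNode.gain.value, now);
  dasound.gainNode.gain.linearRampToValueAtTime(0, now + fade);
  // exponential variant: the target value must be non-zero
  // dasound.gainNode.gain.exponentialRampToValueAtTime(0.001, now + fade);

  // stop the source once the fade is done; stop() takes a time in seconds
  dasound.source.stop(now + fade);
}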