How do I control the audio from AudioContext.createAnalyser()?

Time: 2017-11-05 04:18:56

Tags: javascript audio-streaming getusermedia rtcmulticonnection

I am using WebRTC and I receive the stream correctly. If I send it straight to the browser everything works fine, but when I try to inspect the frequencies with AudioContext.createAnalyser() the audio keeps playing and I can no longer control its volume. Here is the code:

function startUserMedia(stream) {
    var canvas, ctx, again, fbc_array, bars = 100, bar_x, bar_width, bar_height;
    var context = new AudioContext();
    var analyser = context.createAnalyser();

    source = context.createMediaStreamSource(stream);
    source.connect(analyser);
    analyser.connect(context.destination);

    canvas = document.getElementById("analyser");
    ctx = canvas.getContext("2d");

    frameLooper();

    function frameLooper() {
        window.requestAnimationFrame(frameLooper);
        fbc_array = new Uint8Array(analyser.frequencyBinCount);
        analyser.getByteFrequencyData(fbc_array);
        ctx.clearRect(0, 0, canvas.width, canvas.height);
        ctx.fillStyle = "rgb(30, 180, 255)";

        for (var i = 0; i < bars; i++) {
            bar_x = i * 3;
            bar_width = 2;
            bar_height = -(fbc_array[i] / 2);
            ctx.fillRect(bar_x, canvas.height, bar_width, bar_height);
        }
    }
}

Thanks in advance.

EDIT:

var connection = new RTCMultiConnection();
connection.socketURL = 'URL...';

connection.socketMessageEvent = 'message';
connection.session = { audio: true, video: false, oneway: true };
connection.mediaConstraints = { audio: true, video: false }
connection.sdpConstraints.mandatory = { OfferToReceiveAudio: false, OfferToReceiveVideo: false };

connection.onstream = function(event){

    var mediaElement = event.mediaElement;

    mediaElement.muted = true;
    mediaElement.volume = 1;
    mediaElement.id = event.streamid;
    $("#elementHtml").append(mediaElement);

    startUserMedia(event.stream);
};

1 Answer:

Answer 1 (score: 0)

I must admit I don't know the RTCMultiConnection library you are using, let alone its onstream handler, nor where event.mediaElement comes from, so you may have to experiment a bit.

But however this event.stream is linked to an actual MediaStream, I'll try to enumerate a few basic approaches.

  1. You want to control the output volume from the default controls of an on-screen MediaElement: in this case, set that MediaElement's srcObject to the MediaStream, do not mute it, and do not connect the AnalyserNode to the audio context's destination (a sketch adapting this to your onstream handler follows the snippet):

    starter.onclick = function(){
      this.parentNode.removeChild(this);
      getStream(onstream);
    };
    
    function onstream(stream) {
      // Set our in-doc-audio as the audio output
      // I don't know if your event.MediaStream could work as-is... You will have to try.
      var audio = document.querySelector('audio');
      audio.srcObject = stream;
      startUserMedia(stream);
      audio.play();
    }
    
    
    function startUserMedia(stream) {
      var canvas, ctx, again, fbc_array, bars = 100,
        bar_x, bar_width, bar_height;
      var context = new (window.AudioContext || window.webkitAudioContext)();
      var analyser = context.createAnalyser(),
          source = context.createMediaStreamSource(stream);
      source.connect(analyser);
      // In this case we don't connect to the audioCtx destination
      //analyser.connect(context.destination);
    
      canvas = document.getElementById("analyser");
      ctx = canvas.getContext("2d");
    
      frameLooper();
      function frameLooper() {
        window.requestAnimationFrame(frameLooper);
        fbc_array = new Uint8Array(analyser.frequencyBinCount);
        analyser.getByteFrequencyData(fbc_array);
        ctx.clearRect(0, 0, canvas.width, canvas.height);
        ctx.fillStyle = "rgb(30, 180, 255)";
        for (var i = 0; i < bars; i++) {
          bar_x = i * 3;
          bar_width = 2;
          bar_height = -(fbc_array[i] / 2);
          ctx.fillRect(bar_x, canvas.height, bar_width, bar_height);
        }
      }
    }
    
    // Snippet way to get a MediaStream
    function getStream(callback) {
      var aud = new Audio('https://dl.dropboxusercontent.com/s/8c9m92u1euqnkaz/GershwinWhiteman-RhapsodyInBluePart1.mp3?dl=03');
      aud.crossOrigin = true;
      aud.onloadedmetadata = function() {
      	var ctx = new (window.AudioContext || window.webkitAudioContext)(),
        src = ctx.createMediaElementSource(this),
        streamNode = ctx.createMediaStreamDestination();
        src.connect(streamNode);
        callback(streamNode.stream);
      };
      aud.play();
    }
    #starter ~ *{
        visibility: hidden;
    } 
    <button id="starter">start</button>
    <audio controls></audio>
    <canvas id="analyser"></canvas>
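
  Since I can't test RTCMultiConnection here, the following is only a minimal, untested sketch of how approach 1 could look in your own onstream handler; whether event.mediaElement accepts the incoming stream this way is an assumption you will have to verify:

    // Hypothetical adaptation of approach 1 to the onstream handler from the question.
    // Assumption: event.mediaElement is a regular <audio>/<video> element and
    // event.stream is the MediaStream it is playing.
    connection.onstream = function(event) {
      var mediaElement = event.mediaElement;
      // Keep the element audible so its built-in controls keep driving the volume
      mediaElement.muted = false;
      mediaElement.volume = 1;
      mediaElement.id = event.streamid;
      $("#elementHtml").append(mediaElement);

      // Analyse only: inside startUserMedia, do NOT call
      // analyser.connect(context.destination), otherwise the graph would play
      // a second copy of the audio that the element's controls cannot reach.
      startUserMedia(event.stream);
    };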

  2. You want to control the output volume from a custom input: then don't even use a MediaElement; just create a GainNode, connect the AnalyserNode to it, and connect it to the destination. To control the output volume you simply set the gainNode.gain value (a note on smoothing those changes follows the snippet):

      starter.onclick = function(){
        this.parentNode.removeChild(this);
        getStream(startUserMedia);
      };
      
      function startUserMedia(stream) {
        var canvas, ctx, again, fbc_array, bars = 100,
          bar_x, bar_width, bar_height;
        var context = new (window.AudioContext || window.webkitAudioContext)();
        var analyser = context.createAnalyser();
        // create a gainNode that will control our output volume
        var gainNode = context.createGain();
        // control it from our <input>
        vol.oninput = function(){
          gainNode.gain.value = this.value;
        };
        source = context.createMediaStreamSource(stream);
        source.connect(analyser);
        // In this case we do connect the analyser output to the gainNode
        analyser.connect(gainNode);
        // and the gainNode to the context's destination
        gainNode.connect(context.destination);
      
        canvas = document.getElementById("analyser");
        ctx = canvas.getContext("2d");
      
        frameLooper();
        function frameLooper() {
          window.requestAnimationFrame(frameLooper);
          fbc_array = new Uint8Array(analyser.frequencyBinCount);
          analyser.getByteFrequencyData(fbc_array);
          ctx.clearRect(0, 0, canvas.width, canvas.height);
          ctx.fillStyle = "rgb(30, 180, 255)";
          for (var i = 0; i < bars; i++) {
            bar_x = i * 3;
            bar_width = 2;
            bar_height = -(fbc_array[i] / 2);
            ctx.fillRect(bar_x, canvas.height, bar_width, bar_height);
          }
        }
      }
      
      // Snippet way to get a MediaStream
      function getStream(callback) {
        var aud = new Audio('https://dl.dropboxusercontent.com/s/8c9m92u1euqnkaz/GershwinWhiteman-RhapsodyInBluePart1.mp3?dl=03');
        aud.crossOrigin = true;
        aud.onloadedmetadata = function() {
        	var ctx = new (window.AudioContext || window.webkitAudioContext)(),
          src = ctx.createMediaElementSource(this),
          streamNode = ctx.createMediaStreamDestination();
          src.connect(streamNode);
          callback(streamNode.stream)
        };
        aud.play();
      }
      #starter ~ *{
          visibility: hidden;
      }
      <button id="starter">start</button>
      <label>volume: <input type="range" min="0" max="1" value="1" step="0.05" id="vol"></label><br>
      <canvas id="analyser"></canvas>
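
  One refinement that is not in the snippet above: writing gainNode.gain.value directly can produce audible clicks while the slider is dragged. A minimal sketch using the standard AudioParam automation methods instead, reusing the gainNode, vol and context variables from the snippet:

      vol.oninput = function(){
        // Ramp smoothly towards the new value instead of jumping instantly
        var now = context.currentTime;
        gainNode.gain.cancelScheduledValues(now);
        gainNode.gain.setTargetAtTime(parseFloat(this.value), now, 0.05);
      };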

  3. You want to control the input [and output] volume from a custom input: same as 2., except that you add another GainNode between the mediaStreamSource and the analyser (a mute-toggle sketch follows the snippet):

        starter.onclick = function(){
          this.parentNode.removeChild(this);
          getStream(startUserMedia);
        };
        
        function startUserMedia(stream) {
          var canvas, ctx, again, fbc_array, bars = 100,
            bar_x, bar_width, bar_height;
          var context = new (window.AudioContext || window.webkitAudioContext)();
          var analyser = context.createAnalyser();
          // create two gainNodes
          var gainNode_in = context.createGain();
          vol_in.oninput = function(){
            gainNode_in.gain.value = this.value;
          };
          var gainNode_out = context.createGain();
          vol_out.oninput = function(){
            gainNode_out.gain.value = this.value;
          };
          
          source = context.createMediaStreamSource(stream);
          source.connect(gainNode_in);  // connect to the input gainNode
          gainNode_in.connect(analyser);
          analyser.connect(gainNode_out);
          // and the gainNode to the context's destination
          gainNode_out.connect(context.destination);
        
          canvas = document.getElementById("analyser");
          ctx = canvas.getContext("2d");
        
          frameLooper();
          function frameLooper() {
            window.requestAnimationFrame(frameLooper);
            fbc_array = new Uint8Array(analyser.frequencyBinCount);
            analyser.getByteFrequencyData(fbc_array);
            ctx.clearRect(0, 0, canvas.width, canvas.height);
            ctx.fillStyle = "rgb(30, 180, 255)";
            for (var i = 0; i < bars; i++) {
              bar_x = i * 3;
              bar_width = 2;
              bar_height = -(fbc_array[i] / 2);
              ctx.fillRect(bar_x, canvas.height, bar_width, bar_height);
            }
          }
        }
        
        // Snippet way to get a MediaStream
        function getStream(callback) {
          var aud = new Audio('https://dl.dropboxusercontent.com/s/8c9m92u1euqnkaz/GershwinWhiteman-RhapsodyInBluePart1.mp3?dl=03');
          aud.crossOrigin = true;
          aud.onloadedmetadata = function() {
          	var ctx = new (window.AudioContext || window.webkitAudioContext)(),
            src = ctx.createMediaElementSource(this),
            streamNode = ctx.createMediaStreamDestination();
            src.connect(streamNode);
            callback(streamNode.stream)
          };
          aud.play();
        }
        #starter ~ *{
            visibility: hidden;
        }
        <button id="starter">start</button>
        <label>volume in: <input type="range" min="0" max="1" value="1" step="0.05" id="vol_in"></label><br>
        <label>volume out: <input type="range" min="0" max="1" value="1" step="0.05" id="vol_out"></label><br>
        <canvas id="analyser"></canvas>
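
  As a side note that is not part of the original answer, the same input gain can double as a mute switch; a minimal sketch assuming a hypothetical <button id="mute">mute</button> added next to the sliders:

        var lastGain = 1;
        mute.onclick = function(){
          if (gainNode_in.gain.value > 0) {
            lastGain = gainNode_in.gain.value; // remember the current level
            gainNode_in.gain.value = 0;        // silences both the analyser and the output
            this.textContent = 'unmute';
          } else {
            gainNode_in.gain.value = lastGain; // restore the previous level
            this.textContent = 'mute';
          }
        };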

  4. You want to control the input volume from the MediaElement: for a cross-browser experience you have to listen to the element's volumechange event and add a gainNode_in as in 3. (a sketch wiring this into your onstream handler follows the snippet):

          starter.onclick = function(){
            this.parentNode.removeChild(this);
            getStream(onstream);
          };
          
          function onstream(stream) {
            var audio = document.querySelector('audio');
            audio.srcObject = stream;
            startUserMedia(stream, audio);
            audio.play();
          }
          
          
          function startUserMedia(stream, audio) {
            var canvas, ctx, again, fbc_array, bars = 100,
              bar_x, bar_width, bar_height;
            var context = new (window.AudioContext || window.webkitAudioContext)();
            var analyser = context.createAnalyser(),
            source = context.createMediaStreamSource(stream),
            gainNode = context.createGain();
            
            audio.onvolumechange = function(){
              gainNode.gain.value = this.volume;
            };
            
            source.connect(gainNode);
            gainNode.connect(analyser);
          
            canvas = document.getElementById("analyser");
            ctx = canvas.getContext("2d");
          
            frameLooper();
            function frameLooper() {
              window.requestAnimationFrame(frameLooper);
              fbc_array = new Uint8Array(analyser.frequencyBinCount);
              analyser.getByteFrequencyData(fbc_array);
              ctx.clearRect(0, 0, canvas.width, canvas.height);
              ctx.fillStyle = "rgb(30, 180, 255)";
              for (var i = 0; i < bars; i++) {
                bar_x = i * 3;
                bar_width = 2;
                bar_height = -(fbc_array[i] / 2);
                ctx.fillRect(bar_x, canvas.height, bar_width, bar_height);
              }
            }
          }
          
          // Snippet way to get a MediaStream
          function getStream(callback) {
            var aud = new Audio('https://dl.dropboxusercontent.com/s/8c9m92u1euqnkaz/GershwinWhiteman-RhapsodyInBluePart1.mp3?dl=03');
            aud.crossOrigin = true;
            aud.onloadedmetadata = function() {
            	var ctx = new (window.AudioContext || window.webkitAudioContext)(),
              src = ctx.createMediaElementSource(this),
              streamNode = ctx.createMediaStreamDestination();
              src.connect(streamNode);
              callback(streamNode.stream)
            };
            aud.play();
          }
          #starter ~ *{
              visibility: hidden;
          } 
          <button id="starter">start</button>
          <audio controls></audio>
          <canvas id="analyser"></canvas>
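
  Back in your RTCMultiConnection code, approach 4 is probably the closest fit, since it keeps the element's volume control working while the visualisation follows it. A minimal, untested sketch (assuming event.mediaElement behaves like a normal <audio> element):

          connection.onstream = function(event) {
            var mediaElement = event.mediaElement;
            mediaElement.muted = false;          // let the element output the sound
            mediaElement.id = event.streamid;
            $("#elementHtml").append(mediaElement);
            // Pass the element along so its volume changes can drive the GainNode
            // feeding the analyser, as in startUserMedia(stream, audio) above.
            startUserMedia(event.stream, mediaElement);
          };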