I am building a video application. I use `getUserMedia` to access the microphone, and I send the PCM data to the server with the following code:
var bufferSize = 4096;
// Create the audio context
audioContext = new (window.AudioContext || window.webkitAudioContext || window.mozAudioContext)();
volume = audioContext.createGain();
// stream is the MediaStream obtained from getUserMedia
audioInput = audioContext.createMediaStreamSource(stream);
audioInput.connect(volume);
// Mono in, mono out, 4096-sample chunks
processor = audioContext.createScriptProcessor(bufferSize, 1, 1);
processor.onaudioprocess = function (e) {
    if (e === undefined) return;
    var left = e.inputBuffer.getChannelData(0);
    // Skip empty or silent chunks, then send the raw Float32 PCM bytes
    if (left.buffer instanceof ArrayBuffer && left.length > 0 && left[0] != 0) {
        if (ws && ws.readyState == WebSocket.OPEN) {
            ws.send(left.buffer);
        }
    }
};
volume.connect(processor);
processor.connect(audioContext.destination);
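For context, the `stream` variable used above comes from a `getUserMedia` call along these lines (simplified sketch; `startStreaming` is just a placeholder name for the code shown above):

navigator.mediaDevices.getUserMedia({ audio: true })
    .then(function (stream) {
        // Run the capture/send code above with this stream
        startStreaming(stream);
    })
    .catch(function (err) {
        console.error('Microphone access denied:', err);
    });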
This works flawlessly. However, I cannot play back the audio that comes back from the server. The client sends the server a Float32Array containing the PCM audio captured from the local microphone. This is the code that plays the audio when PCM data is received back from the server:
function PlayAudio(param) {
    // Create the audio context
    var audioCtx = new (window.AudioContext || window.webkitAudioContext)();
    // One mono buffer holding a single 4096-sample chunk
    // at the sample rate of the AudioContext
    var bufferSize = 4096;
    var myArrayBuffer = audioCtx.createBuffer(1, bufferSize, audioCtx.sampleRate);
    var nowBuffering = myArrayBuffer.getChannelData(0);
    // Copy the received samples into the buffer
    for (var i = 0, len = param.length; i < len; i++) {
        nowBuffering[i] = param[i];
    }
    // Create a buffer source to play the audio
    var source = audioCtx.createBufferSource();
    source.buffer = myArrayBuffer;
    // Connect to the output and start playing
    source.connect(audioCtx.destination);
    source.start();
}
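For completeness, PlayAudio is called from the socket's message handler, along these lines (a simplified sketch of the receive path, not the exact code):

ws.binaryType = 'arraybuffer'; // receive binary frames as ArrayBuffer, not Blob
ws.onmessage = function (event) {
    // Wrap the raw bytes back into a Float32Array before playback
    PlayAudio(new Float32Array(event.data));
};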
I am testing the server with a single client. The server simply echoes the PCM data it receives from the client, and the data is sent and received correctly. But when I play the audio back, I hear only noise instead of the voice I recorded.
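The echo logic itself is trivial; a minimal sketch of what the server does (assuming Node.js with the ws package, not shown above):

const WebSocket = require('ws');
const wss = new WebSocket.Server({ port: 8080 });
wss.on('connection', function (socket) {
    socket.on('message', function (data) {
        // Send the binary PCM frame straight back to the sender
        socket.send(data);
    });
});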
I can't figure out what I am doing wrong. Any help is appreciated.