I'm building an app where users talk to each other over their phone microphones in a game lobby. This has proven difficult.

I'm using Node.js with socket.io and socket.io-stream.

On the client I'm using the Web Audio API to capture the microphone (I'm not too worried about this part, since I'll eventually turn it into a native iOS app).
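As a side note, the prefixed navigator.getUserMedia used below is the legacy API; current browsers expose a promise-based form instead. A minimal sketch of the modern equivalent:

navigator.mediaDevices.getUserMedia({audio: true, video: false})
    .then(function(localMediaStream) {
        // same Web Audio setup as in the snippet below
    })
    .catch(function(err) {
        console.log("getUserMedia failed: " + err);
    });

The question's client code, using the legacy API: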
navigator.getUserMedia = (navigator.getUserMedia ||
                          navigator.webkitGetUserMedia ||
                          navigator.mozGetUserMedia ||
                          navigator.msGetUserMedia);

if (navigator.getUserMedia) {
    navigator.getUserMedia(
        // constraints
        {
            video: false,
            audio: true
        },
        // successCallback
        function(localMediaStream) {
            var audio = document.querySelector('audio');
            audio.src = window.URL.createObjectURL(localMediaStream);
            lcm = localMediaStream;

            // fall back to the prefixed constructor on older WebKit
            var AudioContext = window.AudioContext || window.webkitAudioContext;
            var context = new AudioContext();
            var audioInput = context.createMediaStreamSource(localMediaStream);
            var bufferSize = 2048;

            // create a script processor node
            var recorder = context.createScriptProcessor(bufferSize, 1, 1);
            // specify the processing function
            recorder.onaudioprocess = recorderProcess;
            // connect stream to our recorder
            audioInput.connect(recorder);
            // connect our recorder to the previous destination
            recorder.connect(context.destination);
        },
        // errorCallback
        function(err) {
            console.log("The following error occurred: " + err);
            $("video").remove();
            alert("@#");
        }
    );
} else {
    console.log("getUserMedia not supported");
}
function recorderProcess(e) {
    // runs once per 2048-sample buffer while the recorder is connected
    var left = e.inputBuffer.getChannelData(0);
    window.stream.write(convertFloat32ToInt16(left));
    //var f = $("#aud").attr("src");
    var src = window.URL.createObjectURL(lcm);
    ss(socket).emit('file', src, {size: src.size});
    ss.createBlobReadStream(src).pipe(window.stream);
    //ss.createReadStream(f).pipe(window.stream);
}
function convertFloat32ToInt16(buffer) {
    var l = buffer.length;
    var buf = new Int16Array(l);
    while (l--) {
        // clamp to [-1, 1] before scaling to signed 16-bit
        buf[l] = Math.max(-1, Math.min(1, buffer[l])) * 0x7FFF;
    }
    return buf.buffer;
}
});
ss(socket).on('back', function(stream, data) {
    //console.log(stream);
    var audio = document.querySelector('audio');
    audio.src = window.URL.createObjectURL(stream);
    console.log("getting mic data");
});
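For reference, socket.io-stream's client API is normally driven by creating a single stream up front and writing each converted buffer into it, rather than opening a new blob read stream on every onaudioprocess callback. A minimal sketch of that pattern (untested; ss.Buffer is assumed to be the Buffer shim the library's browser bundle exposes):

var micStream = ss.createStream();
ss(socket).emit('file', micStream, {name: 'mic.raw'});

recorder.onaudioprocess = function(e) {
    var left = e.inputBuffer.getChannelData(0);
    // wrap the Int16 PCM bytes so the stream can carry them as binary
    micStream.write(new ss.Buffer(new Uint8Array(convertFloat32ToInt16(left))));
};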
I can successfully hear myself speaking into the microphone. I'm using socket.io-stream to create a blob and upload it to my server:
index.ss(socket).on('file', function(stream, data) {
    console.log("getting stream");
    var filename = index.path.basename(data.name);
    //var myfs = index.fs.createWriteStream(filename);
    var fileWriter = new index.wav.FileWriter('demo.wav', {
        channels: 1,
        sampleRate: 48000,
        bitDepth: 16
    });
    // note: this pipes a brand-new, empty stream into the writer,
    // not the stream received from the client
    var streams = index.ss.createStream();
    streams.pipe(fileWriter);
    index.ss(socket).emit('back', fileWriter, {size: fileWriter.size});
});
I can't manage to write the stream to a file, or even to a temporary buffer, and then stream it back to the client so I can play (or "stream") the audio in real time. After a while the server crashes with an error saying the pipe is not writable.

Has anyone else run into this?
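For what it's worth, socket.io-stream's examples pipe the received stream itself into the writer; a minimal sketch along those lines, reusing the question's wav settings, aliases, and event names (an illustration, not a verified fix):

index.ss(socket).on('file', function(stream, data) {
    var fileWriter = new index.wav.FileWriter('demo.wav', {
        channels: 1,
        sampleRate: 48000,
        bitDepth: 16
    });
    // pipe the stream we actually received into the WAV writer...
    stream.pipe(fileWriter);

    // ...and echo the same audio back to the client on a second stream
    var out = index.ss.createStream();
    index.ss(socket).emit('back', out, {});
    stream.pipe(out);
});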
Answer (score: 1)
With the SFMediaStream library you can stream your microphone live from the browser with socket.io and a Node.js server. But the library still needs some improvement before being used in production.

For the presenter:
var mySocket = io("/", {transports: ['websocket']});

// Set latency to 100ms (must match the streamer)
var presenterMedia = new ScarletsMediaPresenter({
    audio: {
        channelCount: 1,
        echoCancellation: false
    }
}, 100);

// Every new client streamer must receive this header buffer data
presenterMedia.onRecordingReady = function(packet) {
    mySocket.emit('bufferHeader', packet);
};

// Send buffers to the server
presenterMedia.onBufferProcess = function(streamData) {
    mySocket.emit('stream', streamData);
};

presenterMedia.startRecording();
For the streamer:
var mySocket = io("/", {transports: ['websocket']});

// Set latency to 100ms (must match the presenter)
var audioStreamer = new ScarletsAudioBufferStreamer(100);
audioStreamer.playStream();

// The buffer header must be received before any audio buffers
mySocket.on('bufferHeader', function(packet) {
    audioStreamer.setBufferHeader(packet);
});

// Receive a buffer and play it
mySocket.on('stream', function(packet) {
    // audioStreamer.realtimeBufferPlay(packet);
    audioStreamer.receiveBuffer(packet);
});

// Request the buffer header
mySocket.emit('requestBufferHeader', '');
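These snippets cover only the two browser sides; the Node.js piece between them can be a plain socket.io relay. A minimal sketch under that assumption, mirroring the event names above (not part of the library itself):

var io = require('socket.io')(3000);
var bufferHeader = null;

io.on('connection', function(socket) {
    // the presenter sends the codec header once per recording
    socket.on('bufferHeader', function(packet) {
        bufferHeader = packet;
        socket.broadcast.emit('bufferHeader', packet);
    });

    // fan audio buffers out to every other connected client
    socket.on('stream', function(packet) {
        socket.broadcast.emit('stream', packet);
    });

    // late joiners ask for the header before they can start playback
    socket.on('requestBufferHeader', function() {
        if (bufferHeader) socket.emit('bufferHeader', bufferHeader);
    });
});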
Or you can test it from localhost with this example.