我在使用带有WebAudio的AudioContext生成MediaStream时遇到问题,以便使用WebRTC将其发送到另一个对等方。事实证明,当我生成MediaStream并尝试使用音频HTML标签或JS在本地和远程播放时,我听不到任何声音,但我确实收到了流。
这是我的代码段:
CreateTone =
function () {
var ringing = 'assets/tones/ringing.wav';
var request = new XMLHttpRequest();
request.open('GET', ringing , true);
request.responseType = 'arraybuffer';
request.onload = function () {
context.decodeAudioData(request.response, function (buffer) {
bufferAudio = buffer;
SendTone();
});
}
request.send();
};
SendTone =
function () {
var source = context.createBufferSource();
source.buffer = bufferAudio;
source.loop = true;
source.connect(context.destination);
//If I use here source.start(0); it will play
var remote = context.createMediaStreamDestination();
source.connect(remote);
var streamToSend= remote.stream;
//If I try to play this stream with an Audio element, I can't hear anything. When I send with pc.addMediaStream(streamToSend) and it's received by the other peer, it's still deaf
};
我是否错误地使用MediaStream?提前致谢