Based on a number of resources found on the internet, I am trying to build a simple web page where I can record something (my voice), produce an mp3 file from the recording, and finally upload that file to a server.
At this point I can record and play back, but I have not gotten to the upload yet, and it seems I cannot even produce the mp3 file locally. Can someone tell me what I am doing wrong, or whether I am doing things in the wrong order?
Below is all the code I have so far.
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <meta http-equiv="X-UA-Compatible" content="ie=edge">
    <title>Document</title>
</head>
<body>
    <div>
        <h2>Audio record and playback</h2>
        <p>
            <button id=startRecord><h3>Start</h3></button>
            <button id=stopRecord disabled><h3>Stop</h3></button>
            <audio id="player" controls></audio>
            <a id=audioDownload></a>
        </p>
    </div>
    <script>
        var player = document.getElementById('player');
        var handleSuccess = function(stream) {
            rec = new MediaRecorder(stream);
            rec.ondataavailable = e => {
                audioChunks.push(e.data);
                if (rec.state == "inactive") {
                    let blob = new Blob(audioChunks, {type: 'audio/x-mpeg-3'});
                    player.src = URL.createObjectURL(blob);
                    player.controls = true;
                    player.autoplay = true;
                    // audioDownload.href = player.src;
                    // audioDownload.download = 'sound.data';
                    // audioDownload.innerHTML = 'Download';
                    mp3Build();
                }
            }
            player.src = stream;
        };
        navigator.mediaDevices.getUserMedia({audio: true /*, video: false */})
            .then(handleSuccess);
        startRecord.onclick = e => {
            startRecord.disabled = true;
            stopRecord.disabled = false;
            audioChunks = [];
            rec.start();
        }
        stopRecord.onclick = e => {
            startRecord.disabled = false;
            stopRecord.disabled = true;
            rec.stop();
        }
        var ffmpeg = require('ffmpeg');
        function mp3Build() {
            try {
                var process = new ffmpeg('sound.data');
                process.then(function (audio) {
                    // Callback mode.
                    audio.fnExtractSoundToMP3('sound.mp3', function (error, file) {
                        if (!error) {
                            console.log('Audio file: ' + file);
                            audioDownload.href = player.src;
                            audioDownload.download = 'sound.mp3';
                            audioDownload.innerHTML = 'Download';
                        } else {
                            console.log('Error-fnExtractSoundToMP3: ' + error);
                        }
                    });
                }, function (err) {
                    console.log('Error: ' + err);
                });
            } catch (e) {
                console.log(e.code);
                console.log(e.msg);
            }
        }
    </script>
</body>
</html>
When I try to investigate with the debugger in the web console to see what is going on, on the line:
var process = new ffmpeg('sound.data');
I get this message:
Paused on exception
TypeError: ffmpeg is not a constructor.
and then on the line:
var ffmpeg = require('ffmpeg');
I get this message:
Paused on exception
ReferenceError: require is not defined.
When I watch the ffmpeg expression, I can see:
ffmpeg: undefined
After further investigation, and after using browserify, I now use the following code:
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <meta http-equiv="X-UA-Compatible" content="ie=edge">
    <title>Document</title>
</head>
<body>
    <div>
        <h2>Audio record and playback</h2>
        <p>
            <button id=startRecord><h3>Start</h3></button>
            <button id=stopRecord disabled><h3>Stop</h3></button>
            <audio id="player" controls></audio>
            <a id=audioDownload></a>
        </p>
    </div>
    <script src="bundle.js"></script>
    <script>
        var player = document.getElementById('player');
        var handleSuccess = function(stream) {
            rec = new MediaRecorder(stream);
            rec.ondataavailable = e => {
                if (rec.state == "inactive") {
                    let blob = new Blob(audioChunks, {type: 'audio/x-mpeg-3'});
                    //player.src = URL.createObjectURL(blob);
                    //player.srcObject = URL.createObjectURL(blob);
                    //player.srcObject = blob;
                    player.srcObject = stream;
                    player.controls = true;
                    player.autoplay = true;
                    // audioDownload.href = player.src;
                    // audioDownload.download = 'sound.data';
                    // audioDownload.innerHTML = 'Download';
                    mp3Build();
                }
            }
            //player.src = stream;
            player.srcObject = stream;
        };
        navigator.mediaDevices.getUserMedia({audio: true /*, video: false */})
            .then(handleSuccess);
        startRecord.onclick = e => {
            startRecord.disabled = true;
            stopRecord.disabled = false;
            audioChunks = [];
            rec.start();
        }
        stopRecord.onclick = e => {
            startRecord.disabled = false;
            stopRecord.disabled = true;
            rec.stop();
        }
        var ffmpeg = require('ffmpeg');
        function mp3Build() {
            try {
                var process = new ffmpeg('sound.data');
                process.then(function (audio) {
                    // Callback mode.
                    audio.fnExtractSoundToMP3('sound.mp3', function (error, file) {
                        if (!error) {
                            console.log('Audio file: ' + file);
                            //audioDownload.href = player.src;
                            audioDownload.href = player.srcObject;
                            audioDownload.download = 'sound.mp3';
                            audioDownload.innerHTML = 'Download';
                        } else {
                            console.log('Error-fnExtractSoundToMP3: ' + error);
                        }
                    });
                }, function (err) {
                    console.log('Error: ' + err);
                });
            } catch (e) {
                console.log(e.code);
                console.log(e.msg);
            }
        }
    </script>
</body>
</html>
This solved the problem of the expression ffmpeg being undefined, but playback no longer works. I am probably not doing the right thing with player.srcObject, and maybe a few other things as well.
When I use this line:
player.srcObject = URL.createObjectURL(blob);
I get this message:
Paused on exception
TypeError: Value being assigned to HTMLMediaElement.srcObject is not an object.
When I use this line:
player.srcObject = blob;
I get this message:
Paused on exception
TypeError: Value being assigned to HTMLMediaElement.srcObject does not implement interface MediaStream.
Finally, if I use this:
player.srcObject = stream;
I get no error message, but the voice recording still does not work.
Answer (score: 1)
require does not work in the browser.
You should use browserify; this is its GitHub page: https://github.com/browserify/browserify
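For reference, this is roughly the usual browserify workflow; the entry-file name main.js below is only an assumption about how the bundle.js used in the second snippet might be produced, not something taken from the original post:

    // Build step (run in a terminal, not in the browser):
    //   npm install ffmpeg
    //   npm install -g browserify
    //   browserify main.js -o bundle.js
    // main.js (hypothetical entry file bundled into bundle.js):
    var ffmpeg = require('ffmpeg');   // resolved by browserify at build time
    window.ffmpeg = ffmpeg;           // expose it so the inline <script> can use it without require()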
It is also better (if not necessary) to use a MediaRecorder audio type that the browser supports in your new Blob; check this link: https://developer.mozilla.org/en-US/docs/Web/API/MediaRecorder/isTypeSupported
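A minimal sketch of how such a check could look (the MIME types listed are just common candidates, not a claim about what any given browser supports):

    // Pick a container/codec the current browser can actually record.
    var candidates = ['audio/webm;codecs=opus', 'audio/webm', 'audio/ogg;codecs=opus', 'audio/mp4'];
    var mimeType = candidates.find(t => MediaRecorder.isTypeSupported(t)) || '';
    rec = new MediaRecorder(stream, mimeType ? { mimeType: mimeType } : undefined);
    // ...and use the same type when assembling the recording:
    // let blob = new Blob(audioChunks, { type: mimeType || 'audio/webm' });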
Finally, you do not need player.src = stream; in your code (I am talking about the second snippet); as the basic example at https://developer.mozilla.org/en-US/docs/Web/API/HTMLMediaElement/srcObject#Basic_example shows, assigning the stream to src gives you an error.
Here is a small edit:
var handleSuccess = function(stream) {
    rec = new MediaRecorder(stream);
    rec.ondataavailable = e => {
        if (rec.state == "inactive") {
            let blob = new Blob(audioChunks, {type: 'audio/x-mpeg-3'});
            player.src = URL.createObjectURL(blob);
            player.controls = true;
            player.autoplay = true;
            audioDownload.href = player.src;
            audioDownload.download = 'sound.data';
            audioDownload.innerHTML = 'Download';
            mp3Build();
        }
    }
    // No need to put anything here
};
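Note that srcObject is meant for live MediaStream objects (for example, monitoring the microphone while recording), while a recorded Blob is played back by assigning URL.createObjectURL(blob) to src, which is why the edit above goes back to player.src.

The question also mentions uploading the file to a server, which none of the snippets cover yet. A minimal sketch, assuming a server endpoint at /upload (hypothetical) that accepts multipart form data:

    // Call this from the rec.state == "inactive" branch once the blob has been built.
    function uploadRecording(blob) {
        var form = new FormData();
        // the third argument sets the file name the server will see
        form.append('audio', blob, 'recording.webm');
        fetch('/upload', { method: 'POST', body: form })
            .then(function (res) { console.log('Upload status: ' + res.status); })
            .catch(function (err) { console.log('Upload failed: ' + err); });
    }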