I want to play a collision sound whenever a collision happens in my game. How can I play an audio file multiple times for that?
Here is an example where the music plays continuously: Example. But how can I play a small mp3 file multiple times from an if condition?
Answer 0 (score: 1)
This is actually kind of a big question. I'd suggest using a library.
The Web Audio API is arguably the best way to play audio in the browser. Here's a good article on how to use it. Unfortunately it doesn't work in all browsers yet, which means you'll need some kind of fallback. It is supported in Chrome, Safari, and Firefox. IE support is coming but isn't there yet.
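The core idea, before the full library below, is that with the Web Audio API you decode the file once into an AudioBuffer and then create a fresh AudioBufferSourceNode each time you want to play it, so the same sound can be triggered as often as you like. Here's a minimal sketch of that idea (the path "sounds/hit.mp3" and the playHit name are just placeholders):
// Minimal Web Audio sketch: decode once, play many times.
// "sounds/hit.mp3" is a placeholder path - swap in your own file.
var context = new (window.AudioContext || window.webkitAudioContext)();
var hitBuffer = null;

var request = new XMLHttpRequest();
request.open("GET", "sounds/hit.mp3", true);
request.responseType = "arraybuffer";
request.onload = function() {
  context.decodeAudioData(request.response, function(decoded) {
    hitBuffer = decoded;                       // keep the decoded buffer around
  });
};
request.send();

function playHit() {
  if (!hitBuffer) { return; }                  // not loaded yet
  var src = context.createBufferSource();      // source nodes are one-shot and cheap
  src.buffer = hitBuffer;
  src.connect(context.destination);
  src.start(0);
}

// e.g. inside your game loop:
// if (collisionDetected) { playHit(); }
Here's one such library that wraps this up, with an Audio-tag fallback for browsers without Web Audio support: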
(function(global) {
var webAudioAPI = window.AudioContext || window.webkitAudioContext || window.mozAudioContext;
// To play a sound, simply call audio.playSound(id), where id is
// one of the keys of the g_sound_files array, e.g. "damage".
// options:
// startedOnTouchCallback: on iOS no sounds can be played unless at least one is first initiated during
// a user gesture. If a function is attached here it will be called when that user gesture has happened.
// This is useful for situations where sounds 'should' start right from the beginning
// even if the player has not touched the screen. In that case we put up a message, "touch the screen",
// and remove that message when we get this callback.
//
// callback: called when all the sounds have loaded.
var AudioManager = function(sounds, options) {
options = options || {};
var g_context;
var g_audioMgr;
var g_soundBank = {};
var g_canPlay = false;
var g_canPlayOgg;
var g_canPlayMp3;
var g_canPlayWav;
var g_canPlayAif;
var g_createFromFileFn;
var changeExt = function(filename, ext) {
return filename.substring(0, filename.length - 3) + ext;
};
this.needUserGesture = (function() {
var iOS = ( navigator.userAgent.match(/(iPad|iPhone|iPod)/g) ? true : false );
var needUserGesture = iOS;
return function() {
return needUserGesture;
};
}());
var WebAudioBuffer = function() {
};
WebAudioBuffer.prototype.play = function(opt_when, opt_loop) {
if (!this.buffer) {
console.log(this.name, " not loaded");
return;
}
var src = g_context.createBufferSource();
src.buffer = this.buffer;
src.loop = opt_loop || false;
src.connect(g_context.destination);
if (src.start) {
src.start(opt_when);
} else {
src.noteOn(opt_when);
}
return src;
};
function WebAudioSound(name, filename, samples, opt_callback) {
this.name = name;
var that = this;
var req = new XMLHttpRequest();
req.open("GET", filename, true);
req.responseType = "arraybuffer";
req.onload = function() {
g_context.decodeAudioData(req.response, function onSuccess(decodedBuffer) {
// Decoding was successful, do something useful with the audio buffer
that.buffer = decodedBuffer;
if (opt_callback) {
opt_callback(false);
}
}, function onFailure() {
console.error("failed to decoding audio buffer: " + filename);
if (opt_callback) {
opt_callback(true);
}
});
}
req.addEventListener("error", function(e) {
console.error("failed to load:", filename, " : ", e.target.status);
}, false);
req.send();
}
WebAudioSound.prototype = new WebAudioBuffer();
function AudioTagSound(name, filename, samples, opt_callback) {
this.waiting_on_load = samples;
this.samples = samples || 1;
this.name = name;
this.play_idx = 0;
this.audio = {};
for (var i = 0; i < samples; i++) {
var audio = new Audio();
var that = this;
var checkCallback = function(err) {
that.waiting_on_load--;
if (opt_callback) {
opt_callback(err);
}
};
audio.addEventListener("canplaythrough", function() {
checkCallback(false);
}, false);
audio.src = filename;
audio.onerror = function() {
checkCallback(true);
};
audio.load();
this.audio[i] = audio;
}
};
AudioTagSound.prototype.play = function(opt_when, opt_loop) {
if (this.waiting_on_load > 0) {
console.log(this.name, " not loaded");
return;
}
this.play_idx = (this.play_idx + 1) % this.samples;
var a = this.audio[this.play_idx];
// console.log(this.name, ":", this.play_idx, ":", a.src);
var b = new Audio();
b.src = a.src;
// TODO: use when
b.addEventListener("canplaythrough", function() {
b.play();
}, false);
b.load();
};
var handleError = function(filename, audio) {
return function(e) {
console.error("can't load ", filename);
}
};
this.playSound = function(name, opt_when, opt_loop) {
if (!g_canPlay)
return;
var sound = g_soundBank[name];
if (!sound) {
console.error("audio: '" + name + "' not known.");
return;
}
return sound.play(opt_when, opt_loop);
}.bind(this);
this.getTime = function() {
return g_context ? g_context.currentTime : Date.now() * 0.001;
}.bind(this);
// on iOS and possibly other devices you can't play any
// sounds in the browser unless you first play a sound
// in response to a user gesture. So, make something
// to respond to a user gesture.
var setupGesture = function() {
if (this.needUserGesture()) {
var count = 0;
var elem = window;
var that = this;
var eventNames = ['touchstart', 'mousedown'];
var playSoundToStartAudio = function() {
++count;
if (count < 3) {
// just playing any sound does not seem to work.
var source = g_context.createOscillator();
var gain = g_context.createGain();
source.frequency.value = 440;
source.connect(gain);
gain.gain.value = 0;
gain.connect(g_context.destination);
if (source.start) {
source.start(0);
} else {
source.noteOn(0);
}
setTimeout(function() {
source.disconnect();
}, 100);
}
if (count == 3) {
for (var ii = 0; ii < eventNames.length; ++ii) {
elem.removeEventListener(eventNames[ii], playSoundToStartAudio, false);
}
if (options.startedOnTouchCallback) {
options.startedOnTouchCallback();
}
}
}
for (var ii = 0; ii < eventNames.length; ++ii) {
elem.addEventListener(eventNames[ii], playSoundToStartAudio, false);
}
}
}.bind(this);
this.loadSound = function(soundName, filename, samples, opt_callback) {
var ext = filename.substring(filename.length - 3);
if (ext == 'ogg' && !g_canPlayOgg) {
filename = changeExt(filename, "mp3");
} else if (ext == 'mp3' && !g_canPlayMp3) {
filename = changeExt(filename, "ogg");
}
var s = new g_createFromFileFn(soundName, filename, samples, opt_callback);
g_soundBank[soundName] = s;
return s;
}.bind(this);
this.init = function(sounds) {
var a = new Audio();
g_canPlayOgg = a.canPlayType("audio/ogg");
g_canPlayMp3 = a.canPlayType("audio/mp3");
g_canPlayWav = a.canPlayType("audio/wav");
g_canPlayAif = a.canPlayType("audio/aif") || a.canPlayType("audio/aiff");
g_canPlay = g_canPlayOgg || g_canPlayMp3;
if (!g_canPlay)
return;
if (webAudioAPI) {
console.log("Using Web Audio API");
g_context = new webAudioAPI();
if (!g_context.createGain) { g_context.createGain = g_context.createGainNode.bind(g_context); }
g_createFromFileFn = WebAudioSound;
} else {
console.log("Using Audio Tag");
g_createFromFileFn = AudioTagSound;
}
var soundsPending = 1;
var soundsLoaded = function() {
--soundsPending;
if (soundsPending == 0 && options.callback) {
options.callback();
}
};
if (sounds) {
Object.keys(sounds).forEach(function(sound) {
var data = sounds[sound];
++soundsPending;
this.loadSound(sound, data.filename, data.samples, soundsLoaded);
}.bind(this));
}
// so that we generate a callback even if there are no sounds.
// That way users don't have to restructure their code if they have no sounds or if they
// disable sounds by passing none in.
setTimeout(soundsLoaded, 0);
if (webAudioAPI) {
setupGesture();
}
}.bind(this);
this.init(sounds);
this.getSoundIds = function() {
return Object.keys(g_soundBank);
};
};
AudioManager.hasWebAudio = function() {
return webAudioAPI !== undefined;
};
global.AudioManager = AudioManager;
}(this));
You can download it here. There's a live example here: http://greggman.github.io/doodles/audio.html
To use it, include it with <script src="audio.js"></script>.
Then give it a list of sounds like this:
var audioMgr = new AudioManager({
fire: { filename: "assets/fire.ogg", samples: 8, },
explosion: { filename: "assets/explosion.ogg", samples: 6, },
hitshield: { filename: "assets/hitshield.ogg", samples: 6, },
launch: { filename: "assets/launch.ogg", samples: 2, },
gameover: { filename: "assets/gameover.ogg", samples: 1, },
play: { filename: "assets/play.ogg", samples: 1, },
});
After that you can play a sound with
audioMgr.playSound('explosion');
audioMgr.playSound('fire');
and so on.
samples is how many copies of that sound you want to be able to play at the same time. It isn't needed in any browser that supports the Web Audio API; in other words, only IE (which uses the Audio-tag fallback) needs it.
Also note that, as far as I know, Firefox does not support .mp3, so you need to supply .ogg files for it. Conversely, Safari does not support .ogg. The library handles loading either the .mp3 or the .ogg file regardless of which one you specify when you initialize it. In other words, if you put filename: "foo.mp3", the library will try to load foo.mp3 or foo.ogg, depending on which of the two the browser it's running in supports.
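Under the hood that fallback is just a canPlayType probe followed by swapping the extension; this is a simplified sketch of what the loadSound/changeExt code above does (pickFilename is a name invented for the sketch):
// Simplified sketch of the format fallback the library performs.
var probe = new Audio();
var canPlayOgg = probe.canPlayType("audio/ogg");   // "", "maybe", or "probably"
var canPlayMp3 = probe.canPlayType("audio/mp3");

function pickFilename(filename) {
  var ext = filename.substring(filename.length - 3);
  if (ext === "ogg" && !canPlayOgg) {
    return filename.substring(0, filename.length - 3) + "mp3";
  }
  if (ext === "mp3" && !canPlayMp3) {
    return filename.substring(0, filename.length - 3) + "ogg";
  }
  return filename;
}

// pickFilename("assets/fire.ogg") -> "assets/fire.mp3" in a browser without ogg support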
Answer 1 (score: 0)
I suggest you change your collision logic to emit a custom event, which you can then listen for to trigger the audio playback call. The example below mocks up a collision that dispatches such an event, and a listener on that event plays a sound.
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=ISO-8859-1">
</head>
<body>
<script>
var sound_on_click = (function() {
// --- setup audio logic
var array_audio_files = []; // array to hold list available media filenames
array_audio_files.push("awesome_blip.mp3");
array_audio_files.push("smash_crunch.wav");
array_audio_files.push("wild_screech.wav");
var play_sound = function (given_index) {
var cool_tune = new Audio(array_audio_files[given_index]);
cool_tune.play();
};
// --- event emit
var event_collision = new Event("see_a_collision"); // define new event type
document.addEventListener("see_a_collision", function(e) {
// randomly pick a media file from all available
var index_media_file = Math.floor(Math.random() * array_audio_files.length);
console.log("about to play sound ", index_media_file);
play_sound(index_media_file);
});
// --- below is a mock up of some collision condition
(function mock_collision(){
document.dispatchEvent(event_collision); // collision happened so emit event
setTimeout(mock_collision, 2000); // launch every x milliseconds
}());
}());
</script>
</body>
</html>
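To hook this up to a real game, replace the mock_collision timer with your actual collision check and dispatch the event from your if condition. The names in this sketch (player, obstacle, intersects, update) are placeholders for whatever your game already uses:
// Hypothetical integration point - player, obstacle and intersects()
// stand in for your own game objects and collision test.
function update() {
  if (player.intersects(obstacle)) {                        // your existing if condition
    document.dispatchEvent(new Event("see_a_collision"));   // the listener above plays the sound
  }
  requestAnimationFrame(update);                            // run the check every frame
}
requestAnimationFrame(update);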