如何多次播放音频文件
How to play an audio file multiple times
游戏中发生碰撞时,我都想发出碰撞声。为此,如何多次运行音频文件?
下面是一个示例,其中音乐不断播放:示例。但是如何在 If 条件下多次播放小型 mp3 文件?
这实际上是一个比较大的话题。我建议使用现成的库来处理。
Web 音频 API 可以说是在浏览器中播放音频的最佳方式。这是一篇关于如何使用它的好文章。但不幸的是,它并非在每个浏览器上都可用,这意味着您需要某种回退。Chrome、Safari 和 Firefox 都支持此功能。IE支持即将到来,但尚不存在。
下面是一个这样的库:
// Browser audio helper: prefers the Web Audio API and falls back to
// <audio> tags on browsers that lack it (e.g. IE at the time of writing).
(function(global) {
// The (possibly vendor-prefixed) AudioContext constructor, or undefined
// when the browser has no Web Audio support at all.
var webAudioAPI = window.AudioContext || window.webkitAudioContext || window.mozAudioContext;
// To play a sound, simply call audio.playSound(id), where id is
// one of the keys of the sounds map passed to the constructor, e.g. "damage".
// options:
// startedOnTouchCallback: on iOS no sounds can be played unless at least one is first initiated during
// a user gesture. If a function is attached here it will be called when that user gesture has happened.
// This is useful for situations where sounds 'should' start right from the beginning
// even if the player has not touched the screen. In that case we put up a message, "touch the screen"
// and remove that message when we get this callback.
//
// callback: called when all the sounds have loaded.
var AudioManager = function(sounds, options) {
options = options || {};
// Private state shared by the closures below.
var g_context; // AudioContext instance (Web Audio path only)
var g_audioMgr; // NOTE(review): declared but never assigned or read in this file
var g_soundBank = {}; // soundName -> WebAudioSound | AudioTagSound
var g_canPlay = false; // true once init() confirms ogg or mp3 support
var g_canPlayOgg;
var g_canPlayMp3;
var g_canPlayWav; // probed in init() but not consulted by loadSound()
var g_canPlayAif; // probed in init() but not consulted by loadSound()
var g_createFromFileFn; // WebAudioSound or AudioTagSound, chosen in init()
// Swaps a filename's 3-character extension (assumes a ".xxx" ending).
var changeExt = function(filename, ext) {
return filename.substring(0, filename.length - 3) + ext;
};
// Returns true on iOS, where audio must first be unlocked by a user
// gesture. Detection is userAgent sniffing for iPad/iPhone/iPod.
this.needUserGesture = (function() {
var iOS = ( navigator.userAgent.match(/(iPad|iPhone|iPod)/g) ? true : false );
var needUserGesture = iOS;
return function() {
return needUserGesture;
};
}());
// Base "class" for Web Audio backed sounds; holds the shared play() method.
var WebAudioBuffer = function() {
};
/**
 * Plays this decoded buffer through a fresh AudioBufferSourceNode.
 * @param {number=} opt_when AudioContext time at which to start.
 * @param {boolean=} opt_loop whether to loop; defaults to false.
 * @return {AudioBufferSourceNode|undefined} the source node, or undefined
 *     when the buffer has not finished loading/decoding yet.
 */
WebAudioBuffer.prototype.play = function(opt_when, opt_loop) {
if (!this.buffer) {
console.log(this.name, " not loaded");
return;
}
var src = g_context.createBufferSource();
src.buffer = this.buffer;
src.loop = opt_loop || false;
src.connect(g_context.destination);
// Older WebKit exposed noteOn() before start() was standardized.
if (src.start) {
src.start(opt_when);
} else {
src.noteOn(opt_when);
}
return src;
};
/**
 * Web Audio backed sound: fetches the file via XHR as an ArrayBuffer and
 * decodes it asynchronously into this.buffer.
 * @param {string} name id used in log messages.
 * @param {string} filename URL of the audio file.
 * @param {number} samples unused on this path (Web Audio overlaps freely).
 * @param {function(boolean)=} opt_callback called with false on success,
 *     true on decode failure. Not called on network error (only logged).
 */
function WebAudioSound(name, filename, samples, opt_callback) {
this.name = name;
var that = this;
var req = new XMLHttpRequest();
req.open("GET", filename, true);
req.responseType = "arraybuffer";
req.onload = function() {
g_context.decodeAudioData(req.response, function onSuccess(decodedBuffer) {
// Decoding was successful, do something useful with the audio buffer
that.buffer = decodedBuffer;
if (opt_callback) {
opt_callback(false);
}
}, function onFailure() {
console.error("failed to decoding audio buffer: " + filename);
if (opt_callback) {
opt_callback(true);
}
});
}
req.addEventListener("error", function(e) {
console.error("failed to load:", filename, " : ", e.target.status);
}, false);
req.send();
}
// Inherit play() from WebAudioBuffer.
WebAudioSound.prototype = new WebAudioBuffer();
/**
 * <audio>-tag backed sound (fallback path). Pre-loads `samples` Audio
 * elements so the browser has the file cached.
 * @param {string} name id used in log messages.
 * @param {string} filename URL of the audio file.
 * @param {number} samples number of Audio elements to pre-create.
 * @param {function(boolean)=} opt_callback called once per element with
 *     false when it can play through, true on load error.
 */
function AudioTagSound(name, filename, samples, opt_callback) {
this.waiting_on_load = samples;
this.samples = samples || 1;
this.name = name;
this.play_idx = 0;
this.audio = {};
for (var i = 0; i < samples; i++) {
var audio = new Audio();
var that = this;
var checkCallback = function(err) {
that.waiting_on_load--;
if (opt_callback) {
opt_callback(err);
}
};
audio.addEventListener("canplaythrough", function() {
checkCallback(false);
}, false);
audio.src = filename;
audio.onerror = function() {
checkCallback(true);
};
audio.load();
this.audio[i] = audio;
}
};
/**
 * Plays the sound by copying the src of the next pooled element into a
 * brand-new Audio element and playing that once it can play through.
 * opt_when and opt_loop are currently ignored on this path (see TODO).
 */
AudioTagSound.prototype.play = function(opt_when, opt_loop) {
if (this.waiting_on_load > 0) {
console.log(this.name, " not loaded");
return;
}
this.play_idx = (this.play_idx + 1) % this.samples;
var a = this.audio[this.play_idx];
// console.log(this.name, ":", this.play_idx, ":", a.src);
var b = new Audio();
b.src = a.src;
// TODO: use when
b.addEventListener("canplaythrough", function() {
b.play();
}, false);
b.load();
};
// NOTE(review): defined but never referenced anywhere in this file.
var handleError = function(filename, audio) {
return function(e) {
console.error("can't load ", filename);
}
};
/**
 * Plays a previously loaded sound by name.
 * @param {string} name key passed to loadSound() / the constructor map.
 * @param {number=} opt_when start time (honored on the Web Audio path).
 * @param {boolean=} opt_loop loop flag (honored on the Web Audio path).
 * @return {AudioBufferSourceNode|undefined}
 */
this.playSound = function(name, opt_when, opt_loop) {
if (!g_canPlay)
return;
var sound = g_soundBank[name];
if (!sound) {
console.error("audio: '" + name + "' not known.");
return;
}
return sound.play(opt_when, opt_loop);
}.bind(this);
// Current time in seconds: AudioContext time when available, else wall clock.
this.getTime = function() {
return g_context ? g_context.currentTime : Date.now() * 0.001;
}.bind(this);
// on iOS and possibly other devices you can't play any
// sounds in the browser unless you first play a sound
// in response to a user gesture. So, make something
// to respond to a user gesture.
var setupGesture = function() {
if (this.needUserGesture()) {
var count = 0;
var elem = window;
var that = this;
var eventNames = ['touchstart', 'mousedown'];
var playSoundToStartAudio = function() {
++count;
if (count < 3) {
// just playing any sound does not seem to work.
// Play a silent (gain 0) oscillator instead to unlock audio.
var source = g_context.createOscillator();
var gain = g_context.createGain();
source.frequency.value = 440;
source.connect(gain);
gain.gain.value = 0;
gain.connect(g_context.destination);
if (source.start) {
source.start(0);
} else {
source.noteOn(0);
}
setTimeout(function() {
source.disconnect();
}, 100);
}
// After 3 gestures assume audio is unlocked: detach the listeners and
// notify the app so it can hide any "touch the screen" style message.
if (count == 3) {
for (var ii = 0; ii < eventNames.length; ++ii) {
elem.removeEventListener(eventNames[ii], playSoundToStartAudio, false);
}
if (options.startedOnTouchCallback) {
options.startedOnTouchCallback();
}
}
}
for (var ii = 0; ii < eventNames.length; ++ii) {
elem.addEventListener(eventNames[ii], playSoundToStartAudio, false);
}
}
}.bind(this);
/**
 * Loads one sound and registers it under soundName.
 * If the requested ogg/mp3 format is unsupported by this browser, the
 * other extension is substituted (assumes both files exist server-side).
 * @return {Object} the created sound wrapper.
 */
this.loadSound = function(soundName, filename, samples, opt_callback) {
var ext = filename.substring(filename.length - 3);
if (ext == 'ogg' && !g_canPlayOgg) {
filename = changeExt(filename, "mp3");
} else if (ext == 'mp3' && !g_canPlayMp3) {
filename = changeExt(filename, "ogg");
}
var s = new g_createFromFileFn(soundName, filename, samples, opt_callback);
g_soundBank[soundName] = s;
return s;
}.bind(this);
// Probes codec support, picks the Web Audio or <audio>-tag backend and
// kicks off loading of every entry in `sounds`.
this.init = function(sounds) {
var a = new Audio()
// canPlayType returns "", "maybe" or "probably"; "" is falsy.
g_canPlayOgg = a.canPlayType("audio/ogg");
g_canPlayMp3 = a.canPlayType("audio/mp3");
g_canPlayWav = a.canPlayType("audio/wav");
g_canPlayAif = a.canPlayType("audio/aif") || a.canPlayType("audio/aiff");
g_canPlay = g_canPlayOgg || g_canPlayMp3;
if (!g_canPlay)
return;
if (webAudioAPI) {
console.log("Using Web Audio API");
g_context = new webAudioAPI();
// Older WebKit named it createGainNode.
if (!g_context.createGain) { g_context.createGain = g_context.createGainNode.bind(g_context); }
g_createFromFileFn = WebAudioSound;
} else {
console.log("Using Audio Tag");
g_createFromFileFn = AudioTagSound;
}
// Starts at 1 so the setTimeout below always fires the final decrement,
// guaranteeing options.callback runs even when `sounds` is empty.
var soundsPending = 1;
var soundsLoaded = function() {
--soundsPending;
if (soundsPending == 0 && options.callback) {
options.callback();
}
};
if (sounds) {
Object.keys(sounds).forEach(function(sound) {
var data = sounds[sound];
++soundsPending;
this.loadSound(sound, data.filename, data.samples, soundsLoaded);
}.bind(this));
}
// so that we generate a callback even if there are no sounds.
// That way users don't have to restructure their code if they have no sounds or if they
// disable sounds by passing none in.
setTimeout(soundsLoaded, 0);
if (webAudioAPI) {
setupGesture();
}
}.bind(this);
this.init(sounds);
// Returns the names of all registered sounds.
this.getSoundIds = function() {
return Object.keys(g_soundBank);
};
};
// True when the browser exposes any (prefixed) AudioContext constructor.
AudioManager.hasWebAudio = function() {
return webAudioAPI !== undefined;
};
global.AudioManager = AudioManager;
}(this));
你可以在这里下载它,这里还有一个在线演示示例(http://greggman.github.io/doodles/audio.html);
要使用它,请在页面中通过 <script src="audio.js"></script> 引入它。
然后给它一个这样的声音列表
// Create the manager with a map of soundName -> { filename, samples }.
// `samples` is the number of <audio> elements pre-created for that sound;
// it is ignored by the Web Audio path (only the Audio-tag fallback uses it).
var audioMgr = new AudioManager({
fire: { filename: "assets/fire.ogg", samples: 8, },
explosion: { filename: "assets/explosion.ogg", samples: 6, },
hitshield: { filename: "assets/hitshield.ogg", samples: 6, },
launch: { filename: "assets/launch.ogg", samples: 2, },
gameover: { filename: "assets/gameover.ogg", samples: 1, },
play: { filename: "assets/play.ogg", samples: 1, },
});
之后,您可以播放声音
// Play previously registered sounds by their key; each call starts a
// fresh playback (see playSound in the library above).
audioMgr.playSound('explosion');
audioMgr.playSound('fire');
等。。。
samples
指的是您希望该声音能够同时播放的实例数量。对于任何支持 Web 音频 API 的浏览器,都不需要这个设置。换句话说,它只对 IE 这类不支持 Web 音频 API 的浏览器有用。
另请注意,据我所知,Firefox不支持MP3,因此您需要为其提供.ogg
文件。相反,Safari 不支持 .ogg
. 库处理加载.mp3
或.ogg
文件,而不考虑您在初始化库时指定的内容。换句话说,如果您输入filename: "foo.mp3"
库将尝试加载foo.mp3
或foo.ogg
,具体取决于您所在的浏览器是否支持其中之一。
我建议您更改碰撞逻辑以发出新的事件风格,然后您可以捕获该事件以触发播放音频调用。 下面显示了一个模拟碰撞,它调度新的发出事件,这些事件被监听以播放音频。
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=ISO-8859-1">
</head>
<body>
<script>
var sound_on_click = (function() {
// --- audio setup: the pool of available media filenames
var array_audio_files = [
"awesome_blip.mp3",
"smash_crunch.wav",
"wild_screech.wav"
];
// Plays the file at the given pool index through a fresh Audio element.
function play_sound(given_index) {
new Audio(array_audio_files[given_index]).play();
}
// --- event emit: play a random sound each time a collision is announced
var event_collision = new Event("see_a_collision"); // define new event type
document.addEventListener("see_a_collision", function() {
// randomly pick a media file from all available
var index_media_file = Math.floor(Math.random() * array_audio_files.length);
console.log("about to play sound ", index_media_file);
play_sound(index_media_file);
});
// --- mock collision source: dispatches the event every two seconds
(function mock_collision() {
document.dispatchEvent(event_collision); // collision happened so emit event
setTimeout(mock_collision, 2000); // launch every x milliseconds
}());
}());
</script>
</body>
</html>
- 如何播放部分音频文件
- javascript:发送带有音频文件的POST,然后重定向到新页面
- Javascript::通过HTML5音频播放器播放列表播放多个音频文件
- 获取 HTML 5 音频控制文件位置并在不使用 id 选择器的情况下更新 src
- 正在预加载jPlayer要使用的音频文件
- HTML 5<音频>-在特定时间点播放文件
- 将Base64音频文件Mp3解码为可播放的Mp3
- 更改音频速度,然后另存为新文件
- Blueimp jQuery文件上传音频/视频限制
- 在播放完成之前再次单击播放音频文件
- 在 Windows 现代(地铁)UI 应用中存储图像文件和音频文件的方法是什么?
- 在 JavaScript 中对音频文件进行计时不准确
- 如何在html5/JS中只预加载音频文件的一部分
- 单个页面上的多个音频文件javascript
- 如何将timbre.js缓冲区导出为.wav或任何其他音频文件格式
- JavaScript/HTML5音频:在Android Chrome中播放用户通过文件选择器加载的mp3文件
- 从PHP脚本获取音频文件时,更改JavaScript音频对象的当前时间
- 如何使用Node.js将音频样本数组写入wav文件
- 如何用一个html5音频播放器播放多个音频文件
- 无法在 Handlebars 模板文件中设置音频 src