I have a site that needs to auto-play a sound notification.
I need to trigger the browser's sound permission. Does anyone know how to trigger this with JavaScript? This is what I have so far:
new Audio('notification.mp3').play();
You can use the AudioContext interface to play it:
window.onload = function () {
    var url = '<AUDIO_URL>.mp3';
    window.AudioContext = window.AudioContext || window.webkitAudioContext; // fix up prefixing
    var context = new AudioContext();          // audio context
    var source = context.createBufferSource(); // source node
    source.connect(context.destination);       // connect the source to the speakers so we can hear it

    var request = new XMLHttpRequest();
    request.open('GET', url, true);
    request.responseType = 'arraybuffer';      // the response is an ArrayBuffer
    request.onload = function () {
        context.decodeAudioData(request.response, function (buffer) {
            source.buffer = buffer;
            source.loop = true;
            source.start(0);                   // play the audio immediately
        }, function () { console.error('Decoding the audio data failed.'); });
    };
    request.send();
};
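Note that recent browsers block audio until the user has interacted with the page, so the context created above may start out in the "suspended" state. A minimal sketch of resuming it on a user gesture (it assumes it runs in the same scope as the context variable above, and that a button with the hypothetical id 'enable-sound' exists on the page):

document.getElementById('enable-sound').addEventListener('click', function () {
    // AudioContext.resume() returns a Promise; playback is allowed once it resolves
    if (context.state === 'suspended') {
        context.resume().then(function () {
            console.log('AudioContext resumed');
        });
    }
});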
navigator.getUserMedia =
    navigator.getUserMedia ||
    navigator.webkitGetUserMedia ||
    navigator.mozGetUserMedia ||
    navigator.msGetUserMedia;

if (navigator.getUserMedia) {
    navigator.getUserMedia(
        { audio: true },
        function onSuccess(stream) {
            // permission granted - go play
        },
        function onError(error) {
            // permission denied or no audio input available
        }
    );
} else {
    // getUserMedia is not supported in this browser
}
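The snippet above uses the legacy callback form of getUserMedia, which prompts for microphone access. In current browsers it has been superseded by the Promise-based navigator.mediaDevices.getUserMedia; a rough equivalent sketch with the same { audio: true } constraint:

if (navigator.mediaDevices && navigator.mediaDevices.getUserMedia) {
    navigator.mediaDevices.getUserMedia({ audio: true })
        .then(function (stream) {
            // permission granted - go play
        })
        .catch(function (error) {
            // permission denied or no audio input available
            console.error(error);
        });
}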
Related
I use AudioContext to play some audio clips on my site.
It works fine on Chrome and Firefox, but not on Safari. On Safari the stop function does not work and I get the following error:
[Error] InvalidStateError: DOM Exception 11: An attempt was made to use an object that is not, or is no longer, usable.
noteOff (preload.js, line 85)
Does anyone know how to fix this, and why this error occurs?
function _initWebAudio(AudioContext, format, audios, callback) {
    var context = new AudioContext();
    preloader(audios, _preload, callback);

    function _preload(asset, doneCallback) {
        var request = new XMLHttpRequest();
        request.open('GET', 'audio/' + asset.id + '.' + format, true);
        request.responseType = 'arraybuffer';
        request.onload = function () {
            context.decodeAudioData(request.response, function (buffer) {
                var source;
                // default volume
                // support both webkitAudioContext and the standard AudioContext
                asset.gain = context.createGain ? context.createGain() : context.createGainNode();
                asset.play = function () {
                    source = context.createBufferSource();    // creates a sound source
                    source.buffer = buffer;                    // tell the source which sound to play
                    source.connect(asset.gain);                // connect the source to the gain node
                    asset.gain.connect(context.destination);   // and the gain node to the speakers
                    // play the source now
                    // support both webkitAudioContext and the standard AudioContext
                    source.noteOn ? source.noteOn(0) : source.start(0);
                };
                asset.stop = function () {
                    console.log(source);
                    source = context.createBufferSource(); // creates a sound source
                    source.noteOff ? source.noteOff(0) : source.stop(0);
                };
                asset.toggleVolume = function (muteSound) {
                    if (muteSound) {
                        asset.gain.gain.value = 0;
                    } else {
                        asset.gain.gain.value = 1;
                    }
                };
                doneCallback();
            }, function (err) {
                asset.play = function () {
                };
                doneCallback(err, asset.id);
            });
        };
        request.onerror = function (err) {
            console.log(err);
            asset.play = function () {
            };
            doneCallback(err, asset.id);
        };
        // kick off the load
        request.send();
    }
}
I'm not sure why this happened, but I solved it by using the disconnect() method:
asset.stop = function () {
    asset.gain.disconnect();
    //source.noteOff ? source.noteOff(0) : source.stop(0);
};
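For what it's worth, the error itself most likely comes from the stop function in the question creating a brand-new createBufferSource() and then calling noteOff()/stop() on it; a source that was never started cannot be stopped, which Safari reports as InvalidStateError. A sketch of a stop() that stops the source started by play() instead (assuming the same source closure variable as in the question's code):

asset.stop = function () {
    if (source) {
        // stop the node that play() actually started
        source.noteOff ? source.noteOff(0) : source.stop(0);
        source = null;
    }
};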
I got the following code for loading and then playing a .wav file in the Chrome browser:
var songBuffer = null;

// Fix up prefixing
window.AudioContext = window.AudioContext || window.webkitAudioContext;
var context = new AudioContext();

function loadSound(url) {
    var request = new XMLHttpRequest();
    request.open('GET', url, true);
    request.responseType = 'arraybuffer';
    // Decode asynchronously
    request.onload = function() {
        context.decodeAudioData(request.response, function(buffer) {
            songBuffer = buffer;
        });
    };
    request.send();
}

// Fix up prefixing
window.AudioContext = window.AudioContext || window.webkitAudioContext;
var context = new AudioContext();

function playSound(buffer) {
    var source = context.createBufferSource();
    // creates a sound source
    source.buffer = buffer;
    // tell the source which sound to play
    source.connect(context.destination);
    // connect the source to the context's destination (the speakers)
    source.start(0);
    // play the source now
}
Loading is triggered by <button onclick="loadSound('audio/sound.wav')">load sound</button>, and the file loads fine when I generate the event.
Playing is handled by <button onclick="playSound()">play sound</button>.
The console, however, tells me:
Uncaught TypeError: Failed to set the 'buffer' property on 'AudioBufferSourceNode': The provided value is not of type 'AudioBuffer'.
What is going on?
You are not passing anything to your playSound function, which means that the line
source.buffer = buffer;
is interpreted as
source.buffer = undefined;
Since you have your songBuffer variable in a scope that is accessible to the playSound function, you can simply remove the buffer parameter and just do
source.buffer = songBuffer;
So, to wrap it up - keep it as it is, but edit the playSound function to look like this:
function playSound() {
    var source = context.createBufferSource();
    // creates a sound source
    source.buffer = songBuffer;
    // tell the source which sound to play
    source.connect(context.destination);
    // connect the source to the context's destination (the speakers)
    source.start(0);
    // play the source now
}
I'm building a canvas game using JavaScript and Web Audio. To get it to work in Firefox I keep a copy of all the audio files in .ogg format. The code to load these files is below. To get the desired audio file I use playSound(samplebb[3], channel1); I have done it like this because in my app it is useful to choose the sample by number, for example so that sounds can be chosen using probability and randomness.
I read on a forum that "the loader will accept both [mp3 and ogg] for the same sound, just pass both paths in an array rather than a single string."
The 4th line of the code below is me trying this, but it does not work.
Is it possible to load an alternate .ogg file for each .mp3 like this (in one buffer list), or will I have to detect the browser and build a buffer list of .ogg files if the browser is Firefox?
Thanks
function loadSounds() {
    bufferLoader = new BufferLoader(audioContext,
        [
            ['sounds/1-KICK.mp3', 'sounds/1-KICK.ogg'], //0 // Not found
            'sounds/2-BASS.mp3',  //1
            'sounds/3-BASS2.mp3', //2
            'sounds/4-BASS4.mp3'  //3
            // ... ... ...
        ],
        finishedLoading
    );
    bufferLoader.load();
}

function finishedLoading(bufferList) {
    for (var i = 0, l = bufferList.length; i < l; i += 1) {
        var source = audioContext.createBufferSource();
        source.buffer = bufferList[i];
        source.connect(audioContext.destination);
        var note = {
            note: source,
            ready: true
        };
        samplebb.push(note);
    }
    setTimeout(play, 1000);
}
Are you using BufferLoader from HTML5 Rocks? If so, its JS file clearly shows that it only expects strings (not arrays) as URL arguments. However, you can modify the class so that it works the way you want. Use the following BufferLoader.loadBuffer() function instead:
BufferLoader.prototype.loadBuffer = function(url, index) {
    // Load buffer asynchronously
    var request = new XMLHttpRequest(),
        mult = typeof url != 'string',
        srcInd = 0;
    request.open("GET", mult ? url[srcInd++] : url, true);
    request.responseType = "arraybuffer";

    var loader = this;

    request.onload = function() {
        // Asynchronously decode the audio file data in request.response
        loader.context.decodeAudioData(
            request.response,
            function(buffer) {
                if (!buffer) {
                    if (!mult || srcInd == url.length) {
                        console.error('error decoding file data:', url);
                        return;
                    } else {
                        console.info('error decoding file data, trying next source');
                        request.open("GET", url[srcInd++], true);
                        return request.send();
                    }
                }
                loader.bufferList[index] = buffer;
                if (++loader.loadCount == loader.urlList.length)
                    loader.onload(loader.bufferList);
            },
            function(error) {
                if (!mult || srcInd == url.length) {
                    console.error('decodeAudioData error:', url);
                    return;
                } else {
                    console.info('decodeAudioData error, trying next source');
                    request.open("GET", url[srcInd++], true);
                    return request.send();
                }
            }
        );
    };

    request.onerror = function() {
        if (!mult || srcInd == url.length) {
            console.error('BufferLoader XHR error:', url);
            return;
        } else {
            console.info('BufferLoader XHR error, trying next source');
            request.open("GET", url[srcInd++], true);
            return request.send();
        }
    };

    request.send();
};
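With that change each entry in the URL list may be either a single string or an array of fallback URLs, so the loader call from the question should work as written, for example:

bufferLoader = new BufferLoader(audioContext,
    [
        ['sounds/1-KICK.mp3', 'sounds/1-KICK.ogg'], // the .ogg is tried if the .mp3 fails to load or decode
        'sounds/2-BASS.mp3'
    ],
    finishedLoading
);
bufferLoader.load();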
I'm trying to follow a tutorial online by piecing together the examples. I feel like this should be playing the mp3 file. I'm using the Chrome browser and it's up to date. I don't get any errors on the console. I'm not sure what I need to change or add to make this work.
<script type="text/javascript">
//creating an audio context
window.addEventListener('load',init);
function init()
{
try
{
window.AudioContext = window.AudioContext || window.webkitAudioContext;
var context=new AudioContext();
}
catch(e)
{
alert("Your browser doesn't support Web Audio API");
}
loadSound();
playSound();
}
//loading sound into the created audio context
function loadSound()
{
//set the audio file's URL
var audioURL='audio files/song.mp3';
//creating a new request
var request = new XMLhttpRequest();
request.open("GET",audioURL,true);
request.responseType= 'arraybuffer';
request.onLoad funtion(){
//take the audio from http request and decode it in an audio buffer
var audioBuffer = null;
context.decodeAudioData(request.response, function(buffer){ audioBuffer= buffer;});
}
request.send();
}, onError);
//playing the audio file
function playSound(buffer) {
//creating source node
var source = audioContext.createBufferSource();
//passing in file
source.buffer = audioBuffer;
//start playing
source.start(0);
}
</script>
</head>
</html>
You are using an asynchronous XMLHttpRequest (note that it should be spelled with a capital "H"), so most probably the playSound function is called before request.onLoad (note the missing =) completes.
Try tracing the execution of your script with console.log or similar to find bugs like this, and use the JavaScript console to catch syntax errors.
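A minimal sketch of that idea, reusing the names from the question (and assuming context, audioBuffer and playSound are all visible in this scope): decode inside onload and only call playSound once the buffer is ready.

request.onload = function () {
    context.decodeAudioData(request.response, function (buffer) {
        audioBuffer = buffer;
        playSound(); // safe now: the buffer has been decoded
    });
};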
I got this fixed :) I made use of the audio tag along with the Web Audio API. Here's the code:
var audio = new Audio();
audio.src = 'audio files/song.mp3';
audio.controls = true;
audio.autoplay = true;
document.body.appendChild(audio);

var context = new webkitAudioContext();
var analyser = context.createAnalyser();

window.addEventListener('load', function(e) {
    // Our <audio> element will be the audio source.
    var source = context.createMediaElementSource(audio);
    source.connect(analyser);
    analyser.connect(context.destination);
}, false);
Thanks for trying to help :))
Is your audioURL correct? Why is there a space in audio files/song.mp3?
============Edit============
<script>
    //creating an audio context
    var context;
    var audioBuffer;
    window.addEventListener('load', init);
    function init()
    {
        try
        {
            window.AudioContext = window.AudioContext || window.webkitAudioContext;
            context = new AudioContext();
        }
        catch(e)
        {
            alert("Your browser doesn't support Web Audio API");
        }
        loadSound();
        // playSound(); // comment here
    }
    //loading sound into the created audio context
    function loadSound()
    {
        // set the audio file's URL
        var audioURL = 'AllofMe.mp3';
        //creating a new request
        var request = new XMLHttpRequest();
        request.open("GET", audioURL, true);
        request.responseType = 'arraybuffer';
        request.onload = function(){
            //take the audio from http request and decode it in an audio buffer
            context.decodeAudioData(request.response, function(buffer){
                audioBuffer = buffer;
                console.log(audioBuffer);
                if(audioBuffer){ // check here
                    playSound();
                }
            });
        };
        request.send();
    }
    //playing the audio file
    function playSound() {
        //creating source node
        var source = context.createBufferSource();
        //passing in file
        source.buffer = audioBuffer;
        //start playing
        source.connect(context.destination); // added
        source.start(0);
        console.log('playing');
    }
</script>
I was looking for a solution to play an mp3 on a mobile device and found this page. I've made the provided example work with help from here. Working example below:
var context;
var saved;
try {
    context = new (window.AudioContext || window.webkitAudioContext)();
}
catch (e) {
    console.log("Your browser doesn't support Web Audio API");
}

if (saved) {
    playSound(saved);
} else {
    loadSound();
}

//loading sound into the created audio context
function loadSound() {
    //set the audio file's URL
    var audioURL = '/path/to/file.mp3';
    //creating a new request
    var request = new XMLHttpRequest();
    request.open('GET', audioURL, true);
    request.responseType = 'arraybuffer';
    request.onload = function () {
        //take the audio from http request and decode it in an audio buffer
        context.decodeAudioData(request.response, function (buffer) {
            // save buffer, to not load again
            saved = buffer;
            // play sound
            playSound(buffer);
        });
    };
    request.send();
}

//playing the audio file
function playSound(buffer) {
    //creating source node
    var source = context.createBufferSource();
    //passing in data
    source.buffer = buffer;
    //giving the source which sound to play
    source.connect(context.destination);
    //start playing
    source.start(0);
}
It looks like playing files works fine on an Android device, but not on iOS. For that you have to follow this guide: How to: Web Audio on iOS (but use the touchend event and replace the noteOn method with start).
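The usual trick from that guide is to start a short, empty buffer from inside a touchend handler; once that has happened the context is unlocked for normal playback. A sketch (assuming the context variable from the example above):

window.addEventListener('touchend', function unlock() {
    // play a one-sample silent buffer to unlock audio on iOS
    var silentBuffer = context.createBuffer(1, 1, 22050);
    var source = context.createBufferSource();
    source.buffer = silentBuffer;
    source.connect(context.destination);
    source.start(0);
    // only needed once
    window.removeEventListener('touchend', unlock);
}, false);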
While I was trying to create a workaround for Chrome not supporting blobs in IndexedDB, I discovered that I could read an image through AJAX as an arraybuffer, store it in IndexedDB, extract it, convert it to a blob, and then show it in an element using the following code:
var xhr = new XMLHttpRequest(), newphoto;
xhr.open("GET", "photo1.jpg", true);
xhr.responseType = "arraybuffer";
xhr.addEventListener("load", function () {
    if (xhr.status === 200) {
        newphoto = xhr.response;
        /* store "newphoto" in IndexedDB */
        ...
    }
});

document.getElementById("show_image").onclick = function() {
    var store = db.transaction("files", "readonly").objectStore("files").get("image1");
    store.onsuccess = function() {
        var URL = window.URL || window.webkitURL;
        var oMyBlob = new Blob([store.result.image], { "type" : "image\/jpg" });
        var docURL = URL.createObjectURL(oMyBlob);
        var elImage = document.getElementById("photo");
        elImage.setAttribute("src", docURL);
        URL.revokeObjectURL(docURL);
    };
};
This code works fine. But if I try the same process, this time loading a video (.mp4), I can't show it:
...
var oMyBlob = new Blob([store.result.image], { "type" : "video\/mp4" });
var docURL = URL.createObjectURL(oMyBlob);
var elVideo = document.getElementById("showvideo");
elVideo.setAttribute("src", docURL);
...
<video id="showvideo" controls ></video>
...
Even if I use xhr.responseType = "blob", not storing the blob in IndexedDB but trying to show it immediately after loading it, it still does not work!
xhr.responseType = "blob";
xhr.addEventListener("load", function () {
if (xhr.status === 200) {
newvideo = xhr.response;
var docURL = URL.createObjectURL(newvideo);
var elVideo = document.getElementById("showvideo");
elVideo.setAttribute("src", docURL);
URL.revokeObjectURL(docURL);
}
}
The next step was trying to do the same thing for PDF files, but I'm stuck with video files!
This is a filler answer (the OP resolved the issue himself in the comments) to prevent the question from continuing to show up under "unanswered" questions.
From the author:
OK, I solved the problem by adding an event listener that waits for the
video/image to load before executing the revokeObjectURL method:
var elImage = document.getElementById("photo");
elImage.addEventListener("load", function (evt) { URL.revokeObjectURL(docURL); });
elImage.setAttribute("src", docURL);
I suppose the revokeObjectURL method was executing before the video
was totally loaded.
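For the video case from the question the same pattern should work; a sketch (assuming the showvideo element and the docURL object URL from the code above) that waits for the loadeddata event before revoking:

var elVideo = document.getElementById("showvideo");
elVideo.addEventListener("loadeddata", function () {
    URL.revokeObjectURL(docURL);
});
elVideo.setAttribute("src", docURL);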