Web Audio API - Failed to set 'buffer' property - JavaScript

I have the following code for loading and then playing a .wav file in the Chrome browser:
var songBuffer = null;

// Fix up prefixing
window.AudioContext = window.AudioContext || window.webkitAudioContext;
var context = new AudioContext();

function loadSound(url) {
  var request = new XMLHttpRequest();
  request.open('GET', url, true);
  request.responseType = 'arraybuffer';
  // Decode asynchronously
  request.onload = function() {
    context.decodeAudioData(request.response, function(buffer) {
      songBuffer = buffer;
    });
  };
  request.send();
}
// Fix up prefixing
window.AudioContext = window.AudioContext || window.webkitAudioContext;
var context = new AudioContext();

function playSound(buffer) {
  var source = context.createBufferSource(); // creates a sound source
  source.buffer = buffer;                    // tell the source which sound to play
  source.connect(context.destination);       // connect the source to the speakers
  source.start(0);                           // play the source now
}
Loading is triggered by:
<button onclick="loadSound('audio/sound.wav')">load sound</button>
and the file loads fine when I generate the event. Playing is handled by:
<button onclick="playSound()">play sound</button>
The console, however, tells me:
Uncaught TypeError: Failed to set the 'buffer' property on 'AudioBufferSourceNode': The provided value is not of type 'AudioBuffer'.
What is going on?

You are not passing anything to your playSound function, which means that the line
source.buffer = buffer;
is interpreted as
source.buffer = undefined;
Since your songBuffer variable is in a scope that is accessible to the playSound function, you can simply remove the buffer parameter and do
source.buffer = songBuffer;
So, to wrap it up: keep everything else as it is, but edit the playSound function to look like this:
function playSound() {
  var source = context.createBufferSource(); // creates a sound source
  source.buffer = songBuffer;                // tell the source which sound to play
  source.connect(context.destination);       // connect the source to the speakers
  source.start(0);                           // play the source now
}
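Alternatively, you could keep the parameter and pass the global at the call site instead; a sketch, reusing the button from the question:
<button onclick="playSound(songBuffer)">play sound</button>
Either way, the point is that source.buffer must receive the decoded AudioBuffer, not undefined.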

Related

Why decodeAudioData for Icecast stream only works on Desktop Chrome browser

I have a livestream that is served through Icecast as audio/mpeg. I use the following code to grab the audio from the livestream and listen to it:
function playLiveAudio() {
  // Fix up prefixing
  window.AudioContext = window.AudioContext || window.webkitAudioContext;
  var context = new AudioContext();
  var offset = 0;
  var byteOffset = 0;
  var minDecodeSize = 4096; // This is your chunk size
  var request = new XMLHttpRequest();
  request.onprogress = function(evt) {
    if (request.response) {
      var size = request.response.length - byteOffset;
      if (size < minDecodeSize) return;
      // In Chrome, XHR stream mode gives text, not an ArrayBuffer.
      // In Firefox, the response is an ArrayBuffer as is.
      var ab;
      if (request.response instanceof ArrayBuffer) {
        ab = request.response;
      } else {
        ab = new ArrayBuffer(size);
        var buf = new Uint8Array(ab);
        for (var i = 0; i < size; i++)
          buf[i] = request.response.charCodeAt(i + byteOffset) & 0xff;
      }
      byteOffset = request.response.length;
      context.decodeAudioData(ab, function(buffer) {
        playSound(buffer);
      }, function(error) {
        console.log(error);
      });
    }
  };
  request.onloadend = function(evt) {
    console.log(evt);
  };
  request.open('GET', liveURL, true);
  request.responseType = "stream"; // 'stream' in Chrome, 'moz-chunked-arraybuffer' in Firefox, 'ms-stream' in IE
  request.overrideMimeType('text/plain; charset=x-user-defined');
  request.send(null);

  function playSound(buffer) {
    source = context.createBufferSource(); // creates a sound source
    source.buffer = buffer;                // tell the source which sound to play
    source.connect(context.destination);   // connect the source to the speakers
    source.start(offset);                  // schedule the chunk
    // note: on older systems, may have to use the deprecated noteOn(time)
    offset += buffer.duration;
  }
}
I tried changing stream to moz-chunked-arraybuffer to see if I could manipulate the audio buffer, but it doesn't seem to make a difference: I always get a decode promise rejection in Safari with no further message, or a decode-audio-data error in Firefox.
For some reason it only works on Desktop Chrome, not on any version of Firefox, Safari, or Mobile Chrome. How can decodeAudioData() be made to work in all environments? In Firefox I receive the error: cannot decodeAudioData(). I can point an audio element at the livestream URL, but then there is an 8 second delay; this code achieves a 1 second delay.
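One portable alternative (just a sketch, not tested against this particular stream) is to drop the non-standard responseType values entirely and read the body with fetch(), which exposes the incoming bytes as Uint8Array chunks in all modern browsers. Note that decodeAudioData() still expects complete, decodable data, so raw MP3 chunks may need to be accumulated or split on frame boundaries before decoding:
async function streamChunks(liveURL, onChunk) {
  var response = await fetch(liveURL);
  var reader = response.body.getReader();
  while (true) {
    var result = await reader.read();
    if (result.done) break;
    onChunk(result.value); // result.value is a Uint8Array of raw bytes
  }
}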

JavaScript AudioContext on Safari / iOS 15 and 14 doesn't work anymore

A recent iOS/Safari upgrade seems to have broken the Web Audio API...
Here is simple code that worked before on iOS and Safari, but since the upgrade it doesn't work anymore. By the way, it works on Firefox, Edge, and Chrome.
https://www.mylooper.com/debug.html
var url = 'https://www.mylooper.com/static/btf_10_loop1.aac';
var context = new AudioContext();
var source = context.createBufferSource();
// connect it to the destination so you can hear it
source.connect(context.destination);

/* --- load buffer --- */
var request = new XMLHttpRequest();
// open the request
request.open('GET', url, true);
// Web Audio parameters
request.responseType = 'arraybuffer';
request.onload = function() {
  context.decodeAudioData(request.response, function(response) {
    /* --- play the sound AFTER the buffer has loaded --- */
    // set the buffer to the response we just received
    source.buffer = response;
    source.start(0);
    source.loop = true;
  }, function() { console.error('The request failed.'); });
};
// Now that the request has been defined, actually make the request (send it).
request.send();
Lots of libraries are affected, like howler.js, tuna.js... but I don't know if this "bug"/"feature" will ever be fixed...
By the way, for now, how do you do a perfect audio loop with JS on Safari / iOS?
Thanks a lot, this thing is driving me crazy...
What do you mean by "broken"? If it means that the audio does not start playing and you see this warning:
The AudioContext was not allowed to start. It must be resumed (or created) after a user gesture on the page.
then you should call source.start(0) after a user gesture.
For example, call request.send() in some button's onclick handler.
Or wrap all your code in a button's onclick handler:
const button = document.getElementById("foo");
button.onclick = () => {
  var url = 'https://www.mylooper.com/static/btf_10_loop1.aac';
  var context = new AudioContext();
  var source = context.createBufferSource();
  // connect it to the destination so you can hear it
  source.connect(context.destination);

  /* --- load buffer --- */
  var request = new XMLHttpRequest();
  // open the request
  request.open('GET', url, true);
  // Web Audio parameters
  request.responseType = 'arraybuffer';
  request.onload = function() {
    context.decodeAudioData(request.response, function(response) {
      /* --- play the sound AFTER the buffer has loaded --- */
      // set the buffer to the response we just received
      source.buffer = response;
      source.start(0);
      source.loop = true;
    }, function() { console.error('The request failed.'); });
  };
  // Now that the request has been defined, actually make the request (send it).
  request.send();
};
This is a feature, described here: https://developer.chrome.com/blog/autoplay/#webaudio
So it will not be "fixed".
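An alternative sketch, assuming you keep the original top-level loading code: an AudioContext created before any gesture starts out suspended, so you can resume it from a click instead of moving everything into the handler; resume() returns a Promise. (The "foo" id is hypothetical.)
const unlockButton = document.getElementById("foo"); // hypothetical button id
unlockButton.onclick = () => {
  // resume() is only honored from inside a user gesture
  context.resume().then(() => console.log('AudioContext state:', context.state));
};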

Web Audio API - How to set gain of output but also specify the channel to output to?

My code:
// Create an AudioContext instance for this sound
var audioContext = new (window.AudioContext || window.webkitAudioContext)();
var maxChannelCount = audioContext.destination.maxChannelCount;
var gainNode = audioContext.createGain();
audioContext.destination.channelCount = maxChannelCount;
var merger = audioContext.createChannelMerger(maxChannelCount);
merger.connect(audioContext.destination);
gainNode.gain.value = 0.1; // 10%
gainNode.connect(audioContext.destination);

// Create a buffer source for the incoming sound content
var source = audioContext.createBufferSource();

// Create the XHR which will grab the audio contents
var request = new XMLHttpRequest();
// Set the audio file src here
request.open('GET', 'phonemes/bad-bouyed/bad.mp3', true);
// Setting the responseType to arraybuffer sets up the audio decoding
request.responseType = 'arraybuffer';
request.onload = function() {
  // Decode the audio once the request is complete
  audioContext.decodeAudioData(request.response, function(buffer) {
    source.buffer = buffer;
    // Connect the source to the merger (can I also set the gain within this declaration?)
    source.connect(merger, 0, 10);
    // Simple setting for the buffer
    source.loop = false;
    // Play the sound!
    source.start(0);
  }, function(e) {
    console.log('Audio error! ', e);
  });
};
// Send the request, which kicks everything off
request.send();
The above code loads an mp3 file and plays it using the Web Audio API, and this works great. I am just wondering whether it is possible to set the gain of the source AND also set the output channel using my existing code. You can see above that I have already created a gain node and connected it to the audio context. What is the syntax for specifying both? Do I declare them in the order I wish for them to be executed?
For example
source.connect(merger, 0, 10);
source.connect(gainNode);
Any help on this would be great.
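For what it's worth, AudioNode.connect() can be chained, so one sketch (assuming maxChannelCount is at least 11, so input index 10 exists on the merger) is to route the source through the gain node and then into the chosen merger input:
// Route: source -> gainNode -> merger input 10 -> destination.
// connect(node, outputIndex, inputIndex) picks the merger input.
source.connect(gainNode);
gainNode.connect(merger, 0, 10);
With that routing, the earlier gainNode.connect(audioContext.destination) line would be dropped, so the signal only reaches the speakers through the merger.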

Basic Web Audio API not playing an mp3 file?

I'm trying to follow an online tutorial by piecing together the examples. I feel like this should be playing the mp3 file. I'm using the Chrome browser and it's up to date. I don't get any errors in the console. I'm not sure what I need to change or add to make this work.
<script type="text/javascript">
//creating an audio context
window.addEventListener('load', init);
function init()
{
  try
  {
    window.AudioContext = window.AudioContext || window.webkitAudioContext;
    var context = new AudioContext();
  }
  catch(e)
  {
    alert("Your browser doesn't support Web Audio API");
  }
  loadSound();
  playSound();
}
//loading sound into the created audio context
function loadSound()
{
  //set the audio file's URL
  var audioURL = 'audio files/song.mp3';
  //creating a new request
  var request = new XMLhttpRequest();
  request.open("GET", audioURL, true);
  request.responseType = 'arraybuffer';
  request.onLoad funtion(){
    //take the audio from the http request and decode it in an audio buffer
    var audioBuffer = null;
    context.decodeAudioData(request.response, function(buffer){ audioBuffer = buffer; });
  }
  request.send();
}, onError);
//playing the audio file
function playSound(buffer) {
  //creating source node
  var source = audioContext.createBufferSource();
  //passing in file
  source.buffer = audioBuffer;
  //start playing
  source.start(0);
}
</script>
</head>
</html>
You are using an async XMLHttpRequest (note that it should be spelled with a capital "H"), so most probably the playSound function is called before request.onLoad (note: missing =) completes.
Try tracing the execution of your script using console.log or similar to find bugs like this, and use the JavaScript console to catch syntax errors.
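A minimal sketch of that ordering, runnable in the console: send() returns immediately, and the onload handler only fires once the response has arrived.
var request = new XMLHttpRequest();
request.open('GET', 'audio files/song.mp3', true);
request.onload = function() { console.log('2: response arrived'); };
request.send();
console.log('1: send() returned, response not here yet');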
I got this thing fixed :) I made use of an audio tag along with the Web Audio API. Here's the code:
var audio = new Audio();
audio.src = 'audio files/song.mp3';
audio.controls = true;
audio.autoplay = true;
document.body.appendChild(audio);

var context = new webkitAudioContext();
var analyser = context.createAnalyser();

window.addEventListener('load', function(e) {
  // Our <audio> element will be the audio source.
  var source = context.createMediaElementSource(audio);
  source.connect(analyser);
  analyser.connect(context.destination);
}, false);
Thanks for trying to help :))
Is your audioURL correct?
audio files/song.mp3 - why is there a space in it?
============Edit============
<script>
//creating an audio context
var context;
var audioBuffer;
window.addEventListener('load', init);

function init()
{
  try
  {
    window.AudioContext = window.AudioContext || window.webkitAudioContext;
    context = new AudioContext();
  }
  catch(e)
  {
    alert("Your browser doesn't support Web Audio API");
  }
  loadSound();
  // playSound(); // comment here
}

//loading sound into the created audio context
function loadSound()
{
  //set the audio file's URL
  var audioURL = 'AllofMe.mp3';
  //creating a new request
  var request = new XMLHttpRequest();
  request.open("GET", audioURL, true);
  request.responseType = 'arraybuffer';
  request.onload = function(){
    //take the audio from the http request and decode it in an audio buffer
    context.decodeAudioData(request.response, function(buffer){
      audioBuffer = buffer;
      console.log(audioBuffer);
      if(audioBuffer){ // check here
        playSound();
      }
    });
  };
  request.send();
}

//playing the audio file
function playSound() {
  //creating source node
  var source = context.createBufferSource();
  //passing in file
  source.buffer = audioBuffer;
  source.connect(context.destination); // added
  //start playing
  source.start(0);
  console.log('playing');
}
</script>
I was looking for a solution to play mp3 on a mobile device, found this page, and got the provided example to work with help from here. Working example below:
var context;
var saved;
try {
  context = new (window.AudioContext || window.webkitAudioContext)();
} catch (e) {
  console.log("Your browser doesn't support Web Audio API");
}

if (saved) {
  playSound(saved);
} else {
  loadSound();
}

//loading sound into the created audio context
function loadSound() {
  //set the audio file's URL
  var audioURL = '/path/to/file.mp3';
  //creating a new request
  var request = new XMLHttpRequest();
  request.open('GET', audioURL, true);
  request.responseType = 'arraybuffer';
  request.onload = function () {
    //take the audio from the http request and decode it in an audio buffer
    context.decodeAudioData(request.response, function (buffer) {
      //save the buffer so it is not loaded again
      saved = buffer;
      //play sound
      playSound(buffer);
    });
  };
  request.send();
}

//playing the audio file
function playSound(buffer) {
  //creating source node
  var source = context.createBufferSource();
  //passing in data
  source.buffer = buffer;
  //telling the source which sound to play
  source.connect(context.destination);
  //start playing
  source.start(0);
}
It looks like playing files works fine on an Android device, but not on iOS. For that you have to follow this guide: How to: Web Audio on iOS (but use the touchend event and replace the noteOn method with start).
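For reference, a minimal sketch of the unlock trick that guide describes, assuming the context variable from the working example above: play a short silent buffer from inside a touchend handler so that later programmatic playback is allowed.
window.addEventListener('touchend', function unlock() {
  // a one-sample silent buffer is enough to unlock the context on iOS
  var silent = context.createBuffer(1, 1, 22050);
  var unlockSource = context.createBufferSource();
  unlockSource.buffer = silent;
  unlockSource.connect(context.destination);
  unlockSource.start(0);
  window.removeEventListener('touchend', unlock);
}, false);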

Web Audio API XHR Response = null

I can't seem to figure out why my XHR response is null. My code appears to be correct based on the examples I have read. I am currently initializing the script on DOM load with a listener, loading the audio file asynchronously, and calling a play function once the buffer is decoded. Examining the response header shows null. Does anyone have a clue as to why?
//ON DOM LOAD INITIALIZE AUDIO
window.addEventListener('load', init, false); // On window load, initialize

//GLOBAL VARS
var context;
var myAudioBuffer = null;
var source = null;

//INITIALIZE
function init() {
  try {
    window.AudioContext = window.AudioContext || window.webkitAudioContext;
    context = new AudioContext();
    loadSound("https://s3.amazonaws.com/PersonalWebServerContent/Projects/PalmDesert/Audio/sunshine.mp3"); //Load Audio Track
  }
  catch(e) {
    alert('Sorry, your browser does not support the Web Audio API.');
  }
}

//LOAD SOUND SOURCE
function loadSound(url) {
  var request = new XMLHttpRequest();
  request.open('GET', url, true);
  request.responseType = 'arraybuffer';
  // Decode asynchronously
  request.onload = function() {
    context.decodeAudioData(request.response, function(buffer) {
      myAudioBuffer = buffer;
      playSound(myAudioBuffer); //Play Audio Track
    });
  };
  request.send();
}

//PLAY SOUND BUFFER
function playSound(buffer) {
  source = context.createBufferSource();
  source.buffer = buffer;
  source.connect(context.destination);
  source.start();
}
If you test this in Chrome 33.0.1736.2 dev you get the following error:
XMLHttpRequest cannot load https://s3.amazonaws.com/PersonalWebServerContent/Projects/PalmDesert/Audio/sunshine.mp3. No 'Access-Control-Allow-Origin' header is present on the requested resource. Origin 'http://fiddle.jshell.net' is therefore not allowed access.
Here is the source: http://jsfiddle.net/dragulceo/84G7v/
I was not able to access the URL on my S3 bucket due to the same-origin policy. My solution was to load the file locally. Thank you @tavi for the error response; it helped me come to the solution.
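If loading locally isn't an option, another route (a sketch, assuming you control the bucket; https://example.com stands in for your page's origin) is to attach a CORS configuration to the S3 bucket so it sends the Access-Control-Allow-Origin header for GET requests:
[
  {
    "AllowedOrigins": ["https://example.com"],
    "AllowedMethods": ["GET"],
    "AllowedHeaders": ["*"]
  }
]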
