Why does this code work in Safari but not Chrome? Arrrgh

Chrome v6, Safari v7 (works in v6)
if ('AudioContext' in window) {
    var myAudioContext = new AudioContext();
    const PATH = 'Sounds/',
        SOUNDS = ['Tock08'];
    var soundBuffers = {}, myNodes = {};

    function fetchSounds() {
        var request = new XMLHttpRequest();
        for (var i = 0, len = SOUNDS.length; i < len; i++) {
            request = new XMLHttpRequest();
            request._soundName = SOUNDS[i];
            request.open('GET', PATH + request._soundName + '.aiff', true);
            request.responseType = 'arraybuffer';
            request.addEventListener('load', bufferSound, false);
            request.send();
        }
    }

    function bufferSound(event) {
        var request = event.target;
        var buffer = myAudioContext.createBuffer(request.response, false);
        soundBuffers[request._soundName] = buffer;
    }
}
function clickSound() {
    if ('AudioContext' in window) { // Chrome doesn't work with this or the code above
        // create a new AudioBufferSourceNode
        source = myAudioContext.createBufferSource();
        source.buffer = soundBuffers['Tock08'];
        source.loop = false;
        source.connect(myNodes.volume);
        myNodes.volume.gain.value = 1;
        myNodes.volume.connect(myAudioContext.destination);
        source.noteOn(0); // play right now (0 seconds from now)
    }
}
In Safari, all is well. An array of sound buffers is created, and a call to clickSound results in a satisfying "click".
In Chrome, things are different.
The line
    var buffer = myAudioContext.createBuffer(request.response, false);
is flagged on loading with:
    Uncaught SyntaxError: An invalid or illegal string was specified.
Then, if I call clickSound, the line
    source.buffer = soundBuffers['Tock08'];
throws:
    Uncaught TypeError: Value is not of type AudioBuffer.
Does anyone know why this problem occurs?

Chrome still uses the older, prefixed webkitAudioContext. This is how I set up the context in SoundJS, and it works everywhere:
if (window.webkitAudioContext) {
    s.context = new webkitAudioContext();
} else if (window.AudioContext) {
    s.context = new AudioContext();
}
Hope that helps.
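One note beyond the prefix: the two-argument createBuffer(arrayBuffer, mixToMono) used in the question is the old synchronous decoding API, which later browsers dropped. The asynchronous decodeAudioData works on both the prefixed and unprefixed contexts. A minimal sketch of the fetch-and-decode step, reusing the PATH and sound names from the question (and assuming the browser can actually decode .aiff files, which not all can):

var ContextClass = window.AudioContext || window.webkitAudioContext;
var myAudioContext = new ContextClass();
var soundBuffers = {};

function fetchSound(name) {
    var request = new XMLHttpRequest();
    request.open('GET', 'Sounds/' + name + '.aiff', true); // PATH from the question
    request.responseType = 'arraybuffer';
    request.onload = function() {
        // decodeAudioData replaces the synchronous createBuffer(data, mixToMono)
        myAudioContext.decodeAudioData(request.response, function(buffer) {
            soundBuffers[name] = buffer;
        }, function(e) {
            console.error('decoding failed for ' + name, e);
        });
    };
    request.send();
}

fetchSound('Tock08');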

Related

AudioContext stops working after being used exactly 50 times

Hi, so I'm trying to make a drum kit using the AudioContext API. My issue is that after I use it exactly 50 times, it stops working. The only fix I've found is to close the previous AudioContext, but that produces a "clicky" sound because the audio stops abruptly.
Any ideas on what to do?
Here is what I'm working with to keep it from stopping at 50 uses:
let i = 0;
let audioContext;
let volumeControl;

// Credit to MDN for the AudioContext and StereoPannerNode tutorial.
function playSound(src, volume, pitch, stereo) {
    if (audioContext != null) {
        //volumeControl.gain.value = 0;
        audioContext.close();
    }
    console.log(i++);
    audioContext = new AudioContext();
    const stereoControl = new StereoPannerNode(audioContext);
    volumeControl = audioContext.createGain();
    volumeControl.gain.value = volume;
    stereoControl.pan.value = stereo;
    const source = audioContext.createBufferSource();
    const request = new XMLHttpRequest();
    request.open('GET', src, true);
    request.responseType = 'arraybuffer';
    request.onload = function() {
        const audioData = request.response;
        audioContext.decodeAudioData(audioData, function(buffer) {
            source.buffer = buffer;
            source.playbackRate.value = pitch;
            source.connect(volumeControl).connect(stereoControl).connect(audioContext.destination);
        });
    };
    request.send();
    source.play = source.start;
    source.play();
}
Don't create an audio context for each sound; create a single one for your page and add nodes to it. Something like this:
const audioContext = new AudioContext();

function playSound(src, volume, pitch, stereo) {
    const stereoControl = audioContext.createStereoPanner();
    const volumeControl = audioContext.createGain();
    volumeControl.gain.value = volume;
    stereoControl.pan.value = stereo;
    const source = audioContext.createBufferSource();
    const request = new XMLHttpRequest();
    request.open("GET", src, true);
    request.responseType = "arraybuffer";
    request.onload = function() {
        const audioData = request.response;
        audioContext.decodeAudioData(audioData, function(buffer) {
            source.buffer = buffer;
            source.playbackRate.value = pitch;
            source
                .connect(volumeControl)
                .connect(stereoControl)
                .connect(audioContext.destination);
            source.start();
        });
    };
    request.send();
}
In addition, for a drum kit you'll want to either preload all samples, or at the very least cache the decoded audio buffers rather than making a request for them every time:
const cache = {};
const audioContext = new AudioContext();

function loadSound(src) {
    if (cache[src]) {
        // Already cached
        return Promise.resolve(cache[src]);
    }
    return new Promise(resolve => {
        const request = new XMLHttpRequest();
        request.open("GET", src, true);
        request.responseType = "arraybuffer";
        request.onload = function() {
            const audioData = request.response;
            audioContext.decodeAudioData(audioData, function(buffer) {
                cache[src] = buffer;
                resolve(buffer);
            });
        };
        request.send();
    });
}

function playSound(src, volume, pitch, stereo) {
    loadSound(src).then(buffer => {
        const stereoControl = audioContext.createStereoPanner();
        const volumeControl = audioContext.createGain();
        volumeControl.gain.value = volume;
        stereoControl.pan.value = stereo;
        const source = audioContext.createBufferSource();
        source.buffer = buffer;
        source.playbackRate.value = pitch;
        source
            .connect(volumeControl)
            .connect(stereoControl)
            .connect(audioContext.destination);
        source.start();
    });
}
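With the cache in place, samples can be decoded once up front so the first hit of each pad doesn't wait on a network round trip. A short usage sketch; the sample paths here are hypothetical:

// Hypothetical sample list; adjust paths to your project.
const samples = ["sounds/kick.wav", "sounds/snare.wav", "sounds/hihat.wav"];

// Preload and decode everything at startup.
Promise.all(samples.map(loadSound)).then(() => {
    console.log("all samples decoded and cached");
});

// Later, e.g. from a keydown handler: full volume, normal pitch, centered.
playSound("sounds/kick.wav", 1, 1, 0);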

The ScriptProcessorNode doesn't work with OfflineContext

The ScriptProcessorNode doesn't work with an OfflineAudioContext.
It works in Chrome and Mozilla Firefox.
It doesn't work in Edge 25 or Safari 10.
The issue is that the onaudioprocess event is only called once when the OfflineAudioContext is processed.
Example on jsfiddle without BufferSource.
Example on jsfiddle based on MDN example with BufferSource.
console.clear();

var playButton = document.querySelector('.play');
var playButtonOffline = document.querySelector('.play-offline');

var current = 0;
var buffer_size = 4096;
var buffer_length = buffer_size * 10;

var audioCtx = new (window.AudioContext || window.webkitAudioContext)();
var scriptNode = audioCtx.createScriptProcessor(buffer_size, 1, 1);
scriptNode.onaudioprocess = whiteNoise;

function whiteNoise(audioProcessingEvent) {
    console.log('onaudioprocess', current);
    // The output buffer contains the samples that will be modified and played
    var outputBuffer = audioProcessingEvent.outputBuffer;
    // Loop through the output channels
    for (var channel = 0; channel < outputBuffer.numberOfChannels; channel++) {
        var outputData = outputBuffer.getChannelData(channel);
        for (var sample = 0; sample < buffer_size; sample++) {
            // add noise to each output sample
            outputData[sample] += ((Math.random() * 2) - 1);
        }
    }
    current += buffer_size;
    if (current > buffer_length)
        scriptNode.disconnect();
}

playButton.onclick = function() {
    current = 0;
    scriptNode.connect(audioCtx.destination);
}

playButtonOffline.onclick = function() {
    var offlineCtx = new (window.OfflineAudioContext || window.webkitOfflineAudioContext)(1, buffer_length, 48000);
    var scriptNodeOffline = offlineCtx.createScriptProcessor(buffer_size, 1, 1);
    scriptNodeOffline.onaudioprocess = whiteNoise;
    current = 0;
    offlineCtx.oncomplete = function(e) {
        console.log('rendered buffer', e.renderedBuffer.getChannelData(0).filter(f => f != 0).length);
    }
    scriptNodeOffline.connect(offlineCtx.destination);
    offlineCtx.startRendering();
}
<button class="play">play</button>
<button class="play-offline">Render offline</button>
Update
Clicking Render offline many times in Chrome and Firefox produces the same output each time.
Clicking Render offline many times in Safari and Edge produces different output each time.
Example on jsfiddle.
// Create AudioContext and buffer source
console.clear();

var playButton = document.querySelector('.play');
var playButtonOffline = document.querySelector('.play-offline');

var myBuffer = null;
var audioCtx = new (window.AudioContext || window.webkitAudioContext)();
var source = audioCtx.createBufferSource();

// Create a ScriptProcessorNode with a bufferSize of 4096 and a single input and output channel
var scriptNode = audioCtx.createScriptProcessor(4096, 1, 1);

// load in an audio track via XHR and decodeAudioData
function getData() {
    var request = new XMLHttpRequest();
    request.open('GET', 'https://s3-ap-northeast-1.amazonaws.com/storage.cowrite.decodeapps.io/Materials/Media/Audio/59f2b85dd3aed-20171027-043853.mp3', true);
    request.responseType = 'arraybuffer';
    request.onload = function() {
        var audioData = request.response;
        audioCtx.decodeAudioData(audioData, function(buffer) {
            myBuffer = buffer;
            source.buffer = myBuffer;
        },
        function(e) {
            console.error('Error with decoding audio data', e.err);
        });
    }
    request.send();
}

function addNoise(audioProcessingEvent) {
    console.log('onaudioprocess');
    // The input buffer is the song we loaded earlier
    var inputBuffer = audioProcessingEvent.inputBuffer;
    // The output buffer contains the samples that will be modified and played
    var outputBuffer = audioProcessingEvent.outputBuffer;
    // Loop through the output channels (in this case there is only one)
    for (var channel = 0; channel < outputBuffer.numberOfChannels; channel++) {
        var inputData = inputBuffer.getChannelData(channel);
        var outputData = outputBuffer.getChannelData(channel);
        // Loop through the 4096 samples
        for (var sample = 0; sample < inputBuffer.length; sample++) {
            // make output equal to the same as the input
            outputData[sample] = inputData[sample];
            // add noise to each output sample
            outputData[sample] += ((Math.random() * 2) - 1) * 0.2;
        }
    }
}

// Give the node a function to process audio events
scriptNode.onaudioprocess = addNoise;
getData();

// wire up play button
playButton.onclick = function() {
    source.connect(scriptNode);
    scriptNode.connect(audioCtx.destination);
    source.start();
}

// When the buffer source stops playing, disconnect everything
source.onended = function() {
    source.disconnect(scriptNode);
    scriptNode.disconnect(audioCtx.destination);
}

// wire up the offline render button
playButtonOffline.onclick = function() {
    var offlineCtx = new (window.OfflineAudioContext || window.webkitOfflineAudioContext)(2, myBuffer.length, myBuffer.sampleRate);
    var scriptNodeOffline = offlineCtx.createScriptProcessor(4096, 1, 1);
    var sourceOffline = offlineCtx.createBufferSource();
    sourceOffline.buffer = myBuffer;
    sourceOffline.onended = function() {
        console.log('sourceOffline.onended');
        sourceOffline.disconnect(scriptNodeOffline);
        scriptNodeOffline.disconnect(offlineCtx.destination);
    }
    scriptNodeOffline.onaudioprocess = addNoise;
    sourceOffline.connect(scriptNodeOffline);
    scriptNodeOffline.connect(offlineCtx.destination);
    sourceOffline.start();
    offlineCtx.oncomplete = function(e) {
        console.log('renderedBuffer', e.renderedBuffer.getChannelData(0).filter(f => f != 0).length);
        listenRendered(e.renderedBuffer);
    };
    offlineCtx.startRendering();
}

var _audioCtx = new (window.AudioContext || window.webkitAudioContext)();

function listenRendered(buffer) {
    var _source = _audioCtx.createBufferSource();
    _source.buffer = buffer;
    _source.connect(_audioCtx.destination);
    _source.start();
}
<button class="play">play</button>
<button class="play-offline">Render offline</button>
These are bugs in Safari and Edge. A ScriptProcessorNode should work fine in an offline context. File bugs with Safari and Edge.
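Until those bugs are fixed, one way to sidestep ScriptProcessorNode for the white-noise example above is to fill an AudioBuffer with the noise directly and render it from an ordinary buffer source. This is an editor's workaround sketch, not part of the original answer, reusing buffer_length and the 48000 Hz rate from the question:

function renderNoiseOffline() {
    var offlineCtx = new (window.OfflineAudioContext || window.webkitOfflineAudioContext)(1, buffer_length, 48000);

    // Fill a buffer with white noise up front instead of generating it
    // per-block in onaudioprocess.
    var noiseBuffer = offlineCtx.createBuffer(1, buffer_length, 48000);
    var data = noiseBuffer.getChannelData(0);
    for (var i = 0; i < buffer_length; i++) {
        data[i] = (Math.random() * 2) - 1;
    }

    var noiseSource = offlineCtx.createBufferSource();
    noiseSource.buffer = noiseBuffer;
    noiseSource.connect(offlineCtx.destination);
    noiseSource.start();

    offlineCtx.oncomplete = function(e) {
        console.log('rendered buffer', e.renderedBuffer.length);
    };
    offlineCtx.startRendering();
}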

Building a Web Audio Amplifier

I'm trying to build an amplifier using a ScriptProcessorNode (yes, I know it is deprecated, and yes, I'm fully aware of createGainNode). The reason is that a gain node is better suited to attenuating gain than boosting it. I'm an audio engineer, and I want to be able to clip my audio. I built a static version that handles a gain of at least 150 (as long as you limit the values of the floats after the boost). However, this new version, which is controlled by a range input, just cuts the volume off completely.
I'm not getting any errors in the console; why is the volume cutting out?
<body>
    <div id="button" style="background:#000;width:50px;height:50px;" onclick="connect()"></div>
    <input id="range" type="range" min="1" max="128" value="1" oninput="myFunction(value)"/>
    <script>
        var context,
            soundSource,
            scriptNode,
            buffer,
            value = 1;

        if (typeof AudioContext !== 'undefined') {
            context = new AudioContext();
        } else if (typeof webkitAudioContext !== 'undefined') {
            context = new webkitAudioContext();
        } else {
            throw new Error('AudioContext not supported. :(');
        }

        function xhr() {
            var request = new XMLHttpRequest();
            request.open("GET", 'bass.wav', true);
            request.responseType = 'arraybuffer';
            request.onload = function() {
                context.decodeAudioData(request.response, function onSuccess(decodedData) {
                    buffer = decodedData;
                }, function onFailure() {
                    alert("Decoding the audio buffer failed");
                });
            }
            request.send();
        }

        function connect() {
            scriptNode = context.createScriptProcessor(256, 2, 2);
            scriptNode.onaudioprocess = function(audioProcessingEvent) {
                var input = audioProcessingEvent.inputBuffer;
                var output = audioProcessingEvent.outputBuffer;
                for (var channel = 0; channel < datum.numberOfChannels; channel++) {
                    var I = input.getChannelData(channel);
                    var O = output.getChannelData(channel);
                    for (var i = 0; i < input.length; i++) {
                        O[i] = I[i];
                        O[i] *= value;
                    }
                }
            }
            soundSource = context.createBufferSource();
            soundSource.buffer = buffer;
            soundSource.connect(scriptNode);
            scriptNode.connect(context.destination);
            soundSource.start();
        }
        connect();

        function myFunction(val) {
            value = val;
            soundSource.disconnect();
            scriptNode.disconnect();
            soundSource.connect(scriptNode);
            scriptNode.connect(context.destination);
        }
    </script>
EDIT
Instead of trying to build my own amplifier, I should have just used the createGainNode because it works exactly as intended.
EDIT 2
After playing with the createGainNode more, I figured out the original problem. In Chrome, the createGainNode works fine, but in Firefox it doesn't hold up as well as my amplification code: it amplifies the quieter parts of the signal, but when the louder parts come in, the audio drops. Is there any workaround for this? My amplification code:
function connect() {
    soundSource = context.createBufferSource();
    soundSource.buffer = buffer;
    volume = context.createGain();
    volume.gain.value = 1500;
    soundSource.connect(volume);
    volume.connect(context.destination);
    soundSource.start();
}

function amplify() {
    soundSource.stop();
    var left = buffer.getChannelData(0);
    var right = buffer.getChannelData(1);
    for (var i = 0; i < left.length; i++) {
        left[i] = left[i] * 1500;
        if (left[i] > 0) {
            left[i] = 128;
        }
        if (left[i] < 0) {
            left[i] = -128;
        }
        right[i] = right[i] * 1500;
        if (right[i] > 0) {
            right[i] = 128;
        }
        if (right[i] < 0) {
            right[i] = -128;
        }
    }
    buffer.copyToChannel(left, 0);
    buffer.copyToChannel(right, 1);
    var amp = context.createBufferSource();
    amp.buffer = buffer;
    amp.connect(context.destination);
    amp.start();
}
EDIT 3
I've opened a bug report on bugzilla here: https://bugzilla.mozilla.org/show_bug.cgi?id=1233821
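As an aside, hard clipping can also be had without a ScriptProcessorNode or rewriting samples by hand: boost with a gain node, then clamp with a WaveShaperNode, whose curve saturates anything outside ±1. A sketch of that alternative, not from the original post, assuming the context and buffer set up above:

// Hard-clip curve: identity inside [-1, 1]; the WaveShaper clamps
// out-of-range input to the curve's endpoints.
function makeClipCurve(samples) {
    var curve = new Float32Array(samples);
    for (var i = 0; i < samples; i++) {
        var x = (i / (samples - 1)) * 2 - 1; // map index to [-1, 1]
        curve[i] = Math.max(-1, Math.min(1, x));
    }
    return curve;
}

function connectClipped() {
    var soundSource = context.createBufferSource();
    soundSource.buffer = buffer;

    var boost = context.createGain();
    boost.gain.value = 1500; // same extreme boost as in the question

    var clipper = context.createWaveShaper();
    clipper.curve = makeClipCurve(1024);

    soundSource.connect(boost);
    boost.connect(clipper);
    clipper.connect(context.destination);
    soundSource.start();
}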

Safari iOS 6 - ajax request blob image

I have a function that mainly downloads images into a blob object, and it works fine on Chrome, FF, and iOS 7+, but not on iOS 6...
downloadImage: function( url ) {
    var that = this;
    return new Ember.RSVP.Promise(function( resolve, reject ) {
        var xhr = new XMLHttpRequest();
        xhr.open('GET', url);
        xhr.onreadystatechange = function() {
            if (this.readyState === this.DONE) {
                that.chart.incrementProgress();
                if (this.status === 200) {
                    var blob = this.response;
                    resolve( that.imageStore.writeImage( that, url, blob ) );
                }
                else {
                    resolve();
                }
            }
        };
        xhr.responseType = 'blob';
        xhr.send();
    });
}
In the iOS 6 console debugger, when I inspect my blob object, it seems to be a string with super weird characters in it. I'm not sure if that's normal or if my request doesn't work properly on this version of iOS.
After that I need to convert it to base64, so I use a FileReader like this:
this.writeImage = function( controller, url, blob ) {
    var that = this;
    return new Ember.RSVP.Promise(function( resolve ) {
        var reader = new window.FileReader();
        reader.readAsDataURL(blob);
        reader.onloadend = function() {
            var base64 = reader.result;
            var object = { id: url, key: url, base64: base64 };
            //controller.store.update('image', object).save();
            controller.store.findQuery('image', { key: url })
                .then(function( result ) {
                    var record = result.content[0];
                    record._data.base64 = base64;
                    record.save().then( resolve );
                })
                .catch(function() {
                    controller.store.createRecord('image', object).save().then( resolve );
                });
        };
    });
};
Don't pay attention to the Promise wrapper and the other arguments; the blob is the same one as in the downloadImage function.
And for some mysterious reason, reader.onloadend is never triggered: the reader's readyState is always 0.
Should I do something particular for iOS 6, or is my code wrong?
[edit]: It's like the onloadend callback is never triggered??
[edit2]: After further investigation, it seems the response from the ajax request is a string instead of a blob... and my responseType is set to "" as well?
I have found a workaround for now: I convert the binary string into a blob like this:
function binaryStringToBlob( byteCharacters, contentType ) {
    var sliceSize = 1024;
    var bytesLength = byteCharacters.length;
    var slicesCount = Math.ceil(bytesLength / sliceSize);
    var byteArrays = new Array(slicesCount);
    for (var sliceIndex = 0; sliceIndex < slicesCount; ++sliceIndex) {
        var begin = sliceIndex * sliceSize;
        var end = Math.min(begin + sliceSize, bytesLength);
        var bytes = new Array(end - begin);
        for (var offset = begin, i = 0; offset < end; ++i, ++offset) {
            bytes[i] = byteCharacters[offset].charCodeAt(0);
        }
        byteArrays[sliceIndex] = new Uint8Array(bytes);
    }
    return new Blob(byteArrays, { type: contentType });
}
You just need to get the content type, and there you go!
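For reference, here is one way the helper might be wired into the downloadImage handler: convert only when iOS 6 has ignored responseType = 'blob' and handed back a string, taking the content type from the response headers. This wiring is an assumption on my part, not part of the original workaround:

xhr.onreadystatechange = function() {
    if (this.readyState === this.DONE && this.status === 200) {
        var response = this.response;
        if (typeof response === 'string') {
            // iOS 6 ignored responseType = 'blob' and returned a binary string.
            var contentType = this.getResponseHeader('Content-Type');
            response = binaryStringToBlob(response, contentType);
        }
        resolve( that.imageStore.writeImage( that, url, response ) );
    }
};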

Uncaught reference error BufferLoader is not defined

Trying to learn the Audio API, but I get an uncaught reference error for the BufferLoader class. I'm on Chrome and it's up to date. Shouldn't this class work with no problems?
<html>
<head>
    <script type="text/javascript">
        window.onload = init;
        var context;
        var bufferLoader;

        function init() {
            context = new webkitAudioContext();
            bufferLoader = new BufferLoader(
                context,
                [
                    'https://dl.dropboxusercontent.com/u/1957768/kdFFO3.wav',
                    'https://dl.dropboxusercontent.com/u/1957768/geniuse%20meodies.wav',
                ],
                finishedLoading
            );
            bufferLoader.load();
        }

        function finishedLoading(bufferList) {
            // make two sources and play them
            var source1 = context.createBufferSource();
            var source2 = context.createBufferSource();
            source1.buffer = bufferList[0];
            source2.buffer = bufferList[1];
            source1.connect(context.destination);
            source2.connect(context.destination);
            source1.start(0);
            source2.start(0);
        }
    </script>
</head>
<body>
</body>
</html>
The BufferLoader "class" is a custom function created to abstract the use of the Web Audio API. It's not a built-in feature and must be included in your page before it can be used; there is nothing special about Chrome here. Here's an example of where it is explained: http://www.html5rocks.com/en/tutorials/webaudio/intro/#toc-abstract
To use it, include this code before it is called:
function BufferLoader(context, urlList, callback) {
    this.context = context;
    this.urlList = urlList;
    this.onload = callback;
    this.bufferList = new Array();
    this.loadCount = 0;
}

BufferLoader.prototype.loadBuffer = function(url, index) {
    // Load buffer asynchronously
    var request = new XMLHttpRequest();
    request.open("GET", url, true);
    request.responseType = "arraybuffer";

    var loader = this;

    request.onload = function() {
        // Asynchronously decode the audio file data in request.response
        loader.context.decodeAudioData(
            request.response,
            function(buffer) {
                if (!buffer) {
                    alert('error decoding file data: ' + url);
                    return;
                }
                loader.bufferList[index] = buffer;
                if (++loader.loadCount == loader.urlList.length)
                    loader.onload(loader.bufferList);
            },
            function(error) {
                console.error('decodeAudioData error', error);
            }
        );
    }

    request.onerror = function() {
        alert('BufferLoader: XHR error');
    }

    request.send();
}

BufferLoader.prototype.load = function() {
    for (var i = 0; i < this.urlList.length; ++i)
        this.loadBuffer(this.urlList[i], i);
}
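For comparison, in browsers with fetch and a promise-returning decodeAudioData, the same multi-file loading can be written without a helper class. An editor's sketch, not part of the HTML5 Rocks tutorial:

const context = new (window.AudioContext || window.webkitAudioContext)();

// Fetch and decode every URL in parallel; resolves to an array of AudioBuffers
// in the same order as the input list.
function loadBuffers(urls) {
    return Promise.all(urls.map(url =>
        fetch(url)
            .then(response => response.arrayBuffer())
            .then(data => context.decodeAudioData(data))
    ));
}

// Usage mirroring finishedLoading() from the question:
loadBuffers([
    'https://dl.dropboxusercontent.com/u/1957768/kdFFO3.wav',
    'https://dl.dropboxusercontent.com/u/1957768/geniuse%20meodies.wav',
]).then(bufferList => {
    bufferList.forEach(buffer => {
        const source = context.createBufferSource();
        source.buffer = buffer;
        source.connect(context.destination);
        source.start(0);
    });
});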
