I know there's an option loop=true on an AudioBufferSourceNode. But I just want to repeat the wav a certain number of times. For example, how might I repeat it twice?
var url = 'main.wav';
var context = new AudioContext();
var source = context.createBufferSource();
source.connect(context.destination);
var request = new XMLHttpRequest();
request.open('GET', url, true);
request.responseType = 'arraybuffer';
request.onload = function() {
context.decodeAudioData(request.response, function(response) {
source.buffer = response;
source.start(0);
source.start(source.buffer.duration); // doesn't work "cannot call start more than once."
//source.loop = true; // this is infinite amount of times, not good
}, function () { console.error('The request failed.'); } );
}
request.send();
I also tried to create a second buffer:
var source2 = context.createBufferSource();
// inside callback
source2.buffer = response; // same response so I don't need to load this separately
source2.start(source.buffer.duration); // doesn't work
But that didn't work either. Any ideas?
Your approach using a second buffer is almost correct. You just need to use an absolute start time, not a relative one. To get the absolute time, add the sound's duration to the audio context's current time:
// Play first sound now
var source1 = context.createBufferSource();
source1.buffer = response;
source1.connect(context.destination);
source1.start(context.currentTime); // This is the same as source1.start();
// Play second sound with a delay, right when the first one ends
var source2 = context.createBufferSource();
source2.buffer = response;
source2.connect(context.destination);
source2.start(context.currentTime + response.duration);
This approach provides gapless playback (if your files are gapless).
You can wrap this into a simple function:
function play(context, buffer, delay=0) {
var source = context.createBufferSource();
source.buffer = buffer;
source.connect(context.destination);
source.start(context.currentTime + delay);
return source;
}
...
// Play the sound twice, one after another
play(context, response, 0);
play(context, response, response.duration);
You can set the AudioBufferSourceNode's loop to true and call
source.stop( ctx.currentTime + audioBuffer.duration * repeatCount )
(async () => {
const url = 'https://dl.dropboxusercontent.com/s/1cdwpm3gca9mlo0/kick.mp3';
const buf = await fetch( url ).then( (r) => r.arrayBuffer() );
const ctx = new AudioContext();
const audioBuffer = await ctx.decodeAudioData( buf );
btn.onclick = (e) => {
const source = ctx.createBufferSource();
source.buffer = audioBuffer;
source.loop = true;
source.connect( ctx.destination );
source.start( 0 );
source.stop( ctx.currentTime + audioBuffer.duration * inp.valueAsNumber );
};
btn.disabled = false;
})().catch( console.error );
<label> repeat<input id="inp" type="number" min="1" max="20" value="5"></label>
<button id="btn" disabled>start</button>
Another alternative is to use the 3-arg version of start(). Something like:
let nReps = 2; /* Number of times to repeat the source */
let s = new AudioBufferSourceNode(context,
{buffer: buffer, loop: true});
s.connect(context.destination);
s.start(startTime, 0, buffer.duration * nReps);
One solution would be to enable the loop behavior on your source and then schedule a callback that calls stop() on the source after enough time has passed for the sound file to play the desired number of times:
const url = 'main.wav';
const context = new AudioContext();
const source = context.createBufferSource();
// Number of times the sound should play
const playbackCount = 5;
source.connect(context.destination);
const request = new XMLHttpRequest();
request.open('GET', url, true);
request.responseType = 'arraybuffer';
request.onload = () => {
context.decodeAudioData(request.response, (response) => {
source.buffer = response;
// Schedule sound to stop at future time, after which sound
// should have played "playbackCount" number of times
setTimeout(() => {
source.stop();
}, response.duration * playbackCount * 1000);
// Enable loop behavior
source.loop = true;
source.start(0);
}, () => {
console.error('The request failed.');
});
}
request.send();
Related
Hi, so I'm trying to make a drum kit using the AudioContext API. My issue is that after exactly 50 uses it stops working. The only thing I found that keeps it working is to close the previous AudioContext, but that makes a "clicky" sound because the audio stops abruptly.
Any ideas on what to do?
Here is what I'm working with to keep it from stopping at 50 uses:
let i = 0;
let audioContext;
let volumeControl;
// Credit to MDN for AudioContext and StereoPannerNode tutorial.
function playSound(src, volume, pitch, stereo) {
if (audioContext != null) {
//volumeControl.gain.value = 0;
audioContext.close();
}
console.log(i++);
audioContext = new AudioContext();
const stereoControl = new StereoPannerNode(audioContext);
volumeControl = audioContext.createGain();
volumeControl.gain.value = volume;
stereoControl.pan.value = stereo;
const source = audioContext.createBufferSource();
const request = new XMLHttpRequest();
request.open('GET', src, true);
request.responseType = 'arraybuffer';
request.onload = function() {
const audioData = request.response;
audioContext.decodeAudioData(audioData, function(buffer) {
source.buffer = buffer;
source.playbackRate.value = pitch;
source.connect(volumeControl).connect(stereoControl).connect(audioContext.destination);
});
};
request.send();
source.play = source.start;
source.play();
}
Don't create an audio context for each sound, but create a single one for your page and add nodes to it. Something like this...
const audioContext = new AudioContext();
function playSound(src, volume, pitch, stereo) {
const stereoControl = audioContext.createStereoPanner();
const volumeControl = audioContext.createGain();
volumeControl.gain.value = volume;
stereoControl.pan.value = stereo;
const source = audioContext.createBufferSource();
const request = new XMLHttpRequest();
request.open("GET", src, true);
request.responseType = "arraybuffer";
request.onload = function() {
const audioData = request.response;
audioContext.decodeAudioData(audioData, function(buffer) {
source.buffer = buffer;
source.playbackRate.value = pitch;
source
.connect(volumeControl)
.connect(stereoControl)
.connect(audioContext.destination);
source.start();
});
};
request.send();
}
In addition, for a drum kit you'll want to either preload all samples, or at the very least cache the decoded audio buffers instead of making a request for them every time:
const cache = {};
const audioContext = new AudioContext();
function loadSound(src) {
if (cache[src]) {
// Already cached
return Promise.resolve(cache[src]);
}
return new Promise(resolve => {
const request = new XMLHttpRequest();
request.open("GET", src, true);
request.responseType = "arraybuffer";
request.onload = function() {
const audioData = request.response;
audioContext.decodeAudioData(audioData, function(buffer) {
cache[src] = buffer;
resolve(buffer);
});
};
request.send();
});
}
function playSound(src, volume, pitch, stereo) {
loadSound(src).then(buffer => {
const stereoControl = audioContext.createStereoPanner();
const volumeControl = audioContext.createGain();
volumeControl.gain.value = volume;
stereoControl.pan.value = stereo;
const source = audioContext.createBufferSource();
source.buffer = buffer;
source.playbackRate.value = pitch;
source
.connect(volumeControl)
.connect(stereoControl)
.connect(audioContext.destination);
source.start();
});
}
I'm trying to get a MediaStream object into a blob, but I have some problems.
This is the part of my script for the UserMedia component:
var mediastream = undefined;
var blob = undefined;
var video = document.getElementById('test');
This is my script where I'm trying my test:
var mediastream = undefined;
var blobUrl = undefined;
var video = document.getElementById('test');
function successCallback (stream) {
mediastream = stream;
blobUrl = URL.createObjectURL(mediastream);
var xhr = new XMLHttpRequest();
xhr.open('GET', blobUrl, true);
xhr.responseType = 'arraybuffer';
xhr.withCredentials = true;
xhr.onreadystatechange = function () {
console.log('xhr.readyState='+xhr.readyState);
if (xhr.readyState !== 4) {
return;
} else {
var myBlob = this.response;
}
};
xhr.send();
}
function errorCallback (stream) {
console.log('error');
}
Setting the blobUrl as the video's src works without any problem:
video.src = blobUrl; <- WORKS
But if I request the blob URL itself (like this one: blob:http%3A//localhost%3A8080/b1ffa5b7-c0cc-4acf-8890-f5d0f08de9cb ) I get an HTTP 404 status.
There is surely something wrong with my implementation.
Any suggestions?
Thank you!
I am trying to stream MP3 file from a nodeJS server using BinaryJS - http://binaryjs.com/
But when I decode the buffers on the client side, they seem to overlap, meaning that each new chunk of data starts playing a few milliseconds before the previous one has ended, causing the audio to lag.
Is there any way to make the client wait until the current buffer has finished before starting the next one?
Server:
var BinaryServer = require('binaryjs').BinaryServer;
var fs = require('fs');
var server = BinaryServer({port: 9000});
server.on('connection', function(client){
var file = fs.createReadStream(__dirname + '/Song.mp3', {
'flags': 'r',
'bufferSize': 4 * 1024
});
client.send(file);
});
Client:
var client = new BinaryClient('ws://localhost:9000');
window.AudioContext = window.AudioContext || window.webkitAudioContext;
var context = new AudioContext();
client.on('stream', function (stream, meta) {
var parts = [];
var last = 0;
stream.on('data', function (data) {
var source = context.createBufferSource();
context.decodeAudioData(data, function (buf) {
source.buffer = buf;
source.connect(context.destination);
source.loop = false;
source.start(last);
last += buf.duration;
source.onended = function() {
console.log('Your audio has finished playing');
};
},
function (e) {
console.log("Error with decoding audio data" + e.err);
});
parts.push(data);
});
stream.on('end', function () {
console.log(parts);
});
});
Not sure about this, but instead of initializing last to 0, you might want to initialize it to context.currentTime.
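A minimal sketch of that tweak, applied to the client's 'data' handler above (everything else stays the same):
var parts = [];
var last = context.currentTime; // schedule from the context clock, not from 0
stream.on('data', function (data) {
    context.decodeAudioData(data, function (buf) {
        var source = context.createBufferSource();
        source.buffer = buf;
        source.connect(context.destination);
        source.start(last); // each chunk starts where the previous one is scheduled to end
        last += buf.duration;
    });
    parts.push(data);
});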
I'm new to programming and I'm messing around with the Web Audio API. Right now, I have three samples (kick, clap, hihat) that make up a simple drumkit beat when a Play button is pressed.
I want to be able to illustrate this visually on the front end as a sequencer that plays through this drumkit. For instance, every time the "kick.wav" sample is played, I want to change the color of a div that is associated with it.
My questions are:
How do I associate each time a kick, clap, or hihat is played with a div in the HTML?
How can I make that association step through the sequence when the play button is hit?
HTML:
<body>
<div id="container">
<canvas id="canvas"></canvas>
</div>
<button id="play">play</button>
<script src="javascript/tween.js"></script>
<script src="javascript/shared.js"></script>
<script src="javascript/seq.js"></script>
</body>
Javascript:
function playSound(buffer, time) {
var source = context.createBufferSource();
source.buffer = buffer;
source.connect(context.destination);
source.start(time);
}
function loadSounds(obj, soundMap, callback) {
// Array-ify
var names = [];
var paths = [];
for (var name in soundMap) {
var path = soundMap[name];
names.push(name);
paths.push(path);
}
bufferLoader = new BufferLoader(context, paths, function(bufferList) {
for (var i = 0; i < bufferList.length; i++) {
var buffer = bufferList[i];
var name = names[i];
obj[name] = buffer;
}
if (callback) {
callback();
}
});
bufferLoader.load();
}
function BufferLoader(context, urlList, callback) {
this.context = context;
this.urlList = urlList;
this.onload = callback;
this.bufferList = new Array();
this.loadCount = 0;
}
BufferLoader.prototype.loadBuffer = function(url, index) {
// Load buffer asynchronously
var request = new XMLHttpRequest();
request.open("GET", url, true);
request.responseType = "arraybuffer";
var loader = this;
request.onload = function() {
// Asynchronously decode the audio file data in request.response
loader.context.decodeAudioData(
request.response,
function(buffer) {
if (!buffer) {
alert('error decoding file data: ' + url);
return;
}
loader.bufferList[index] = buffer;
if (++loader.loadCount == loader.urlList.length)
loader.onload(loader.bufferList);
},
function(error) {
console.error('decodeAudioData error', error);
}
);
}
request.onerror = function() {
alert('BufferLoader: XHR error');
}
request.send();
};
BufferLoader.prototype.load = function() {
for (var i = 0; i < this.urlList.length; ++i)
this.loadBuffer(this.urlList[i], i);
};
var RhythmSample = function() {
loadSounds(this, {
kick: 'sounds/kick.wav',
claps: 'sounds/claps.wav',
hihat: 'sounds/hihat.wav'
});
};
RhythmSample.prototype.play = function() {
// We'll start playing the rhythm 100 milliseconds from "now"
var startTime = context.currentTime + 0.100;
var tempo = 120; // BPM (beats per minute)
var eighthNoteTime = (60 / tempo) / 2;
var allDivs = document.getElementsByName('colorchangingdivs[]');
// Play 2 bars of the following:
for (var bar = 0; bar < 2; bar++) {
var time = startTime + bar * 8 * eighthNoteTime;
// Play the bass (kick) drum on beats 1, 5
playSound(this.kick, time);
playSound(this.kick, time + 4 * eighthNoteTime);
console.log("4")
// Play the snare drum on beats 3, 7
playSound(this.claps, time + 2 * eighthNoteTime);
playSound(this.claps, time + 6 * eighthNoteTime);
// Play the hi-hat every eighth note.
for (var i = 0; i < 8; ++i) {
playSound(this.hihat, time + i * eighthNoteTime);
}
}
};
var sample = new RhythmSample();
document.querySelector('#play').addEventListener('click', function() {
sample.play();
});
THANKS SO MUCH!
Add another parameter to playSound() that holds the id of the div you want to color, and add logic that changes the color when the sound plays, selecting the div by the id you've passed in.
function playSound(buffer, time, colorID) {
var source = context.createBufferSource();
source.buffer = buffer;
source.connect(context.destination);
source.start(time);
document.getElementById(colorID).style.backgroundColor = 'blue'; // or a hex/rgb value
}
Then you just have to add the right parameter when you call playSound.
playSound(this.kick, time + 4 * eighthNoteTime,"your-kick-div-id");
If you need different colors for different sounds/divs, just add an if/else if statement where the background color is set, as in the sketch below.
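For example, a minimal sketch, assuming hypothetical div ids such as 'kick-div' and 'clap-div' (use whatever ids your markup actually has):
function playSound(buffer, time, colorID) {
    var source = context.createBufferSource();
    source.buffer = buffer;
    source.connect(context.destination);
    source.start(time);
    // Pick a color per div id; the ids and colors here are placeholders
    var color;
    if (colorID === 'kick-div') {
        color = '#e74c3c';
    } else if (colorID === 'clap-div') {
        color = '#2ecc71';
    } else {
        color = '#3498db';
    }
    document.getElementById(colorID).style.backgroundColor = color;
}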
Chrome v6, Safari v7 (works in v6)
if('AudioContext' in window) {
var myAudioContext = new AudioContext();
const PATH = 'Sounds/',
SOUNDS = ['Tock08'];
var soundBuffers = {}, myNodes = {};
function fetchSounds() {
var request = new XMLHttpRequest();
for (var i = 0, len = SOUNDS.length; i < len; i++) {
request = new XMLHttpRequest();
request._soundName = SOUNDS[i];
request.open('GET', PATH + request._soundName + '.aiff', true);
request.responseType = 'arraybuffer';
request.addEventListener('load', bufferSound, false);
request.send();
}
}
function bufferSound(event) {
var request = event.target;
var buffer = myAudioContext.createBuffer(request.response, false);
soundBuffers[request._soundName] = buffer;
}
}
function clickSound() {
if('AudioContext' in window ) { //Chrome doesn't work with this or above code
// create a new AudioBufferSourceNode
source = myAudioContext.createBufferSource();
source.buffer = soundBuffers['Tock08'];
source.loop = false;
source.connect(myNodes.volume);
myNodes.volume.gain.value = 1;
myNodes.volume.connect(myAudioContext.destination);
source.noteOn(0); // play right now (0 seconds from now)
}
}
In Safari, all is well. An array of sound buffers is created, and a call to clickSound results in a satisfying "click".
In Chrome, things are different.
The line:
var buffer = myAudioContext.createBuffer(request.response, false);
is flagged with
Uncaught SyntaxError: An invalid or illegal string was specified.
on loading.
Then if I call "clickSound" I get:
source.buffer = soundBuffers['Tock08'];
Uncaught TypeError: Value is not of type AudioBuffer.
Does anyone know why this problem occurs?
Chrome still uses an older webkitAudioContext. This is how I set up context in SoundJS, and it works everywhere:
if (window.webkitAudioContext) {
s.context = new webkitAudioContext();
} else if (window.AudioContext) {
s.context = new AudioContext();
}
Hope that helps.