So I have a bunch of loaded audio samples that I'm passing to the scheduler function in the code below:
let audio;

function playChannel() {
    let audioStart = context.currentTime;
    let next = 0;
    for (let i = 0; i < 8; i++) {
        scheduler(audioStart, next);
        next++;
    }
}
Here is the audio scheduler function:
function scheduler(audioStart, index) {
    audio = context.createBufferSource();
    audio.buffer = audioSamples[index]; // array with all the loaded audio
    audio.connect(context.destination);
    audio.start(audioStart + (audio.buffer.duration * index));
}
And it's working fine and plays the scheduled sounds as it should.
How am I supposed to stop/cancel all of the scheduled sounds from playing?
Right now, calling the stop() method only stops the last scheduled sound, because the shared audio variable only references the most recently created node.
You'll need to keep track of the BufferSource nodes you're creating inside scheduler, referenced by index, and then run through all of them. E.g.:
var sources = [];

function scheduler(audioStart, index) {
    audio = context.createBufferSource();
    sources[index] = audio;
    audio.buffer = audioSamples[index]; // array with all the loaded audio
    audio.connect(context.destination);
    audio.start(audioStart + (audio.buffer.duration * index));
}
function stopAll() {
    for (let i = 0; i < 8; i++) {
        if (sources[i]) {
            sources[i].stop(0);
        }
    }
}
Related
I'm trying to set the current time for the last video in the playlist, but it always fails.
Here is the code I'm working with so far. It doesn't set the time at all and I don't know how to solve it.
var iframe = document.querySelector('iframe.main-player');
var player = new Vimeo.Player(iframe);
var video_ids = [123456789, 987654321, 543216789];
var index = 0;

var playNext = function(data) {
    if (index <= video_ids.length)
        player.loadVideo(video_ids[index++])
}

let last = video_ids.length - 1;

player.loadVideo(video_ids[index++]);

player.on('loaded', function() {
    if (last == index++) {
        player.setCurrentTime(200);
    }
    player.play();
});

player.on('ended', playNext);
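For what it's worth, one likely culprit is that index is incremented both in playNext and inside the 'loaded' handler, so the comparison never lands on the last video (and the <= check can run past the end of the array). A sketch of a possible fix, assuming the 'loaded' event's data argument carries the id of the video that just loaded, as the Vimeo Player API documents:

var last_id = video_ids[video_ids.length - 1];

player.loadVideo(video_ids[index++]);

player.on('loaded', function(data) {
    // Compare against the loaded video's id instead of juggling the index twice.
    if (data.id === last_id) {
        player.setCurrentTime(200);
    }
    player.play();
});

player.on('ended', function() {
    if (index < video_ids.length) {
        player.loadVideo(video_ids[index++]);
    }
});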
I would like my audio2 file to play when audio1.currentTime is 3 seconds, but I'm not able to make it work. I'm a newbie in JavaScript; what am I missing? This is my current JavaScript code:
function initAudioPlayer() {
    var audio1, audio2, ext, agent;
    ext = ".mp3";
    agent = navigator.userAgent.toLocaleLowerCase();
    if (agent.indexOf('firefox') != -1 || agent.indexOf('opera') != -1) { ext = ".ogg"; }

    // Audio objects: audio1 and audio2
    audio1 = new Audio();
    audio1.src = "folder/Audio1" + ext;
    audio1.loop = false;
    audio1.play();
    audio2 = new Audio();
    audio2.src = "folder/Audio2" + ext;
    audio2.loop = false;
    audio2.play();

    // Function that should play the second audio file at second 3 of the first audio file
    function audio2() {
        if (audio1.currentTime == 3) { audio2.play(); }
    };
}
window.addEventListener("load", initAudioPlayer);
You must use the Web Audio API and fetch the buffers of your files.
Then you must add the samples together and copy the result into a new buffer.
This code can help you:
let index = 0;
let tempBuf;
samples.forEach(buffer => {
    if (index === 0) {
        tempBuf = buffer;
    } else {
        tempBuf = this.appendBuffer(tempBuf, buffer);
    }
    index++;
});
And with this method you can append two buffers:
private appendBuffer(buffer1, buffer2) {
    const numberOfChannels = Math.max(buffer1.numberOfChannels, buffer2.numberOfChannels);
    const tmp = this.audioContextService.createBuffer(numberOfChannels,
        Math.max(buffer1.length, buffer2.length), buffer1.sampleRate);
    for (let i = 0; i < numberOfChannels; i++) {
        const channel = tmp.getChannelData(i);
        // If only one buffer has this channel, copy it through unchanged.
        if (buffer1.numberOfChannels <= i) {
            channel.set(buffer2.getChannelData(i), 0);
            continue;
        }
        const channelTemp = buffer1.getChannelData(i);
        if (buffer2.numberOfChannels <= i) {
            channel.set(channelTemp, 0);
            continue;
        }
        // Otherwise mix: copy the longer channel, then add the shorter one on top.
        const c = buffer2.getChannelData(i);
        const finalArray = channelTemp.length > c.length ? channelTemp.slice() : c.slice();
        const d = channelTemp.length > c.length ? c : channelTemp;
        for (let j = 0; j < d.length; j++) {
            finalArray[j] += d[j] / 2;
        }
        channel.set(finalArray, 0);
    }
    return tmp;
}
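To hear the merged result, tempBuf can then be played back with an AudioBufferSourceNode; a minimal sketch, assuming audioContextService is a plain AudioContext:

const source = this.audioContextService.createBufferSource();
source.buffer = tempBuf;
source.connect(this.audioContextService.destination);
source.start(0);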
You can see my demo here.
You can also see this answer.
If you truly want this to be accurate in time, you can't use Audio() - that's the HTML5 <audio> element, which is not sample-accurate. JavaScript is also not accurate enough in event delivery to use a callback to do this (to be fair, neither are most general-purpose native OS APIs). You need to schedule the playback in advance, which is where Web Audio (https://developer.mozilla.org/en-US/docs/Web/API/Web_Audio_API) comes in. You need to load both samples, decode them into AudioBuffers, and then schedule playing back each of them with an AudioBufferSourceNode.
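A minimal sketch of that approach (the file names and the three-second offset come from the question above; everything else is standard Web Audio):

var context = new (window.AudioContext || window.webkitAudioContext)();

// Fetch and decode a sample into an AudioBuffer.
function loadSample(url, callback) {
    var request = new XMLHttpRequest();
    request.open('GET', url, true);
    request.responseType = 'arraybuffer';
    request.onload = function() {
        context.decodeAudioData(request.response, callback);
    };
    request.send();
}

loadSample('folder/Audio1.mp3', function(buffer1) {
    loadSample('folder/Audio2.mp3', function(buffer2) {
        var now = context.currentTime;
        var source1 = context.createBufferSource();
        source1.buffer = buffer1;
        source1.connect(context.destination);
        source1.start(now);
        // Schedule the second sample exactly 3 seconds after the first.
        var source2 = context.createBufferSource();
        source2.buffer = buffer2;
        source2.connect(context.destination);
        source2.start(now + 3);
    });
});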
I have an HTML5 video element and I need to apply different real-time processing to the video's output audio. On desktop I made it work with the Web Audio API. The API is seemingly present on iOS as well: I am able to inspect the created objects, but it doesn't modify the video's output signal.
Here's my example code:
$(function () {
    window.AudioContext = window.AudioContext || window.webkitAudioContext;
    var audioContext = new AudioContext();
    var bufferSize = 1024;
    var selectedChannel = 0;

    var effect = (function() {
        var node = audioContext.createScriptProcessor(bufferSize, 2, 2);
        node.addEventListener('audioprocess', function(e) {
            var input = e.inputBuffer.getChannelData(selectedChannel);
            var outputL = e.outputBuffer.getChannelData(0);
            var outputR = e.outputBuffer.getChannelData(1);
            for (var i = 0; i < bufferSize; i++) {
                outputL[i] = selectedChannel == 0 ? input[i] : 0.0;
                outputR[i] = selectedChannel == 1 ? input[i] : 0.0;
            }
        });
        return node;
    })();

    var streamAttached = false;

    function attachStream(video) {
        if (streamAttached) {
            return;
        }
        var source = audioContext.createMediaElementSource(video);
        source.connect(effect);
        effect.connect(audioContext.destination);
        streamAttached = true;
    }

    function iOS_video_touch_start() {
        var video = $('#vid')[0];
        video.play();
        attachStream(video);
    }

    var needtouch = false;

    $('#vid').on('play', function () {
        attachStream(this);
    }).on('loadedmetadata', function () {
        this.play();
        this.volume = 1.0;
        if (this && this.paused) {
            if (needtouch == false) {
                needtouch = true;
                this.addEventListener("touchstart", iOS_video_touch_start, true);
            }
        }
    });

    window.panToRight = function() {
        selectedChannel = 1;
    };
    window.panToLeft = function() {
        selectedChannel = 0;
    };
});
You can also check it on CodePen:
http://codepen.io/anon/pen/pgeJQG
With the buttons you are able to toggle between the left and the right channels. On desktop browsers (Chrome, Firefox, Safari tested) it works fine.
I have also tried the older createJavaScriptNode() instead of createScriptProcessor(), and an alternative effect chain, which looked like this:
var audioContext = new (window.AudioContext || window.webkitAudioContext)();
audioContext.createGain = audioContext.createGain || audioContext.createGainNode;

var gainL = audioContext.createGain();
var gainR = audioContext.createGain();
gainL.gain.value = 1;
gainR.gain.value = 1;

var merger = audioContext.createChannelMerger(2);
var splitter = audioContext.createChannelSplitter(2);

// Connect to source
source = audioContext.createMediaElementSource(video);
// Connect the source to the splitter
source.connect(splitter, 0, 0);
// Connect the splitter's outputs to the gain nodes
splitter.connect(gainL, 0);
splitter.connect(gainR, 1);
// Connect the left and right gain nodes to the merger node's inputs
// (assuming stereo as the initial status)
gainL.connect(merger, 0, 0);
gainR.connect(merger, 0, 1);
// Connect the merger output to the context destination
merger.connect(audioContext.destination, 0, 0);
As you probably noticed, this code uses only the built-in nodes. But still no luck.
So my questions are: Is this even possible on mobile? If it is, then what am I missing? If it is not, then is there any possible workaround? Thanks
With Chrome on Android, MediaElementSource is not currently routed to WebAudio. This is a known issue and is planned to be fixed eventually.
I am making an audio recorder using HTML5 and JavaScript and do not want to include any third-party API. I completed my first step by creating an audio retriever and player using the <audio> tag and the navigator.webkitGetUserMedia function, which gets audio from my microphone and plays it through the <audio> element. But I am not able to get the audio data into an array; at this point I don't know what to do or which function to use.
Simple: just create an audio node. Below is tweaked code from MattDiamond's RecorderJS:
function RecordAudio(stream, cfg) {
    var config = cfg || {};
    var bufferLen = config.bufferLen || 4096;
    var numChannels = config.numChannels || 2;
    this.context = stream.context;
    var recordBuffers = [];
    var recording = false;

    this.node = (this.context.createScriptProcessor ||
        this.context.createJavaScriptNode).call(this.context,
        bufferLen, numChannels, numChannels);
    stream.connect(this.node);
    this.node.connect(this.context.destination);

    this.node.onaudioprocess = function(e) {
        if (!recording) return;
        for (var i = 0; i < numChannels; i++) {
            if (!recordBuffers[i]) recordBuffers[i] = [];
            recordBuffers[i].push.apply(recordBuffers[i], e.inputBuffer.getChannelData(i));
        }
    }

    this.getData = function() {
        var tmp = recordBuffers;
        recordBuffers = [];
        return tmp; // returns an array of arrays containing data from the various channels
    };

    this.start = function() {
        recording = true;
    };

    this.stop = function() {
        recording = false;
    };
}
Example usage (userMedia here is an audio source node, e.g. one created with context.createMediaStreamSource):
var recorder = new RecordAudio(userMedia);
recorder.start();
recorder.stop();
var recordedData = recorder.getData();
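Note that getData() returns one plain array of samples per channel; if you need typed data, for example to build a WAV file, you can wrap a channel in a Float32Array:

// Channel 0 as a typed array (recordedData[1] would be channel 1, etc.)
var channel0 = new Float32Array(recordedData[0]);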
I have several tracks to a song that I want to play together and be able to mute some and play others. So I need to be able to start them all at the same time. Right now, they all start slightly out of sync:
// Start playing
for (var i = 0; i < 5; i++) {
    tracks[i].audio.play();
}
Even this is apparently not fast enough to start them all at the same time.
Is there any way in JavaScript to guarantee that HTML5 audio tags will start playing simultaneously?
Not sure if you're already doing this, but here's some sample code for preloading audio.
var audios = [];
var loading = 0;

AddNote("2C");
AddNote("2E");
AddNote("2G");
AddNote("3C");

function AddNote(name) {
    loading++;
    var audio = document.createElement("audio");
    audio.loop = true;
    audio.addEventListener("canplaythrough", function () {
        loading--;
        if (loading == 0) // All files are preloaded
            StartPlayingAll();
    }, false);
    audio.src = "piano/" + name + ".mp3";
    audios.push(audio);
}

function StartPlayingAll() {
    for (var i = 0; i < audios.length; i++)
        audios[i].play();
}
The other thing you can try is setting audio.currentTime on each of the tracks to manually sync up the audio.
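A minimal sketch of that idea, snapping every track to the first track's clock (tracks is the array from the question):

function resync(tracks) {
    var t = tracks[0].audio.currentTime;
    for (var i = 1; i < tracks.length; i++) {
        tracks[i].audio.currentTime = t;
    }
}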
You could use setTimeout to sync them after a brief delay in the beginning (you may want to wait for all the audio objects to load though).
JSFiddle: http://jsfiddle.net/bmAYb/35/
var au1 = document.getElementById('au1');
var au2 = document.getElementById('au2');

au1.volume = 1;
au2.volume = 0; // mute
au1.play();
au2.play();

var notfirstRun = false;

// Sync for the first time
setTimeout(function() {
    au2.currentTime = au1.currentTime;
    au2.volume = 1;
}, 250);
My initial thought was to sync every x milliseconds using setInterval, but the audio pops when you do that if the volume is set to 1 (audible).
My fiddle isn't totally in sync, but it's pretty close. You can get it 100% in sync but you either need to mute the audio on the other tracks or deal with popping.
The code (and the music in the fiddle) is from Hungry Media.
I had the same issue and found a solution using the Web Audio API.
The problem is that the audio output has a delay of a few milliseconds, so it is impossible to start multiple audios at the same time. However, you can get around this by merging the audio sources into one using a ChannelMergerNode. By putting GainNodes in between, you can control the volume of each audio source separately.
I wrote a simple javascript class for this. This is how you can use it:
var audioMerger = new AudioMerger(["file1.ogg", "file2.mp3", "file3.mp3",
                                   "file4.ogg", "file5.mp3"]);
audioMerger.onBuffered(() => audioMerger.play());

// Make sure it's always in sync (delay should be less than 50 ms)
setInterval(() => {
    if (audioMerger.getDelay() >= 0.05) {
        audioMerger.setTime(audioMerger.getTime());
    }
}, 200);

// Set volume of 3rd audio to 50%
audioMerger.setVolume(0.5, 2);

// When you want to turn it off:
audioMerger.pause();
This code reduced the delay between the audios to less than 10 milliseconds in Firefox on my PC. This delay is so small you won't notice it. Unfortunately, it doesn't work in older browsers like Internet Explorer.
And here's the code for the class:
class AudioMerger {
    constructor(files) {
        this.files = files;
        this.audios = files.map(file => new Audio(file));
        var AudioContext = window.AudioContext || window.webkitAudioContext;
        var ctx = new AudioContext();
        this.merger = ctx.createChannelMerger(this.audios.length);
        this.merger.connect(ctx.destination);
        this.gains = this.audios.map(audio => {
            var gain = ctx.createGain();
            var source = ctx.createMediaElementSource(audio);
            source.connect(gain);
            gain.connect(this.merger);
            return gain;
        });
        this.buffered = false;
        var load = files.length;
        this.audios.forEach(audio => {
            audio.addEventListener("canplaythrough", () => {
                load--;
                if (load === 0) {
                    this.buffered = true;
                    if (this.bufferCallback != null) this.bufferCallback();
                }
            });
        });
    }

    onBuffered(callback) {
        if (this.buffered) callback();
        else this.bufferCallback = callback;
    }

    play() {
        this.audios.forEach(audio => audio.play());
    }

    pause() {
        this.audios.forEach(audio => audio.pause());
    }

    getTime() {
        return this.audios[0].currentTime;
    }

    setTime(time) {
        this.audios.forEach(audio => audio.currentTime = time);
    }

    getDelay() {
        var times = [];
        for (var i = 0; i < this.audios.length; i++) {
            times.push(this.audios[i].currentTime);
        }
        var minTime = Math.min.apply(Math, times);
        var maxTime = Math.max.apply(Math, times);
        return maxTime - minTime;
    }

    setVolume(volume, audioID) {
        this.gains[audioID].gain.value = volume;
    }
}
}