recorderJS record/download audio buffer WEB AUDIO API - javascript

I want to record the audio output from a simple drum sequencer and export it for download as a wav file. I have a live link to my current implementation attempt.
The sum output of the sequencer is routed to the variable finalMixNode,
yet setting this as the input for recorder.js doesn't work. I think it may be a problem with the audio context, but I can't figure it out. I successfully created an oscillator and recorded its output, but I can't extend this to the sequencer.
Here is the main js code in which I am trying to record the output. I'm hoping someone will see what I am missing.
//audio node variables
var context;
var convolver;
var compressor;
var masterGainNode;
var effectLevelNode;
var lowPassFilterNode;
var noteTime;
var startTime;
var lastDrawTime = -1;
var LOOP_LENGTH = 16;
var rhythmIndex = 0;
var timeoutId;
var testBuffer = null;
var currentKit = null;
var reverbImpulseResponse = null;
var tempo = 120;
var TEMPO_MAX = 200;
var TEMPO_MIN = 40;
var TEMPO_STEP = 4;
if (window.hasOwnProperty('AudioContext') && !window.hasOwnProperty('webkitAudioContext')) {
window.webkitAudioContext = AudioContext;
}
$(function() {
init();
toggleSelectedListener();
playPauseListener();
lowPassFilterListener();
reverbListener();
createLowPassFilterSliders();
initializeTempo();
changeTempoListener();
});
function createLowPassFilterSliders() {
$("#freq-slider").slider({
value: 1,
min: 0,
max: 1,
step: 0.01,
disabled: true,
slide: changeFrequency
});
$("#quality-slider").slider({
value: 0,
min: 0,
max: 1,
step: 0.01,
disabled: true,
slide: changeQuality
});
}
function lowPassFilterListener() {
$('#lpf').click(function() {
$(this).toggleClass("active");
$(this).blur();
if ($(this).hasClass("btn-default")) {
$(this).removeClass("btn-default");
$(this).addClass("btn-warning");
lowPassFilterNode.active = true;
$("#freq-slider,#quality-slider").slider( "option", "disabled", false );
}
else {
$(this).addClass("btn-default");
$(this).removeClass("btn-warning");
lowPassFilterNode.active = false;
$("#freq-slider,#quality-slider").slider( "option", "disabled", true );
}
})
}
function reverbListener() {
$("#reverb").click(function() {
$(this).toggleClass("active");
$(this).blur();
if ($(this).hasClass("btn-default")) {
$(this).removeClass("btn-default");
$(this).addClass("btn-warning");
convolver.active = true;
}
else {
$(this).addClass("btn-default");
$(this).removeClass("btn-warning");
convolver.active = false;
}
})
}
function changeFrequency(event, ui) {
var minValue = 40;
var maxValue = context.sampleRate / 2;
var numberOfOctaves = Math.log(maxValue / minValue) / Math.LN2;
var multiplier = Math.pow(2, numberOfOctaves * (ui.value - 1.0));
lowPassFilterNode.frequency.value = maxValue * multiplier;
}
function changeQuality(event, ui) {
//30 is the quality multiplier, for now.
lowPassFilterNode.Q.value = ui.value * 30;
}
function playPauseListener() {
$('#play-pause').click(function() {
var $span = $(this).children("span");
if($span.hasClass('glyphicon-play')) {
$span.removeClass('glyphicon-play');
$span.addClass('glyphicon-pause');
handlePlay();
}
else {
$span.addClass('glyphicon-play');
$span.removeClass('glyphicon-pause');
handleStop();
}
});
}
function toggleSelectedListener() {
$('.pad').click(function() {
$(this).toggleClass("selected");
});
}
function init() {
initializeAudioNodes();
loadKits();
loadImpulseResponses();
}
function initializeAudioNodes() {
context = new webkitAudioContext();
var finalMixNode;
if (context.createDynamicsCompressor) {
// Create a dynamics compressor to sweeten the overall mix.
compressor = context.createDynamicsCompressor();
compressor.connect(context.destination);
finalMixNode = compressor;
} else {
// No compressor available in this implementation.
finalMixNode = context.destination;
}
// Create master volume.
// for now, the master volume is static, but in the future there will be a slider
masterGainNode = context.createGain();
masterGainNode.gain.value = 0.7; // reduce overall volume to avoid clipping
masterGainNode.connect(finalMixNode);
//connect all sounds to masterGainNode to play them
//don't need this for now, no wet dry mix for effects
// // Create effect volume.
// effectLevelNode = context.createGain();
// effectLevelNode.gain.value = 1.0; // effect level slider controls this
// effectLevelNode.connect(masterGainNode);
// Create convolver for effect
convolver = context.createConvolver();
convolver.active = false;
// convolver.connect(effectLevelNode);
//Create Low Pass Filter
lowPassFilterNode = context.createBiquadFilter();
//this is for backwards compatibility, the type used to be an integer
lowPassFilterNode.type = (typeof lowPassFilterNode.type === 'string') ? 'lowpass' : 0; // LOWPASS
//default value is max cutoff, or passing all frequencies
lowPassFilterNode.frequency.value = context.sampleRate / 2;
lowPassFilterNode.connect(masterGainNode);
lowPassFilterNode.active = false;
}
function loadKits() {
//name must be same as path
var kit = new Kit("TR808");
kit.load();
//TODO: figure out how to test if a kit is loaded
currentKit = kit;
}
function loadImpulseResponses() {
reverbImpulseResponse = new ImpulseResponse("sounds/impulse-responses/matrix-reverb2.wav");
reverbImpulseResponse.load();
}
//TODO delete this
function loadTestBuffer() {
var request = new XMLHttpRequest();
var url = "http://www.freesound.org/data/previews/102/102130_1721044-lq.mp3";
request.open("GET", url, true);
request.responseType = "arraybuffer";
request.onload = function() {
context.decodeAudioData(
request.response,
function(buffer) {
testBuffer = buffer;
},
function(buffer) {
console.log("Error decoding drum samples!");
}
);
}
request.send();
}
//TODO delete this
function sequencePads() {
$('.pad.selected').each(function() {
$('.pad').removeClass("selected");
$(this).addClass("selected");
});
}
function playNote(buffer, noteTime) {
var voice = context.createBufferSource();
voice.buffer = buffer;
var currentLastNode = masterGainNode;
if (lowPassFilterNode.active) {
lowPassFilterNode.connect(currentLastNode);
currentLastNode = lowPassFilterNode;
}
if (convolver.active) {
convolver.buffer = reverbImpulseResponse.buffer;
convolver.connect(currentLastNode);
currentLastNode = convolver;
}
voice.connect(currentLastNode);
voice.start(noteTime);
}
function schedule() {
var currentTime = context.currentTime;
// The sequence starts at startTime, so normalize currentTime so that it's 0 at the start of the sequence.
currentTime -= startTime;
while (noteTime < currentTime + 0.200) {
var contextPlayTime = noteTime + startTime;
var $currentPads = $(".column_" + rhythmIndex);
$currentPads.each(function() {
if ($(this).hasClass("selected")) {
var instrumentName = $(this).parents().data("instrument");
switch (instrumentName) {
case "kick":
playNote(currentKit.kickBuffer, contextPlayTime);
break;
case "snare":
playNote(currentKit.snareBuffer, contextPlayTime);
break;
case "hihat":
playNote(currentKit.hihatBuffer, contextPlayTime);
break;
case "tomhi":
playNote(currentKit.tomhiBuffer, contextPlayTime);
break;
case "tommid":
playNote(currentKit.tommidBuffer, contextPlayTime);
break;
case "tomlow":
playNote(currentKit.tomlowBuffer, contextPlayTime);
break;
case "cl":
playNote(currentKit.clBuffer, contextPlayTime);
break;
case "cb":
playNote(currentKit.cbBuffer, contextPlayTime);
break;
case "cp":
playNote(currentKit.cpBuffer, contextPlayTime);
break;
case "cy":
playNote(currentKit.cyBuffer, contextPlayTime);
break;
case "rs":
playNote(currentKit.rsBuffer, contextPlayTime);
break;
}
//play the buffer
//store a data element in the row that tells you what instrument
}
});
if (noteTime != lastDrawTime) {
lastDrawTime = noteTime;
drawPlayhead(rhythmIndex);
}
advanceNote();
}
timeoutId = requestAnimationFrame(schedule)
}
function drawPlayhead(xindex) {
var lastIndex = (xindex + LOOP_LENGTH - 1) % LOOP_LENGTH;
//can change this to class selector to select a column
var $newRows = $('.column_' + xindex);
var $oldRows = $('.column_' + lastIndex);
$newRows.addClass("playing");
$oldRows.removeClass("playing");
}
function advanceNote() {
// Advance time by a 16th note...
// var secondsPerBeat = 60.0 / theBeat.tempo;
//TODO CHANGE TEMPO HERE, convert to float
tempo = Number($("#tempo-input").val());
var secondsPerBeat = 60.0 / tempo;
rhythmIndex++;
if (rhythmIndex == LOOP_LENGTH) {
rhythmIndex = 0;
}
//0.25 because each square is a 16th note
noteTime += 0.25 * secondsPerBeat
// if (rhythmIndex % 2) {
// noteTime += (0.25 + kMaxSwing * theBeat.swingFactor) * secondsPerBeat;
// } else {
// noteTime += (0.25 - kMaxSwing * theBeat.swingFactor) * secondsPerBeat;
// }
}
function handlePlay(event) {
rhythmIndex = 0;
noteTime = 0.0;
startTime = context.currentTime + 0.005;
schedule();
}
function handleStop(event) {
cancelAnimationFrame(timeoutId);
$(".pad").removeClass("playing");
}
function initializeTempo() {
$("#tempo-input").val(tempo);
}
function changeTempoListener() {
$("#increase-tempo").click(function() {
if (tempo < TEMPO_MAX) {
tempo += TEMPO_STEP;
$("#tempo-input").val(tempo);
}
});
$("#decrease-tempo").click(function() {
if (tempo > TEMPO_MIN) {
tempo -= TEMPO_STEP;
$("#tempo-input").val(tempo);
}
});
}
function __log(e, data) {
log.innerHTML += "\n" + e + " " + (data || '');
}
var audio_context;
var recorder;
function startUserMedia() {
var input = finalMixNode;
__log('Media stream created.');
input.start();
__log('Input connected to audio context destination.');
recorder = new Recorder(input);
__log('Recorder initialised.');
}
function startRecording(button) {
recorder && recorder.record();
button.disabled = true;
button.nextElementSibling.disabled = false;
__log('Recording...');
}
function stopRecording(button) {
recorder && recorder.stop();
button.disabled = true;
button.previousElementSibling.disabled = false;
__log('Stopped recording.');
// create WAV download link using audio data blob
createDownloadLink();
recorder.clear();
}
function createDownloadLink() {
recorder && recorder.exportWAV(function(blob) {
var url = URL.createObjectURL(blob);
var li = document.createElement('li');
var au = document.createElement('audio');
var hf = document.createElement('a');
au.controls = true;
au.src = url;
hf.href = url;
hf.download = new Date().toISOString() + '.wav';
hf.innerHTML = hf.download;
li.appendChild(au);
li.appendChild(hf);
recordingslist.appendChild(li);
});
}
window.onload = function init() {
try {
// webkit shim
window.AudioContext = window.AudioContext || window.webkitAudioContext;
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia;
window.URL = window.URL || window.webkitURL;
// audio_context = new AudioContext;
__log('Audio context set up.');
} catch (e) {
alert('No web audio support in this browser!');
}
startUserMedia();
};

Your finalMixNode is scoped to the initializeAudioNodes() function, therefore it is undefined when you call it from startUserMedia().
Also, this variable is either a DynamicsCompressorNode or the AudioContext's destination.
Recorder.js needs a node with an output (which AudioDestinationNode currently doesn't have¹), so make sure to construct your recorder with the final compressor node (or a final gain node).
Executing this in my js console from your page does work:
var recorder = new Recorder(compressor);
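In practice that means keeping a reference to a node that still has an output in a scope the recording code can reach. Here is a minimal sketch of that idea, reusing the compressor variable that is already global in the question's code (a sketch under that assumption, not the asker's final implementation):
var recorder;
function startRecorder() {
    // compressor is the last node with an output before context.destination,
    // so tapping it captures the full sequencer mix
    recorder = new Recorder(compressor);
    __log('Recorder initialised.');
}
Calling startRecorder() after initializeAudioNodes() has run should let the existing startRecording(), stopRecording() and createDownloadLink() functions work unchanged.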
¹ Thanks to @padenot for pointing out that this is being discussed here


"Cannot read property" error while clicking Start Recording Button

I get a console error when clicking a record button.
I worked with the code from RTCMultiConnection to establish a connection and now want the ability to record the stream. I used this demo to record the stream:
RecordRTC-and-RTCMultiConnection
https://github.com/muaz-khan/WebRTC-Experiment/blob/d1040f7dcd47c290f99ad51f9b3d57aa40c847c4/RTCMultiConnection/v2.2.2/demos/RecordRTC-and-RTCMultiConnection.html
The Console Message
Uncaught TypeError: Cannot read property 'YvuytsjuZSjrg1Wp9xa4jAXrEC783kpnW74r' of undefined
at HTMLButtonElement.button.onclick (index.html:108)
What I see is that in the demo version all stream ids are short, like id="1mnvuzts2dm". My version sets the stream ids to longer random strings, like the one in the error above: id="YvuytsjuZSjrg1Wp9xa4jAXrEC783kpnW74r".
//RECORD THE CONVERSATION AUDIO + VIDEO
connection.onstream = function(e) {
appendVideo(e.mediaElement, e.streamid);
};
function appendVideo(video, streamid) {
video.width = 600;
video = getVideo(video, streamid);
videosContainer.appendChild(video);
rotateVideo(video);
scaleVideos();
}
function getVideo(video, streamid) {
var div = document.createElement('div');
div.className = 'video-container';
var button = document.createElement('button');
button.id = streamid;
button.innerHTML = 'Start Recording';
button.onclick = function() {
this.disabled = true;
if (this.innerHTML == 'Start Recording') {
this.innerHTML = 'Stop Recording';
connection.streams[this.id].startRecording({
audio: true,
video: true
});
} else {
this.innerHTML = 'Start Recording';
var stream = connection.streams[this.id];
stream.stopRecording(function(blob) {
var h2;
if (blob.audio && !(connection.UA.Chrome && stream.type == 'remote')) {
h2 = document.createElement('h2');
h2.innerHTML = 'Open recorded ' + blob.audio.type + '';
div.appendChild(h2);
}
if (blob.video) {
h2 = document.createElement('h2');
h2.innerHTML = 'Open recorded ' + blob.video.type + '';
div.appendChild(h2);
}
});
}
setTimeout(function() {
button.disabled = false;
}, 3000);
};
div.appendChild(button);
div.appendChild(video);
return div;
}
function rotateVideo(mediaElement) {
mediaElement.style[navigator.mozGetUserMedia ? 'transform' : '-webkit-transform'] = 'rotate(0deg)';
setTimeout(function() {
mediaElement.style[navigator.mozGetUserMedia ? 'transform' : '-webkit-transform'] = 'rotate(360deg)';
}, 1000);
}
connection.onstreamended = function(e) {
var div = e.mediaElement.parentNode;
div.style.opacity = 0;
rotateVideo(div);
setTimeout(function() {
if (div.parentNode) {
div.parentNode.removeChild(div);
}
scaleVideos();
}, 1000);
};
function scaleVideos() {
var videos = document.querySelectorAll('video'),
length = videos.length,
video;
var minus = 130;
var windowHeight = 700;
var windowWidth = 600;
var windowAspectRatio = windowWidth / windowHeight;
var videoAspectRatio = 4 / 3;
var blockAspectRatio;
var tempVideoWidth = 0;
var maxVideoWidth = 0;
for (var i = length; i > 0; i--) {
blockAspectRatio = i * videoAspectRatio / Math.ceil(length / i);
if (blockAspectRatio <= windowAspectRatio) {
tempVideoWidth = videoAspectRatio * windowHeight / Math.ceil(length / i);
} else {
tempVideoWidth = windowWidth / i;
}
if (tempVideoWidth > maxVideoWidth)
maxVideoWidth = tempVideoWidth;
}
for (var i = 0; i < length; i++) {
video = videos[i];
if (video)
video.width = maxVideoWidth - minus;
}
}
The error line is:
connection.streams[this.id].startRecording({
You can use the RecordRTC library to record the streams.
You just need to use this code in your primary page
and attach the RecordRTC library to your page.
recorder = connection.recorder;
if(!recorder)
{
recorder = RecordRTC([event.stream], {
type: 'video'
});
connection.recorder = recorder;
}
else {
recorder.getInternalRecorder().addStreams([event.stream]);
}
recorder.videoWidth = 500;
recorder.videoHeight = 500;
if(!connection.recorder.streams) {
connection.recorder.streams = [];
}
connection.recorder.streams.push(event.stream);
var length = connection.recorder.streams.length;
if(length > 2){
length = 2;
}
recordingStatus.innerHTML = 'Recording Started ' + length + ' streams';
Or you can use this code
connection.onstream = function(event) {
var video = document.createElement('video');
try {
video.setAttributeNode(document.createAttribute('autoplay'));
video.setAttributeNode(document.createAttribute('playsinline'));
} catch (e) {
video.setAttribute('autoplay', true);
video.setAttribute('playsinline', true);
}
if(event.type === 'local') {
video.volume = 0;
try {
video.setAttributeNode(document.createAttribute('muted'));
} catch (e) {
video.setAttribute('muted', true);
}
}
video.srcObject = event.stream;
var width = parseInt(connection.videosContainer.clientWidth / 3) - 20;
var mediaElement = getHTMLMediaElement(video, {
title: event.userid,
buttons: ['full-screen'],
width: width,
showOnMouseEnter: false
});
connection.videosContainer.appendChild(mediaElement);
setTimeout(function() {
mediaElement.media.play();
}, 5000);
mediaElement.id = event.streamid;
// to keep room-id in cache
localStorage.setItem(connection.socketMessageEvent, connection.sessionid);
if(chkRecordConference.checked === true) {
chkRecordConference.parentNode.style.display = 'none';
btnStopRecording.style.display = 'inline-block';
recordingStatus.style.display = 'inline-block';
recorder = connection.recorder;
if(!recorder)
{
recorder = RecordRTC([event.stream], {
type: 'video'
});
connection.recorder = recorder;
}
else {
recorder.getInternalRecorder().addStreams([event.stream]);
}
recorder.videoWidth = 500;
recorder.videoHeight = 500;
if(!connection.recorder.streams) {
connection.recorder.streams = [];
}
connection.recorder.streams.push(event.stream);
var length = connection.recorder.streams.length;
if(length > 2){
length = 2;
}
recordingStatus.innerHTML = 'Recording Started ' + length + ' streams';
}
};
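For completeness, here is a minimal sketch of how the recording started above could be stopped and downloaded, assuming the btnStopRecording and recordingStatus elements referenced in the snippet and RecordRTC's stopRecording()/getBlob() methods (the file name is illustrative):
btnStopRecording.onclick = function() {
    var recorder = connection.recorder;
    if (!recorder) return;
    recorder.stopRecording(function() {
        var blob = recorder.getBlob();             // the recorded media
        var url = URL.createObjectURL(blob);
        var a = document.createElement('a');
        a.href = url;
        a.download = 'conference-recording.webm';  // illustrative file name
        a.click();
        recordingStatus.innerHTML = 'Recording stopped';
    });
};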

Possible memory leak or something else?

I've made a visualizer in JavaScript where, when you select a music directory, you're able to select files within that directory to play and have the visualizer move to the music. But it seems that after loading a directory and then changing the song more than 4 times, the visualizer's movement becomes less responsive. I'm unsure why this is happening.
Here's an example of this happening.
Keep changing the song from the drop-down box until you see it starting to slow.
window.onload = function() {
var input = document.getElementById("file");
var audio = document.getElementById("audio");
var selectLabel = document.querySelector("label[for=select]");
var audioLabel = document.querySelector("label[for=audio]");
var select = document.querySelector("select");
var context = void 0,
src = void 0,
res = [],
url = "";
function processDirectoryUpload(event) {
var webkitResult = [];
var mozResult = [];
var files;
console.log(event);
select.innerHTML = "";
// do mozilla stuff
function mozReadDirectories(entries, path) {
console.log("dir", entries, path);
return [].reduce.call(entries, function(promise, entry) {
return promise.then(function() {
return Promise.resolve(entry.getFilesAndDirectories() || entry)
.then(function(dir) {
return dir
})
})
}, Promise.resolve())
.then(function(items) {
var dir = items.filter(function(folder) {
return folder instanceof Directory
});
var files = items.filter(function(file) {
return file instanceof File
});
if (files.length) {
// console.log("files:", files, path);
mozResult = mozResult.concat.apply(mozResult, files);
}
if (dir.length) {
// console.log(dir, dir[0] instanceof Directory);
return mozReadDirectories(dir, dir[0].path || path);
} else {
if (!dir.length) {
return Promise.resolve(mozResult).then(function(complete) {
return complete
})
}
}
})
};
function handleEntries(entry) {
let file = "webkitGetAsEntry" in entry ? entry.webkitGetAsEntry() : entry
return Promise.resolve(file);
}
function handleFile(entry) {
return new Promise(function(resolve) {
if (entry.isFile) {
entry.file(function(file) {
listFile(file, entry.fullPath).then(resolve)
})
} else if (entry.isDirectory) {
var reader = entry.createReader();
reader.readEntries(webkitReadDirectories.bind(null, entry, handleFile, resolve))
} else {
var entries = [entry];
return entries.reduce(function(promise, file) {
return promise.then(function() {
return listDirectory(file)
})
}, Promise.resolve())
.then(function() {
return Promise.all(entries.map(function(file) {
return listFile(file)
})).then(resolve)
})
}
})
function webkitReadDirectories(entry, callback, resolve, entries) {
console.log(entries);
return listDirectory(entry).then(function(currentDirectory) {
console.log(`iterating ${currentDirectory.name} directory`, entry);
return entries.reduce(function(promise, directory) {
return promise.then(function() {
return callback(directory)
});
}, Promise.resolve())
}).then(resolve);
}
}
function listDirectory(entry) {
console.log(entry);
return Promise.resolve(entry);
}
function listFile(file, path) {
path = path || file.webkitRelativePath || "/" + file.name;
console.log(`reading ${file.name}, size: ${file.size}, path:${path}`);
webkitResult.push(file);
return Promise.resolve(webkitResult)
};
function processFiles(files) {
Promise.all([].map.call(files, function(file, index) {
return handleEntries(file, index).then(handleFile)
}))
.then(function() {
console.log("complete", webkitResult);
res = webkitResult;
res.reduce(function(promise, track) {
return promise.then(function() {
return playMusic(track)
})
}, displayFiles(res))
})
.catch(function(err) {
alert(err.message);
})
}
if ("getFilesAndDirectories" in event.target) {
return (event.type === "drop" ? event.dataTransfer : event.target).getFilesAndDirectories()
.then(function(dir) {
if (dir[0] instanceof Directory) {
console.log(dir)
return mozReadDirectories(dir, dir[0].path || path)
.then(function(complete) {
console.log("complete:", webkitResult);
event.target.value = null;
});
} else {
if (dir[0] instanceof File && dir[0].size > 0) {
return Promise.resolve(dir)
.then(function() {
console.log("complete:", mozResult);
res = mozResult;
res.reduce(function(promise, track) {
return promise.then(function() {
return playMusic(track)
})
}, displayFiles(res))
})
} else {
if (dir[0].size == 0) {
throw new Error("could not process '" + dir[0].name + "' directory" + " at drop event at firefox, upload folders at 'Choose folder...' input");
}
}
}
}).catch(function(err) {
alert(err)
})
}
files = event.target.files;
if (files) {
processFiles(files)
}
}
function displayFiles(files) {
select.innerHTML = "";
return Promise.all(files.map(function(file, index) {
return new Promise(function(resolve) {
if (/^audio/.test(file.type)) { /* do stuff, that is all code currently within Promise resolver function */ } else { /* proceed to next file */
resolve()
}
var option = new Option(file.name, index);
select.appendChild(option);
resolve()
})
}))
}
function handleSelectedSong(event) {
if (res.length) {
var index = select.value;
var track = res[index];
playMusic(track)
.then(function(filename) {
console.log(filename + " playback completed")
})
} else {
console.log("No songs to play")
}
}
function playMusic(file) {
return new Promise(function(resolve) {
audio.pause();
audio.onended = function() {
audio.onended = null;
if (url) URL.revokeObjectURL(url);
resolve(file.name);
}
if (url) URL.revokeObjectURL(url);
url = URL.createObjectURL(file);
audio.load();
audio.src = url;
audio.play();
audioLabel.textContent = file.name;
context = context || new AudioContext();
src = src || context.createMediaElementSource(audio);
src.disconnect(context);
var analyser = context.createAnalyser();
var canvas = document.getElementById("canvas");
canvas.width = window.innerWidth;
canvas.height = window.innerHeight;
var ctx = canvas.getContext("2d");
src.connect(analyser);
analyser.connect(context.destination);
analyser.fftSize = 16384;
var bufferLength = analyser.frequencyBinCount;
console.log(bufferLength);
var dataArray = new Uint8Array(bufferLength);
var WIDTH = canvas.width;
var HEIGHT = canvas.height;
var barWidth = (WIDTH / bufferLength) * 32;
var barHeight;
var x = 0;
function renderFrame() {
requestAnimationFrame(renderFrame);
x = 0;
analyser.getByteFrequencyData(dataArray);
ctx.fillStyle = "#1b1b1b";
ctx.fillRect(0, 0, WIDTH, HEIGHT);
for (var i = 0; i < bufferLength; i++) {
barHeight = dataArray[i];
ctx.fillStyle = "rgb(5,155,45)"
ctx.fillRect(x, (((HEIGHT - barHeight - 5 % barHeight) + (20 % HEIGHT - barHeight))), barWidth, barHeight + 20 % HEIGHT);
x += barWidth + 2;
}
}
renderFrame();
})
}
input.addEventListener("change", processDirectoryUpload);
select.addEventListener("change", handleSelectedSong);
}
<canvas id="canvas" width="window.innerWidth" height="window.innerHeight"></canvas>
<div id="content">
<label class="custom-file-upload">
Select Music directory <input id="file" type="file" accept="audio/*" directory allowdirs webkitdirectory/>
<p style="color: rgb(5,195,5);">Now playing:<label for="audio"></label></p>
<p style="color: rgb(5,195,5);">Select Song</p>
<select id="select">
</select>
<audio id="audio" controls></audio>
As noted by yuriy636, you are starting a new animation for every new song, without ever stopping the previous one.
So when you have played 5 songs, you still have 5 visualization rendering loops running at every frame, and 5 analyzers.
The best thing to do here is to refactor your code:
create a single analyzer, and only update which stream feeds it
make the canvas animation autonomous, declared once at first load
since you've got only one <canvas>, start only one rendering animation
When using a single analyzer, your render doesn't need anything new when you change the source: it's always the same canvas, the same analyzer, the same visualization.
Here is a quick proof of concept, really dirty, but I hope you'll be able to understand what I did and why.
window.onload = function() {
var input = document.getElementById("file");
var audio = document.getElementById("audio");
var selectLabel = document.querySelector("label[for=select]");
var audioLabel = document.querySelector("label[for=audio]");
var select = document.querySelector("select");
var viz = null;
var res = [];
// removed all the IDK what it was meant for directory special handlers
function displayFiles() {
select.innerHTML = "";
// that's all synchronous, why Promises ?
res = Array.prototype.slice.call(input.files);
res.forEach(function(file, index) {
if (/^audio/.test(file.type)) {
var option = new Option(file.name, index);
select.appendChild(option);
}
});
if (res.length) {
var analyser = initAudioAnalyser();
viz = initVisualization(analyser);
// pre-select the first song ?
handleSelectedSong();
audio.pause();
}
}
function handleSelectedSong(event) {
if (res.length) {
var index = select.value;
var track = res[index];
playMusic(track)
.then(function(filename) {
console.log(filename + " playback completed")
})
viz.play();
} else {
console.log("No songs to play")
}
}
function playMusic(file) {
return new Promise(function(resolve) {
var url = audio.src;
audio.pause();
audio.onended = function() {
audio.onended = null;
// arguablily useless here since blobURIs are just pointers to real file on the user's system
if (url) URL.revokeObjectURL(url);
resolve(file.name);
}
if (url) URL.revokeObjectURL(url);
url = URL.createObjectURL(file);
// audio.load(); // would just set a 404 since you revoked the URL just before
audio.src = url;
audio.play();
audioLabel.textContent = file.name;
});
}
function initAudioAnalyser() {
var context = new AudioContext();
var analyser = context.createAnalyser();
analyser.fftSize = 16384;
var src = context.createMediaElementSource(audio);
src.connect(analyser);
src.connect(context.destination);
return analyser;
}
function initVisualization(analyser) {
var canvas = document.getElementById("canvas");
canvas.width = window.innerWidth;
canvas.height = window.innerHeight;
var ctx = canvas.getContext("2d");
var bufferLength = analyser.frequencyBinCount;
var dataArray = new Uint8Array(bufferLength);
var WIDTH = canvas.width;
var HEIGHT = canvas.height;
var barWidth = (WIDTH / bufferLength) * 32;
var barHeight;
var x = 0;
var paused = true;
function renderFrame() {
if (!paused) {
requestAnimationFrame(renderFrame);
} else {
return;
}
x = 0;
analyser.getByteFrequencyData(dataArray);
ctx.fillStyle = "#1b1b1b";
ctx.fillRect(0, 0, WIDTH, HEIGHT);
ctx.fillStyle = "rgb(5,155,45)"
ctx.beginPath();
for (var i = 0; i < bufferLength; i++) {
barHeight = dataArray[i];
// micro-optimisation, but concatenating all the rects in a single shape is easier for the CPU
ctx.rect(x, (((HEIGHT - barHeight - 5 % barHeight) + (20 % HEIGHT - barHeight))), barWidth, barHeight + 20 % HEIGHT);
x += barWidth + 2;
}
ctx.fill();
}
var viz = window.viz = {
play: function() {
if(paused){
paused = false;
renderFrame();
}
},
pause: function() {
paused = true;
clearTimeout(pauseTimeout);
pauseTimeout = null;
},
};
// we can even add auto pause linked to the audio element
var pauseTimeout = null;
audio.onpause = function() {
// let's really do it in 2s to keep the tear down effect
pauseTimeout = setTimeout(viz.pause, 2000);
}
audio.onplaying = function() {
clearTimeout(pauseTimeout);
// we were not playing
if(!pauseTimeout){
viz.play();
}
}
return viz;
}
input.addEventListener("change", displayFiles);
select.addEventListener("change", handleSelectedSong);
}
<canvas id="canvas" width="window.innerWidth" height="window.innerHeight"></canvas>
<div id="content">
<label class="custom-file-upload">
Select Music directory <input id="file" type="file" accept="audio/*" directory allowdirs webkitdirectory/>
<p style="color: rgb(5,195,5);">Now playing:<label for="audio"></label></p>
<p style="color: rgb(5,195,5);">Select Song</p>
<select id="select">
</select>
<audio id="audio" controls></audio>

Send base64/byte array of Gif Animation from server side and display it on canvas in client side

I built an interface that calls a web API in ASP.NET (I use C# and JavaScript/AJAX to implement it).
The client side calls the controller; the controller needs to create an animated GIF and send it back to the client side as a base64 string or byte array. When the client side gets the base64, it should display it on a canvas.
Now the problem is that the canvas displays only the first frame of the animated GIF, like a static image.
I already read a lot on the internet and found this:
How Do I Convert A GIF Animation To Base64 String And Back To A GIF Animation?
But it didn't help me, because I don't want to save the image to disk just to display it on the client side.
*Note that when I save the image from the server side to my disk, it is saved as a GIF and displays all the frames like I want; something goes wrong when I transfer it to the client side.
*I use ImageMagick to create the animated GIF.
Here is my client side code:
<!DOCTYPE html>
<html>
<head>
<title></title>
<meta charset="utf-8" />
<link href="Content/bootstrap.min.css" rel="stylesheet" />
</head>
<body style="padding-top: 20px;">
<div class="col-md-10 col-md-offset-1">
<div class="well">
<!---->
<canvas id="canvasImage" width="564" height="120">
<p>We apologize, your browser does not support canvas at this time!</p>
</canvas>
<!---->
</div>
</div>
<script src="Scripts/jquery-1.10.2.min.js"></script>
<script src="Scripts/bootstrap.min.js"></script>
<script type="text/javascript">
$(document).ready(function () {
$.ajax({
url: '/api/EngineProccess',
method: 'GET',
success: function (data) {
var imageObj = new Image();
var canvas = document.getElementById("canvasImage");
var context = canvas.getContext('2d');
var image = new Image();
image.onload = function () {
context.drawImage(image, 0, 0);
};
console.log(data);
image.src = "data:image/gif;base64," + data;
},
error: function (jqXHR) {
$('#divErrorText').text(jqXHR.responseText);
$('#divError').show('fade');
}
});
});
</script>
</body>
</html>
and here is the server code:
public class EngineProccessController : ApiController
{
// GET api/EngineProccess
public String Get()
{
using (MagickImageCollection collection = new MagickImageCollection())
{
// Add first image and set the animation delay to 100ms
collection.Add("Snakeware1.gif");
collection[0].AnimationDelay = 100;
// Add second image, set the animation delay to 100ms and flip the image
collection.Add("Snakeware2.gif");
collection[1].AnimationDelay = 100;
collection[1].Flip();
// Optionally reduce colors
QuantizeSettings settings = new QuantizeSettings();
settings.Colors = 256;
collection.Quantize(settings);
// Optionally optimize the images (images should have the same size).
collection.Optimize();
// Save gif
//collection.Write("D://Test01//Test01//Animated.gif");
string data = collection.ToBase64();
return data;
}
}
}
Any ideas?
Please help.
Edit: After some days I found the problem. I use ImageMagick (Magick.NET) to create the animated GIF, and the base64 was OK, but the problem was in the canvas element: the canvas didn't display the animation like I wanted, so I changed the canvas element to a regular image element (<img>) and changed the src of the image dynamically.
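A minimal sketch of that workaround (the img element id is an illustrative assumption): keep the same AJAX call, but point an <img> at the data URI instead of drawing onto a canvas, since browsers animate GIFs in <img> elements automatically.
$.ajax({
    url: '/api/EngineProccess',
    method: 'GET',
    success: function (data) {
        // the browser handles the GIF animation itself in an <img>
        document.getElementById('gifImage').src = "data:image/gif;base64," + data;
    }
});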
Regards,
Jr.Rafa
Example of loading and playing a GIF on a canvas.
Sorry, but this is just under the 30K answer limit, so the code and comments are very cut down to fit. Ask in the comments if needed. See the bottom of the snippet for basic usage.
/*
The code was created as to the specifications set out in https://www.w3.org/Graphics/GIF/spec-gif89a.txt
The document states usage conditions
"The Graphics Interchange Format(c) is the Copyright property of
CompuServe Incorporated. GIF(sm) is a Service Mark property of
CompuServe Incorporated."
https://en.wikipedia.org/wiki/GIF#Unisys_and_LZW_patent_enforcement last paragraph
Additional sources
https://en.wikipedia.org/wiki/GIF
https://www.w3.org/Graphics/GIF/spec-gif87.txt
*/
var GIF = function () {
var timerID;
var st;
var interlaceOffsets = [0, 4, 2, 1]; // used in de-interlacing.
var interlaceSteps = [8, 8, 4, 2];
var interlacedBufSize = undefined;
var deinterlaceBuf = undefined;
var pixelBufSize = undefined;
var pixelBuf = undefined;
const GIF_FILE = {
GCExt : 0xF9,
COMMENT : 0xFE,
APPExt : 0xFF,
UNKNOWN : 0x01,
IMAGE : 0x2C,
EOF : 59,
EXT : 0x21,
};
var Stream = function (data) { // simple buffered stream
this.data = new Uint8ClampedArray(data);
this.pos = 0;
var len = this.data.length;
this.getString = function (count) {
var s = "";
while (count--) {
s += String.fromCharCode(this.data[this.pos++]);
}
return s;
};
this.readSubBlocks = function () {
var size, count, data;
data = "";
do {
count = size = this.data[this.pos++];
while (count--) {
data += String.fromCharCode(this.data[this.pos++]);
}
} while (size !== 0 && this.pos < len);
return data;
}
this.readSubBlocksB = function () { // reads a set of blocks as binary
var size, count, data;
data = [];
do {
count = size = this.data[this.pos++];
while (count--) {
data.push(this.data[this.pos++]);
}
} while (size !== 0 && this.pos < len);
return data;
}
};
// LZW decoder uncompressed each frame's pixels
var lzwDecode = function (minSize, data) {
var i, pixelPos, pos, clear, eod, size, done, dic, code, last, d, len;
pos = 0;
pixelPos = 0;
dic = [];
clear = 1 << minSize;
eod = clear + 1;
size = minSize + 1;
done = false;
while (!done) {
last = code;
code = 0;
for (i = 0; i < size; i++) {
if (data[pos >> 3] & (1 << (pos & 7))) {
code |= 1 << i;
}
pos++;
}
if (code === clear) { // clear and reset the dictionary
dic = [];
size = minSize + 1;
for (i = 0; i < clear; i++) {
dic[i] = [i];
}
dic[clear] = [];
dic[eod] = null;
continue;
}
if (code === eod) { // end of data
done = true;
return;
}
if (code >= dic.length) {
dic.push(dic[last].concat(dic[last][0]));
} else
if (last !== clear) {
dic.push(dic[last].concat(dic[code][0]));
}
d = dic[code];
len = d.length;
for (i = 0; i < len; i++) {
pixelBuf[pixelPos++] = d[i];
}
if (dic.length === (1 << size) && size < 12) {
size++;
}
}
};
var parseColourTable = function (count) { // get a colour table of length count
// Each entry is 3 bytes, for RGB.
var colours = [];
for (var i = 0; i < count; i++) {
colours.push([st.data[st.pos++], st.data[st.pos++], st.data[st.pos++]]);
}
return colours;
};
var parse = function () { // read the header. This is the starting point of the decode and async calls parseBlock
var bitField;
st.pos += 6; // skip the first stuff see GifEncoder for details
gif.width = (st.data[st.pos++]) + ((st.data[st.pos++]) << 8);
gif.height = (st.data[st.pos++]) + ((st.data[st.pos++]) << 8);
bitField = st.data[st.pos++];
gif.colorRes = (bitField & 0b1110000) >> 4;
gif.globalColourCount = 1 << ((bitField & 0b111) + 1);
gif.bgColourIndex = st.data[st.pos++];
st.pos++; // ignoring pixel aspect ratio. if not 0, aspectRatio = (pixelAspectRatio + 15) / 64
if (bitField & 0b10000000) { // global colour flag
gif.globalColourTable = parseColourTable(gif.globalColourCount);
}
setTimeout(parseBlock, 0);
};
var parseAppExt = function () { // get application specific data. Netscape added iterations and terminator. Ignoring that
st.pos += 1;
if ('NETSCAPE' === st.getString(8)) {
st.pos += 8; // ignoring this data. iterations (word) and terminator (byte)
} else {
st.pos += 3; // 3 bytes of string usually "2.0" when identifier is NETSCAPE
st.readSubBlocks(); // unknown app extension
}
};
var parseGCExt = function () { // get GC data
var bitField;
st.pos++;
bitField = st.data[st.pos++];
gif.disposalMethod = (bitField & 0b11100) >> 2;
gif.transparencyGiven = bitField & 0b1 ? true : false; // ignoring bit two that is marked as userInput???
gif.delayTime = (st.data[st.pos++]) + ((st.data[st.pos++]) << 8);
gif.transparencyIndex = st.data[st.pos++];
st.pos++;
};
var parseImg = function () { // decodes image data to create the indexed pixel image
var deinterlace, frame, bitField;
deinterlace = function (width) { // de interlace pixel data if needed
var lines, fromLine, pass, toLine;
lines = pixelBufSize / width;
fromLine = 0;
if (interlacedBufSize !== pixelBufSize) {
deinterlaceBuf = new Uint8Array(pixelBufSize);
interlacedBufSize = pixelBufSize;
}
for (pass = 0; pass < 4; pass++) {
for (toLine = interlaceOffsets[pass]; toLine < lines; toLine += interlaceSteps[pass]) {
deinterlaceBuf.set(pixelBuf.subarray(fromLine, fromLine + width), toLine * width);
fromLine += width;
}
}
};
frame = {}
gif.frames.push(frame);
frame.disposalMethod = gif.disposalMethod;
frame.time = gif.length;
frame.delay = gif.delayTime * 10;
gif.length += frame.delay;
if (gif.transparencyGiven) {
frame.transparencyIndex = gif.transparencyIndex;
} else {
frame.transparencyIndex = undefined;
}
frame.leftPos = (st.data[st.pos++]) + ((st.data[st.pos++]) << 8);
frame.topPos = (st.data[st.pos++]) + ((st.data[st.pos++]) << 8);
frame.width = (st.data[st.pos++]) + ((st.data[st.pos++]) << 8);
frame.height = (st.data[st.pos++]) + ((st.data[st.pos++]) << 8);
bitField = st.data[st.pos++];
frame.localColourTableFlag = bitField & 0b10000000 ? true : false;
if (frame.localColourTableFlag) {
frame.localColourTable = parseColourTable(1 << ((bitField & 0b111) + 1));
}
if (pixelBufSize !== frame.width * frame.height) { // create a pixel buffer if not yet created or if current frame size is different from previous
pixelBuf = new Uint8Array(frame.width * frame.height);
pixelBufSize = frame.width * frame.height;
}
lzwDecode(st.data[st.pos++], st.readSubBlocksB()); // decode the pixels
if (bitField & 0b1000000) { // de interlace if needed
frame.interlaced = true;
deinterlace(frame.width);
} else {
frame.interlaced = false;
}
processFrame(frame); // convert to canvas image
};
var processFrame = function (frame) {
var ct, cData, dat, pixCount, ind, useT, i, pixel, pDat, col, frame, ti;
frame.image = document.createElement('canvas');
frame.image.width = gif.width;
frame.image.height = gif.height;
frame.image.ctx = frame.image.getContext("2d");
ct = frame.localColourTableFlag ? frame.localColourTable : gif.globalColourTable;
if (gif.lastFrame === null) {
gif.lastFrame = frame;
}
useT = (gif.lastFrame.disposalMethod === 2 || gif.lastFrame.disposalMethod === 3) ? true : false;
if (!useT) {
frame.image.ctx.drawImage(gif.lastFrame.image, 0, 0, gif.width, gif.height);
}
cData = frame.image.ctx.getImageData(frame.leftPos, frame.topPos, frame.width, frame.height);
ti = frame.transparencyIndex;
dat = cData.data;
if (frame.interlaced) {
pDat = deinterlaceBuf;
} else {
pDat = pixelBuf;
}
pixCount = pDat.length;
ind = 0;
for (i = 0; i < pixCount; i++) {
pixel = pDat[i];
col = ct[pixel];
if (ti !== pixel) {
dat[ind++] = col[0];
dat[ind++] = col[1];
dat[ind++] = col[2];
dat[ind++] = 255; // Opaque.
} else
if (useT) {
dat[ind + 3] = 0; // Transparent.
ind += 4;
} else {
ind += 4;
}
}
frame.image.ctx.putImageData(cData, frame.leftPos, frame.topPos);
gif.lastFrame = frame;
if (!gif.waitTillDone && typeof gif.onload === "function") { // if !waitTillDone the call onload now after first frame is loaded
doOnloadEvent();
}
};
var finnished = function () { // called when the load has completed
gif.loading = false;
gif.frameCount = gif.frames.length;
gif.lastFrame = null;
st = undefined;
gif.complete = true;
gif.disposalMethod = undefined;
gif.transparencyGiven = undefined;
gif.delayTime = undefined;
gif.transparencyIndex = undefined;
gif.waitTillDone = undefined;
pixelBuf = undefined; // dereference pixel buffer
deinterlaceBuf = undefined; // dereference interlace buff (may or may not be used);
pixelBufSize = undefined;
deinterlaceBuf = undefined;
gif.currentFrame = 0;
if (gif.frames.length > 0) {
gif.image = gif.frames[0].image;
}
doOnloadEvent();
if (typeof gif.onloadall === "function") {
(gif.onloadall.bind(gif))({
type : 'loadall',
path : [gif]
});
}
if (gif.playOnLoad) {
gif.play();
}
}
var canceled = function () { // called if the load has been cancelled
finnished();
if (typeof gif.cancelCallback === "function") {
(gif.cancelCallback.bind(gif))({
type : 'canceled',
path : [gif]
});
}
}
var parseExt = function () { // parse extended blocks
switch (st.data[st.pos++]) {
case GIF_FILE.GCExt:
parseGCExt();
break;
case GIF_FILE.COMMENT:
gif.comment += st.readSubBlocks(); // found a comment field
break;
case GIF_FILE.APPExt:
parseAppExt();
break;
case GIF_FILE.UNKNOWN: // not keeping this data
st.pos += 13; // deliberate fall through to default
default: // not keeping this if it happens
st.readSubBlocks();
break;
}
}
var parseBlock = function () { // parsing the blocks
if (gif.cancel !== undefined && gif.cancel === true) {
canceled();
return;
}
switch (st.data[st.pos++]) {
case GIF_FILE.IMAGE: // image block
parseImg();
if (gif.firstFrameOnly) {
finnished();
return;
}
break;
case GIF_FILE.EOF: // EOF found so cleanup and exit.
finnished();
return;
case GIF_FILE.EXT: // extend block
default:
parseExt();
break;
}
if (typeof gif.onprogress === "function") {
gif.onprogress({
bytesRead : st.pos,
totalBytes : st.data.length,
frame : gif.frames.length
});
}
setTimeout(parseBlock, 0);
};
var cancelLoad = function (callback) {
if (gif.complete) {
return false;
}
gif.cancelCallback = callback;
gif.cancel = true;
return true;
}
var error = function (type) {
if (typeof gif.onerror === "function") {
(gif.onerror.bind(this))({
type : type,
path : [this]
});
}
gif.onerror = undefined;
gif.onload = undefined;
gif.loading = false;
}
var doOnloadEvent = function () { // fire onload event if set
gif.currentFrame = 0;
gif.lastFrameAt = new Date().valueOf();
gif.nextFrameAt = new Date().valueOf();
if (typeof gif.onload === "function") {
(gif.onload.bind(gif))({
type : 'load',
path : [gif]
});
}
gif.onload = undefined;
gif.onerror = undefined;
}
var dataLoaded = function (data) {
st = new Stream(data);
parse();
}
var loadGif = function (filename) { // starts the load
var ajax = new XMLHttpRequest();
ajax.responseType = "arraybuffer";
ajax.onload = function (e) {
if (e.target.status === 400) {
error("Bad Request response code");
} else if (e.target.status === 404) {
error("File not found");
} else {
dataLoaded(ajax.response);
}
};
ajax.open('GET', filename, true);
ajax.send();
ajax.onerror = function (e) {
error("File error");
};
this.src = filename;
this.loading = true;
}
function play() { // starts play if paused
if (!gif.playing) {
gif.paused = false;
gif.playing = true;
playing();
}
}
function pause() { // stops play
gif.paused = true;
gif.playing = false;
clearTimeout(timerID);
}
function togglePlay(){
if(gif.paused || !gif.playing){
gif.play();
}else{
gif.pause();
}
}
function seekFrame(frame) { // seeks to frame number.
clearTimeout(timerID);
frame = frame < 0 ? (frame % gif.frames.length) + gif.frames.length : frame;
gif.currentFrame = frame % gif.frames.length;
if (gif.playing) {
playing();
} else {
gif.image = gif.frames[gif.currentFrame].image;
}
}
function seek(time) { // time in Seconds
clearTimeout(timerID);
if (time < 0) {
time = 0;
}
time *= 1000; // in ms
time %= gif.length;
var frame = 0;
while (time > gif.frames[frame].time + gif.frames[frame].delay && frame < gif.frames.length) {
frame += 1;
}
gif.currentFrame = frame;
if (gif.playing) {
playing();
} else {
gif.image = gif.frames[gif.currentFrame].image;
}
}
function playing() {
var delay;
var frame;
if (gif.playSpeed === 0) {
gif.pause();
return;
}
if (gif.playSpeed < 0) {
gif.currentFrame -= 1;
if (gif.currentFrame < 0) {
gif.currentFrame = gif.frames.length - 1;
}
frame = gif.currentFrame;
frame -= 1;
if (frame < 0) {
frame = gif.frames.length - 1;
}
delay = -gif.frames[frame].delay * 1 / gif.playSpeed;
} else {
gif.currentFrame += 1;
gif.currentFrame %= gif.frames.length;
delay = gif.frames[gif.currentFrame].delay * 1 / gif.playSpeed;
}
gif.image = gif.frames[gif.currentFrame].image;
timerID = setTimeout(playing, delay);
}
var gif = { // the gif image object
onload : null, // fire on load. Use waitTillDone = true to have load fire at end or false to fire on first frame
onerror : null, // fires on error
onprogress : null, // fires a load progress event
onloadall : null, // event fires when all frames have loaded and gif is ready
paused : false, // true if paused
playing : false, // true if playing
waitTillDone : true, // If true onload will fire when all frames loaded, if false, onload will fire when first frame has loaded
loading : false, // true if still loading
firstFrameOnly : false, // if true only load the first frame
width : null, // width in pixels
height : null, // height in pixels
frames : [], // array of frames
comment : "", // comments if found in file. Note I remember that some gifs have comments per frame if so this will be all comment concatenated
length : 0, // gif length in ms (1/1000 second)
currentFrame : 0, // current frame.
frameCount : 0, // number of frames
playSpeed : 1, // play speed 1 normal, 2 twice 0.5 half, -1 reverse etc...
lastFrame : null, // temp hold last frame loaded so you can display the gif as it loads
image : null, // the current image at the currentFrame
playOnLoad : true, // if true starts playback when loaded
// functions
load : loadGif, // call this to load a file
cancel : cancelLoad, // call to stop loading
play : play, // call to start play
pause : pause, // call to pause
seek : seek, // call to seek to time
seekFrame : seekFrame, // call to seek to frame
togglePlay : togglePlay, // call to toggle play and pause state
};
return gif;
}
/*=================================================================
USAGE example below
Image used from Wiki; see HTML for required image attribution
===================================================================*/
const ctx = canvas.getContext("2d");
ctx.font = "16px arial";
var changeFrame = false;
var changeSpeed = false;
frameNum.addEventListener("mousedown",()=>{changeFrame = true ; changeSpeed = false});
speedInput.addEventListener("mousedown",()=>{changeSpeed = true; changeFrame = false});
const gifSrc = "https://upload.wikimedia.org/wikipedia/commons/thumb/9/97/Odessa_TX_Oil_Well_with_Lufkin_320D_pumping_unit.gif/220px-Odessa_TX_Oil_Well_with_Lufkin_320D_pumping_unit.gif"
var myGif = GIF(); // Creates a new gif
myGif.load(gifSrc); // set URL and load
myGif.onload = function(event){ // fires when loading is complete
frameNum.max = myGif.frameCount-1;
animate();
}
myGif.onprogress = function(event){ // Note this function is not bound to myGif
if(canvas.width !== myGif.width || canvas.height !== myGif.height){
canvas.width = myGif.width;
canvas.height = myGif.height;
ctx.font = "16px arial";
}
if(myGif.lastFrame !== null){
ctx.drawImage(myGif.lastFrame.image,0,0);
}
ctx.fillStyle = "black";
ctx.fillText("Loaded frame "+event.frame,8,20);
frameNum.max = event.frame-1;
frameNum.value = event.frame;
frameText.textContent = frameNum.value + "/" + (frameNum.max-1);
}
myGif.onerror = function(event){
ctx.fillStyle = "black";
ctx.fillText("Could not load the Gif ",8,20);
ctx.fillText("Error : " + event.type,8,40);
}
function animate(){
if(changeFrame){
if(myGif.playing){
myGif.pause();
}
myGif.seekFrame(Number(frameNum.value));
}else if(changeSpeed){
myGif.playSpeed = speedInput.value;
if(myGif.paused){
myGif.play();
}
}
frameNum.value = myGif.currentFrame;
frameText.textContent = frameNum.value + "/" + frameNum.max;
if(myGif.paused){
speedInput.value = 0;
}else{
speedInput.value = myGif.playSpeed;
}
speedText.textContent = speedInput.value;
ctx.drawImage(myGif.image,0,0);
requestAnimationFrame(animate);
}
canvas { border : 2px solid black; }
p { font-family : arial; font-size : 12px }
<canvas id="canvas"></canvas>
<div id="inputs">
<input id="frameNum" type = "range" min="0" max="1" step="1" value="0"></input>
Frame : <span id="frameText"></span><br>
<input id="speedInput" type = "range" min="-3" max="3" step="0.1" value="1"></input>
Speed : <span id="speedText"></span><br>
</div>
<p>Image source <br>By DASL51984 - Original YouTube video by user "derekdz", looped by DASL51984, CC BY-SA 4.0, Link</p>

How do you play identical audio twice in Javascript?

I'm building a DTMF dial emulator. After a certain function runs, the function dial() is executed. Currently, I hear the ringback tone once but I cannot get it to ring twice.
I originally tried to get the same file to play twice by resetting the audio currentTime to 0 and playing it again after a delay of 4 seconds (the duration between ringback tones in North America).
That didn't work, so I thought perhaps JS didn't want me playing the same audio twice. So I recreated it as a second variable and created a new function for it next in the sequence for dial().
That didn't work, but if I added an alert box before ringingTone2 played, it played as soon as I dismissed the alert.
Obviously, I can't have an alert dialog when this is done. How can I have the sound play twice, but with a 4 second gap in between? I've had no success with setTimeout(ring(), 4000) either.
Here is some of the code pertaining to this issue:
var ringingTone1 = new Audio('DTMF-ringbackTone.mp3');
var ringingTone2 = new Audio('DTMF-ringbackTone.mp3');
function dial() {
ring();
function ring() {
ringingTone1.play();
setTimeout(ringingTone2.play(),4000);
}
Right now, JS basically plays the ringback tone once and moves on to whatever comes after this in the script.
The following causes it to stop working:
function dial() {
ring();
function ring() {
var played = 0;
var maxPlay = 2;
var ringingTone = document.getElementById('music');
var playBtn = document.getElementById('playbtn');
ringingTone.onplay = ring() {
//played counter
played++;
};
ringingTone.addEventListener("ended", ring() {
//reset to start point
ringingTone.currentTime = 0;
if (played < maxPlay) {
ringingTone.play();
} else {
played = 0;
}
});
playBtn.addEventListener("click", ring() {
ringingTone.play();
});
}
This is my complete script:
var availableNumbers = ["0", "911"];
function numberSuggestion() {
var randomNumber = Math.random() * availableNumbers.length -1;
var suggestedNumber = availableNumbers[randomNumber];
document.getElementById("suggestion").innerHTML = "How about dialing " + suggestedNumber + "? Don't like this number? Click the button above again!";
}
var dialTone;
function offHook() {
document.getElementById("WE2500").style.display = "none";
document.getElementById("dialPad").style.display = "block";
dialTone = new Audio('dialTone.m4a');
dialTone.play();
}
var number = "";
var timeout;
function numberDial() {
if (dialTone) {
dialTone.pause();
dialTone.currentTime = 0;
}
clearTimeout(timeout);
timeout = setTimeout(dial, 2000);
}
function dial1() {
numberDial();
number = number + "1";
var tone1 = new Audio('DTMF-1.wav');
tone1.play();
}
function dial2() {
numberDial();
number = number + "2";
var tone2 = new Audio('DTMF-2.wav');
tone2.play();
}
function dial3() {
numberDial();
number = number + "3";
var tone3 = new Audio('DTMF-3.wav');
tone3.play();
}
function dial4() {
numberDial();
number = number + "4";
var tone4 = new Audio('DTMF-5.wav');
tone4.play();
}
function dial5() {
numberDial();
number = number + "5";
var tone5 = new Audio('DTMF-5.wav');
tone5.play();
}
function dial6() {
numberDial();
number = number + "6";
var tone6 = new Audio('DTMF-6.wav');
tone6.play();
}
function dial7() {
numberDial();
number = number + "7";
var tone7 = new Audio('DTMF-7.wav');
tone7.play();
}
function dial8() {
numberDial();
number = number + "8";
var tone8 = new Audio('DTMF-8.wav');
tone8.play();
}
function dial9() {
numberDial();
number = number + "9";
var tone9 = new Audio('DTMF-9.wav');
tone9.play();
}
function dial0() {
numberDial();
number = number + "0";
var tone0 = new Audio('DTMF-0.wav');
tone0.play();
}
function dialStar() {
numberDial();
number = number + "*";
var toneStar = new Audio('DTMF-star.wav');
toneStar.play();
}
function dialPound() {
numberDial();
number = number + "#";
var tonePound = new Audio('DTMF-pound.wav');
tonePound.play();
}
//var ringingTone1 = new Audio('DTMF-ringbackTone.mp3');
//var ringingTone2 = new Audio('DTMF-ringbackTone.mp3');
function dial() {
ring();
function ring() {
var played = 0;
var maxPlay = 2;
var ringingTone = document.getElementById('music');
var playBtn = document.getElementById('playbtn');
ringingTone.onplay = ring() {
//played counter
played++;
};
ringingTone.addEventListener("ended", ring() {
//reset to start point
ringingTone.currentTime = 0;
if (played < maxPlay) {
ringingTone.play();
} else {
played = 0;
}
});
playBtn.addEventListener("click", ring() {
ringingTone.play();
});
}
switch(number) {
case "0":
break;
case "911":
var pickup911 = new Audio('911-xxx-fleet.mp3');
pickup911.play();
break;
default:
}
}
The idea is simple: one counter for the number of plays, one constant for the maximum plays.
Use the onplay event to increase the counter every time the audio is played.
Use the ended event to replay the audio if the maximum play count has not been reached yet; otherwise, reset the played count to 0.
var played = 0;
var maxPlay = 2;
var ringingTone = document.getElementById('music');
var playBtn = document.getElementById('playbtn');
ringingTone.onplay = function() {
//played counter
played++;
};
ringingTone.addEventListener("ended", function() {
//reset to start point
ringingTone.currentTime = 0;
if (played < maxPlay) {
ringingTone.play();
} else {
played = 0;
}
});
playBtn.addEventListener("click", function() {
ringingTone.play();
});
<audio id="music" src="http://www.noiseaddicts.com/samples_1w72b820/3732.mp3"></audio>
<button id="playbtn">Play me</button>
Calling a function in setTimeout (and I think this applies to all callbacks) using () will cause the code inside to be evaluated immediately (which in your case meant playing both audios at the same time).
Hence my comment above to use .play without (), which it looks like doesn't work. However, this does work:
function dial() {
function ring() {
ringingTone.play();
}
ring();
setTimeout(ring, 4000);
}
dial();
http://jsfiddle.net/tL3monyp/
(audio src unscrupulously copied from Daniel's answer ;-)
I've also added 2 timeouts to illustrate calling the function with and without ().
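As a small illustration of that difference (not part of the fiddle; ring is the function from the snippet above):
setTimeout(ring, 4000);   // passes the function itself; ring() runs after 4 seconds
setTimeout(ring(), 4000); // calls ring() immediately and passes its return value
                          // (undefined) to setTimeout, so nothing is scheduled later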

HammerJS pull to refresh

I know very little about JavaScript but need to use hammer.js in a project I'm working on.
Instead of replacing/refreshing the image, I'm trying to use the pull-to-refresh script to refresh the page.
I've tried adding
location.reload();
to the script but to no avail; can anyone shed any light on this?
http://eightmedia.github.io/hammer.js/
This is the code im using
/**
* requestAnimationFrame and cancel polyfill
*/
(function() {
var lastTime = 0;
var vendors = ['ms', 'moz', 'webkit', 'o'];
for(var x = 0; x < vendors.length && !window.requestAnimationFrame; ++x) {
window.requestAnimationFrame = window[vendors[x]+'RequestAnimationFrame'];
window.cancelAnimationFrame =
window[vendors[x]+'CancelAnimationFrame'] || window[vendors[x]+'CancelRequestAnimationFrame'];
}
if (!window.requestAnimationFrame)
window.requestAnimationFrame = function(callback, element) {
var currTime = new Date().getTime();
var timeToCall = Math.max(0, 16 - (currTime - lastTime));
var id = window.setTimeout(function() { callback(currTime + timeToCall); },
timeToCall);
lastTime = currTime + timeToCall;
return id;
};
if (!window.cancelAnimationFrame)
window.cancelAnimationFrame = function(id) {
clearTimeout(id);
};
}());
/**
* pull to refresh
* @type {*}
*/
var PullToRefresh = (function() {
function Main(container, slidebox, slidebox_icon, handler) {
var self = this;
this.breakpoint = 80;
this.container = container;
this.slidebox = slidebox;
this.slidebox_icon = slidebox_icon;
this.handler = handler;
this._slidedown_height = 0;
this._anim = null;
this._dragged_down = false;
this.hammertime = Hammer(this.container)
.on("touch dragdown release", function(ev) {
self.handleHammer(ev);
});
};
/**
* Handle HammerJS callback
* @param ev
*/
Main.prototype.handleHammer = function(ev) {
var self = this;
switch(ev.type) {
// reset element on start
case 'touch':
this.hide();
break;
// on release we check how far we dragged
case 'release':
if(!this._dragged_down) {
return;
}
// cancel animation
cancelAnimationFrame(this._anim);
// over the breakpoint, trigger the callback
if(ev.gesture.deltaY >= this.breakpoint) {
container_el.className = 'pullrefresh-loading';
pullrefresh_icon_el.className = 'icon loading';
this.setHeight(60);
this.handler.call(this);
}
// just hide it
else {
pullrefresh_el.className = 'slideup';
container_el.className = 'pullrefresh-slideup';
this.hide();
}
break;
// when we dragdown
case 'dragdown':
this._dragged_down = true;
// if we are not at the top move down
var scrollY = window.scrollY;
if(scrollY > 5) {
return;
} else if(scrollY !== 0) {
window.scrollTo(0,0);
}
// no requestAnimationFrame instance is running, start one
if(!this._anim) {
this.updateHeight();
}
// stop browser scrolling
ev.gesture.preventDefault();
// update slidedown height
// it will be updated when requestAnimationFrame is called
this._slidedown_height = ev.gesture.deltaY * 0.4;
break;
}
};
/**
* when we set the height, we just change the container y
* @param {Number} height
*/
Main.prototype.setHeight = function(height) {
if(Modernizr.csstransforms3d) {
this.container.style.transform = 'translate3d(0,'+height+'px,0) ';
this.container.style.oTransform = 'translate3d(0,'+height+'px,0)';
this.container.style.msTransform = 'translate3d(0,'+height+'px,0)';
this.container.style.mozTransform = 'translate3d(0,'+height+'px,0)';
this.container.style.webkitTransform = 'translate3d(0,'+height+'px,0) scale3d(1,1,1)';
}
else if(Modernizr.csstransforms) {
this.container.style.transform = 'translate(0,'+height+'px) ';
this.container.style.oTransform = 'translate(0,'+height+'px)';
this.container.style.msTransform = 'translate(0,'+height+'px)';
this.container.style.mozTransform = 'translate(0,'+height+'px)';
this.container.style.webkitTransform = 'translate(0,'+height+'px)';
}
else {
this.container.style.top = height+"px";
}
};
/**
* hide the pullrefresh message and reset the vars
*/
Main.prototype.hide = function() {
container_el.className = '';
this._slidedown_height = 0;
this.setHeight(0);
cancelAnimationFrame(this._anim);
this._anim = null;
this._dragged_down = false;
};
/**
* hide the pullrefresh message and reset the vars
*/
Main.prototype.slideUp = function() {
var self = this;
cancelAnimationFrame(this._anim);
pullrefresh_el.className = 'slideup';
container_el.className = 'pullrefresh-slideup';
this.setHeight(0);
setTimeout(function() {
self.hide();
}, 500);
};
/**
* update the height of the slidedown message
*/
Main.prototype.updateHeight = function() {
var self = this;
this.setHeight(this._slidedown_height);
if(this._slidedown_height >= this.breakpoint){
this.slidebox.className = 'breakpoint';
this.slidebox_icon.className = 'icon arrow arrow-up';
}
else {
this.slidebox.className = '';
this.slidebox_icon.className = 'icon arrow';
}
this._anim = requestAnimationFrame(function() {
self.updateHeight();
});
};
return Main;
})();
function getEl(id) {
return document.getElementById(id);
}
var container_el = getEl('container');
var pullrefresh_el = getEl('pullrefresh');
var pullrefresh_icon_el = getEl('pullrefresh-icon');
var image_el = getEl('random-image');
var refresh = new PullToRefresh(container_el, pullrefresh_el, pullrefresh_icon_el);
// update image onrefresh
refresh.handler = function() {
var self = this;
// a small timeout to demo the loading state
setTimeout(function() {
var preload = new Image();
preload.onload = function() {
image_el.src = this.src;
self.slideUp();
};
preload.src = 'http://lorempixel.com/800/600/?'+ (new Date().getTime());
}, 1000);
};
There is a Beer involved for anyone that can fix this :)
http://jsfiddle.net/zegermens/PDcr9/1/
You're assigning the return value of the location.reload function to the src attribute of an Image element. I'm not sure the function even has a return value: https://developer.mozilla.org/en-US/docs/Web/API/Location.reload
Try this:
// update image onrefresh
refresh.handler = function() {
var self = this;
// a small timeout to demo the loading state
setTimeout(function() {
window.location.reload();
}, 1000);
};
