exponentialRampToValueAtTime does not prevent cracking in firefox - javascript

When I play a tone and then stop it, an unpleasant "crack" is heard at the beginning and at the end. I searched for a solution and found out that reducing the volume over time should solve that issue.
Therefore, I use a GainNode to ramp the gain up and down, instead of cutting the audio abruptly:
controlGain.gain.exponentialRampToValueAtTime(
1,
gAudioCtx.currentTime+time_milliseconds/1000
);
// and later...
controlGain.gain.exponentialRampToValueAtTime(
0.0001,
gAudioCtx.currentTime+time_milliseconds/1000
);
Because exponential functions are not defined at 0, 0.0001 is used instead.
However, in Firefox, I can still hear nasty cracking. I also noticed that using longer delays has no effect - the gain reaches target value instantly.
function sleep(ms) {
return new Promise(function (resolve, reject) {
setTimeout(resolve, ms);
});
}
function play() {
const AUDIO_RAMP_DELAY = 50;
var gAudioCtx = new AudioContext();
const controlGain = gAudioCtx.createGain();
controlGain.gain.value = 0.00001;
/// Full volume at t+50ms
controlGain.gain.exponentialRampToValueAtTime(
1,
gAudioCtx.currentTime+AUDIO_RAMP_DELAY/1000
);
controlGain.connect(gAudioCtx.destination);
var osc = gAudioCtx.createOscillator();
// create a tone around 440hz
const length = 1024;
var real = new Float32Array(length);
var imag = new Float32Array(length);
real[440]=1;
//real[512]=1;
var wave = gAudioCtx.createPeriodicWave(real, imag, {disableNormalization: true});
osc.frequency.value = 1;
osc.setPeriodicWave(wave);
osc.connect(controlGain);
osc.start();
(async function() {
await sleep(AUDIO_RAMP_DELAY+1);
// now we're at full volume, wait another 2 seconds
await sleep(2000);
controlGain.gain.exponentialRampToValueAtTime(
0.00001,
gAudioCtx.currentTime+50/1000
);
await sleep(2000);
osc.stop();
document.querySelector("button").disabled = false;
})();
}
<h2>Warning: this demo makes loud sounds!</h2>
<button onclick="play(); this.disabled=true">Click to play</button>
How can I make it work in Firefox as well?

I noticed too that exponentialRampToValueAtTime() does not seem to work as intended in Firefox. To me it seems like an incorrect implementation.
A workaround could be to use linearRampToValueAtTime instead.
Alternatively you could also use setValueCurveAtTime to define your own curve.
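For illustration, a minimal sketch of both workarounds (assuming the controlGain and gAudioCtx objects from the question and a 50 ms fade-out; either one replaces the exponential ramp):
// Workaround 1: a linear ramp is allowed to go all the way down to 0.
controlGain.gain.linearRampToValueAtTime(0, gAudioCtx.currentTime + 0.05);

// Workaround 2: describe the fade yourself with setValueCurveAtTime,
// e.g. a 64-point linear fade from the current gain value down to 0.
const fade = new Float32Array(64);
for (let i = 0; i < fade.length; i++) {
  fade[i] = controlGain.gain.value * (1 - i / (fade.length - 1));
}
controlGain.gain.setValueCurveAtTime(fade, gAudioCtx.currentTime, 0.05);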

Related

Web audio : when mobile screen turns off, setTimeout slows down

EDIT: per Yogi's comment (see "setTimeout" and "throttling" in https://developer.mozilla.org/en-US/docs/Web/API/setTimeout ), I've tried adding an AudioContext to prevent the slowdown.
document.addEventListener('click', ev => {
let audCtxt = new AudioContext({});
});
(AudioContext needs user interaction, hence the event listener.)
But, no luck.
Other ideas I'm noting here to follow up are
maybe a while loop, checking Date.now() for multiples of 10ms -- but I think that would crash the page
maybe using requestAnimationFrame?
Original post:
I have a setTimeout firing every 0.01 seconds that's acting as a master clock for my web app.
The app plays synchronized sounds that respond to user interaction, hence the need for a master clock. Simplified:
let counter = 0;
(function tick() {
counter++;
console.log(counter);
setTimeout(tick, 10);
})();
When on a mobile device, the setTimeout slows down (about 2-4x) when the screen is locked/off. (Tested on Android, not iOS).
This can be verified by logging, like the above, or by generating a sound when the counter is multiple of 100.
How can I prevent this?
Should I be taking a different approach to a "master clock" that synchronizes triggering audio samples while still allowing the audio to respond in real time to user interaction?
setTimeout is not reliable as other things, such as promises, have higher execution priority. One possible workaround is to create a custom timer using promises. Here is an example:
var customDelay = new Promise(function (resolve) {
var delay = 10; // milliseconds
var before = Date.now();
while (Date.now() < before + delay) { };
resolve();
});
customDelay.then(function () {
//Timer triggered
});
Update 1:
Given that you want a 10ms update frequency, running the above code on the main thread ends up locking up the UI due to the while loop. With that in mind, offloading that while loop into a web worker would resolve this. Here is some code:
<html>
<head>
<title></title>
</head>
<body>
<script id="FastTimer" type="javascript/worker">
onmessage = function (event) {
var delay = 10; // milliseconds
var before = Date.now();
while (Date.now() < before + delay) { };
postMessage({data: []});
};
</script>
<script>
var worker;
window.onload = function() {
var blob = new Blob([document.querySelector("#FastTimer").textContent]);
blobURL = window.URL.createObjectURL(blob);
worker = new Worker(blobURL);
worker.addEventListener("message", receivedWorkerMessage);
worker.onerror = workerError;
//Start the worker.
worker.postMessage({});
}
var counter = 0;
function receivedWorkerMessage(event) {
worker.postMessage({});
timerTriggered();
}
function timerTriggered() {
counter++;
console.log(counter);
}
function workerError(error) {
alert(error.message);
}
function stopWorker() {
worker.terminate();
worker = null;
}
</script>
</body>
</html>
The main issue with the above is that I suspect there would be some sort of time cost going back and forth between the worker (maybe a couple ms, hard to say).
As mentioned, normally requestAnimationFrame is used for animations in web apps. However, this would likely not fire when the screen is locked. But if you want to try, here is a sample:
<html>
<head>
<title></title>
</head>
<body>
<script>
var counter = 0;
var minTimeSpan = 10;
var lastTime = performance.now();
function animate() {
let t = performance.now();
if (t - lastTime >= minTimeSpan) {
lastTime = t;
timerTriggered();
}
requestAnimationFrame(animate);
}
function timerTriggered() {
counter++;
console.log(counter);
}
animate();
</script>
</body>
</html>
This can usually be solved by using a Web Worker for running the timer. I created a library which looks like setTimeout() but uses a Web Worker internally.
https://github.com/chrisguttandin/worker-timers
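For reference, a rough usage sketch of worker-timers (assuming its setInterval/clearInterval exports, which mirror the native timer API):
import { setInterval, clearInterval } from 'worker-timers';

// The interval is scheduled inside a Web Worker, so it is not throttled
// as aggressively as a main-thread timer when the tab or screen is inactive.
const intervalId = setInterval(() => {
  tick(); // hypothetical master-clock callback
}, 10);

// later, when the clock is no longer needed:
clearInterval(intervalId);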
But there was a bug a while ago in some browser (I forgot which one it was) which caused this to not work any longer. Therefore I built the same abstraction on top of a running AudioContext.
https://github.com/chrisguttandin/audio-context-timers
But as you already said this only works if the page is already allowed to run an AudioContext by starting one in response to a click handler.

WebGLSync is always UNSIGNALED

I'm trying to play around with WebGLSyncs and I'm having a hard time getting a WebGLSync to be signaled.
The following is unsignaled on all browsers supporting WebGL2 (Chrome, Opera, Firefox):
function test() {
let canvas = document.createElement('canvas');
let gl = canvas.getContext('webgl2');
let sync = gl.fenceSync(gl.SYNC_GPU_COMMANDS_COMPLETE, 0);
gl.flush();
gl.finish();
let status = gl.getSyncParameter(sync, gl.SYNC_STATUS);
console.log(sync, status, status === gl.UNSIGNALED); // logs "true"
gl.deleteSync(sync);
}
I'm expecting this to work, since gl.finish() should wait until all GPU commands have been processed - but it looks like the sync fence was not signaled.
I would very much appreciate a minimal, working WebGLSync example that actually gets signaled. I searched GitHub for such but I found nothing.
EDIT
Based on the answer from pleup, I put together this code sample which works fine in my environment (Windows + Chrome).
function sleep(ms) {
return new Promise(resolve => setTimeout(resolve, ms));
}
async function test() {
let canvas = document.createElement('canvas');
let gl = canvas.getContext('webgl2');
let sync = gl.fenceSync(gl.SYNC_GPU_COMMANDS_COMPLETE, 0);
gl.flush();
while (gl.getSyncParameter(sync, gl.SYNC_STATUS) === gl.UNSIGNALED) {
await sleep(100);
}
let status = gl.getSyncParameter(sync, gl.SYNC_STATUS);
console.log(sync, status, status === gl.SIGNALED);
gl.deleteSync(sync);
}
test()
You have to wait for a tick to see the sync signaled. The sync status will never change within the same JS execution frame.
https://www.khronos.org/registry/webgl/specs/latest/2.0/#3.7.14
In order to ensure consistent behavior across platforms, sync objects may only transition to the signaled state when the user agent's event loop is not executing a task. In other words:
A sync object must not become signaled until control has returned to the user agent's main loop.
Repeatedly fetching a sync object's SYNC_STATUS parameter in a loop, without returning control to the user agent, must always return the same value.
from the WebGL2 spec
5.39 Sync objects' results must not be made available in the current frame
In OpenGL ES 3.0, if the appropriate primitives (e.g. glFinish() or another synchronous API) are called, a sync object may be signaled in the same frame it was issued. In WebGL, in order to improve application portability, a sync object must never transition to the signaled state in the same frame the sync was issued. See the specification of getSyncParameter and clientWaitSync for discussion and rationale.
async function test() {
let canvas = document.createElement('canvas');
let gl = canvas.getContext('webgl2');
let sync = gl.fenceSync(gl.SYNC_GPU_COMMANDS_COMPLETE, 0);
gl.flush();
gl.finish();
await waitFrame();
let status = gl.getSyncParameter(sync, gl.SYNC_STATUS);
console.log(sync, glEnumToString(gl, status));
gl.deleteSync(sync);
}
function waitFrame() {
return new Promise((resolve) => {
requestAnimationFrame(resolve);
});
}
function glEnumToString(gl, v) {
for (const k in gl) {
if (gl[k] === v) {
return k;
}
}
return `0x${v.toString(16)}`;
}
test();

Javascript recursive function for banner pictures

I'm trying to create a rotating banner to cycle through a few images.
const images = [url1, url2, url3]
const rotate = (url) => {
document.getElementById('banner').style.background = url
rotate(next)
}
The URLs are correct full paths, only shortened here in the question. I'm struggling to figure out how to get next in the code above. This is some code I wrote from looking at examples. I'm not a JS programmer, I only recently started. Thank you for helping.
Here's one approach:
const rotate = (element, delay, urls) => {
let current = 0;
(function next() {
element.style.background = urls[current]; // set image
current = (current + 1) % urls.length; // update for next pass
setTimeout(next, delay); // recycle
})(); // start immediately
};
with usage:
rotate(document.getElementById('banner'), 5000, [url1, url2, url3]);
The modulo arithmetic resets current to zero after the last image has been shown.

DOM refresh on long running function

I have a button which runs a long running function when it's clicked. Now, while the function is running, I want to change the button text, but I'm having problems in some browsers like Firefox, IE.
html:
<button id="mybutt" class="buttonEnabled" onclick="longrunningfunction();"><span id="myspan">do some work</span></button>
javascript:
function longrunningfunction() {
document.getElementById("myspan").innerHTML = "doing some work";
document.getElementById("mybutt").disabled = true;
document.getElementById("mybutt").className = "buttonDisabled";
//long running task here
document.getElementById("myspan").innerHTML = "done";
}
Now this has problems in Firefox and IE (in Chrome it works OK).
So I thought to put it into a setTimeout:
function longrunningfunction() {
document.getElementById("myspan").innerHTML = "doing some work";
document.getElementById("mybutt").disabled = true;
document.getElementById("mybutt").className = "buttonDisabled";
setTimeout(function() {
//long running task here
document.getElementById("myspan").innerHTML = "done";
}, 0);
}
but this doesn't work either in Firefox! The button gets disabled and changes colour (due to the application of the new CSS), but the text does not change.
I have to set the time to 50ms instead of just 0ms in order to make it work (change the button text). Now I find this stupid, to say the least. I could understand if it worked with just a 0ms delay, but what would happen on a slower computer? Maybe Firefox would need 100ms there in the setTimeout? It sounds rather arbitrary. I tried many values - 1ms, 10ms, 20ms... no, it won't refresh; only with 50ms.
So I followed the advice in this topic:
Forcing a DOM refresh in Internet explorer after javascript dom manipulation
so I tried:
function longrunningfunction() {
document.getElementById("myspan").innerHTML = "doing some work";
var a = document.getElementById("mybutt").offsetTop; //force refresh
//long running task here
document.getElementById("myspan").innerHTML = "done";
}
but it doesn't work (Firefox 21). Then I tried:
function longrunningfunction() {
document.getElementById("myspan").innerHTML = "doing some work";
document.getElementById("mybutt").disabled = true;
document.getElementById("mybutt").className = "buttonDisabled";
var a = document.getElementById("mybutt").offsetTop; //force refresh
var b = document.getElementById("myspan").offsetTop; //force refresh
var c = document.getElementById("mybutt").clientHeight; //force refresh
var d = document.getElementById("myspan").clientHeight; //force refresh
setTimeout(function() {
//long running task here
document.getElementById("myspan").innerHTML = "done";
}, 0);
}
I even tried clientHeight instead of offsetTop, but nothing - the DOM does not get refreshed.
Can someone offer a reliable, preferably non-hacky solution?
thanks in advance!
As suggested here, I also tried
$('#parentOfElementToBeRedrawn').hide().show();
to no avail
Force DOM redraw/refresh on Chrome/Mac
TL;DR:
looking for a RELIABLE cross-browser method to force a DOM refresh WITHOUT the use of setTimeout (currently the preferred solution, but it needs different time intervals depending on the type of long-running code, the browser, and computer speed - setTimeout requires anywhere from 50 to 100ms depending on the situation)
jsfiddle: http://jsfiddle.net/WsmUh/5/
Webpages are updated based on a single thread controller, and half the browsers don't update the DOM or styling until your JS execution halts, giving computational control back to the browser. That means if you set some element.style.[...] = ... it won't kick in until your code finishes running (either completely, or because the browser sees you're doing something that lets it intercept processing for a few ms).
You have two problems: 1) your button has a <span> in it. Remove that, just set .innerHTML on the button itself. But this isn't the real problem of course. 2) you're running very long operations, and you should think very hard about why, and after answering the why, how:
If you're running a long computational job, cut it up into timeout callbacks (or, in 2019, await/async - see note at the end of this answer). Your examples don't show what your "long job" actually is (a spin loop doesn't count) but you have several options depending on the browsers you target, with one GIANT footnote: don't run long jobs in JavaScript, period. JavaScript is a single-threaded environment by specification, so any operation you want to do should be able to complete in milliseconds. If it can't, you're literally doing something wrong.
If you need to calculate difficult things, offload it to the server with an AJAX operation (universal across browsers, often giving you a) faster processing for that operation and b) a good 30 seconds of time that you can asynchronously not-wait for the result to be returned) or use a webworker background thread (very much NOT universal).
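As a rough sketch of the AJAX route (the /compute endpoint, request payload, and response shape are made up for illustration):
document.getElementById("myspan").innerHTML = "doing some work";

// hand the heavy calculation to the server and update the UI when it returns
fetch("/compute", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({ input: jobParameters }) // jobParameters is hypothetical
})
  .then((response) => response.json())
  .then((result) => {
    document.getElementById("myspan").innerHTML = "done";
    // use result here
  });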
If your calculation takes long but not absurdly so, refactor your code so that you perform parts, with timeout breathing space:
function doLongCalculation(callbackFunction) {
var partialResult = {};
// part of the work, filling partialResult
setTimeout(function(){ doSecondBit(partialResult, callbackFunction); }, 10);
}
function doSecondBit(partialResult, callbackFunction) {
// more 'part of the work', filling partialResult
setTimeout(function(){ finishUp(partialResult, callbackFunction); }, 10);
}
function finishUp(partialResult, callbackFunction) {
var result;
// do last bits, forming final result
callbackFunction(result);
}
A long calculation can almost always be refactored into several steps, either because you're performing several steps, or because you're running the same computation a million times, and can cut it up into batches. If you have (exaggerated) this:
var results = [];
for(var i=0; i<1000000; i++) {
// computation is performed here
if(...) results.push(...);
}
then you can trivially cut this up into a timeout-relaxed function with a callback
function runBatch(start, end, terminal, results, callback) {
var i;
for(var i=start; i<end; i++) {
// computation is performed here
if(...) results.push(...); }
if(i>=terminal) {
callback(results);
} else {
var inc = end-start;
setTimeout(function() {
runBatch(start+inc, end+inc, terminal, results, callback);
},10);
}
}
function dealWithResults(results) {
...
}
function doLongComputation() {
runBatch(0,1000,1000000,[],dealWithResults);
}
TL;DR: don't run long computations, but if you have to, make the server do the work for you and just use an asynchronous AJAX call. The server can do the work faster, and your page won't block.
The JS examples of how to deal with long computations in JS at the client are only here to explain how you might deal with this problem if you don't have the option to do AJAX calls, which 99.99% of the time will not be the case.
edit
also note that your bounty description is a classic case of The XY problem
2019 edit
In modern JS the await/async concept vastly improves upon timeout callbacks, so use those instead. Any await lets the browser know that it can safely run scheduled updates, so you write your code in a "structured as if it's synchronous" way, but you mark your functions as async, and then you await their output whenever you call them:
async function doLongCalculation() {
let firstbit = await doFirstBit();
let secondbit = await doSecondBit(firstbit);
let result = await finishUp(secondbit);
return result;
}
async function doFirstBit() {
//...
}
async function doSecondBit...
...
SOLVED IT!! No setTimeout()!!!
Tested in Chrome 27.0.1453, Firefox 21.0, Internet Explorer 9.0.8112
$("#btn").on("mousedown",function(){
$('#btn').html('working');}).on('mouseup', longFunc);
function longFunc(){
//Do your long running work here...
for (i = 1; i<1003332300; i++) {}
//And on finish....
$('#btn').html('done');
}
As of 2019 one uses a double requestAnimationFrame to skip a frame instead of creating a race condition using setTimeout.
function doRun() {
document.getElementById('app').innerHTML = 'Processing JS...';
requestAnimationFrame(() =>
requestAnimationFrame(function(){
//blocks render
confirm('Heavy load done')
document.getElementById('app').innerHTML = 'Processing JS... done';
}))
}
doRun()
<div id="app"></div>
As a usage example, think of calculating pi using Monte Carlo in an endless loop:
using a for loop to mock while(true), as the real thing would break the page
function* piMonteCarlo(r = 5, yield_cycle = 10000){
let total = 0, hits = 0, x=0, y=0, rsqrd = Math.pow(r, 2);
while(true){
total++;
if(total === Number.MAX_SAFE_INTEGER){
break;
}
x = Math.random() * r * 2 - r;
y = Math.random() * r * 2 - r;
(Math.pow(x,2) + Math.pow(y,2) < rsqrd) && hits++;
if(total % yield_cycle === 0){
yield 4 * hits / total
}
}
}
let pi_gen = piMonteCarlo(5, 1000), pi = 3;
for(let i = 0; i < 1000; i++){
// mocks
// while(true){
// basically last value will be rendered only
pi = pi_gen.next().value
console.log(pi)
document.getElementById('app').innerHTML = "PI: " + pi
}
<div id="app"></div>
And now think about using requestAnimationFrame for updates in between ;)
function* piMonteCarlo(r = 5, yield_cycle = 10000){
let total = 0, hits = 0, x=0, y=0, rsqrd = Math.pow(r, 2);
while(true){
total++;
if(total === Number.MAX_SAFE_INTEGER){
break;
}
x = Math.random() * r * 2 - r;
y = Math.random() * r * 2 - r;
(Math.pow(x,2) + Math.pow(y,2) < rsqrd) && hits++;
if(total % yield_cycle === 0){
yield 4 * hits / total
}
}
}
let pi_gen = piMonteCarlo(5, 1000), pi = 3;
function rAFLoop(calculate){
return new Promise(resolve => {
requestAnimationFrame( () => {
requestAnimationFrame(() => {
typeof calculate === "function" && calculate()
resolve()
})
})
})
}
let stopped = false
async function piDOM(){
while(stopped==false){
await rAFLoop(() => {
pi = pi_gen.next().value
console.log(pi)
document.getElementById('app').innerHTML = "PI: " + pi
})
}
}
function stop(){
stopped = true;
}
function start(){
if(stopped){
stopped = false
piDOM()
}
}
piDOM()
<div id="app"></div>
<button onclick="stop()">Stop</button>
<button onclick="start()">start</button>
As described in the "Script taking too long and heavy jobs" section of Events and timing in-depth (an interesting reading, by the way):
[...] split the job into parts which get scheduled after each other. [...] Then there is a "free time" for the browser to respond between parts. It can render and react to other events. Both the visitor and the browser are happy.
I am sure that there are many times in which a task cannot be split into smaller tasks, or fragments. But I am sure that there will be many other times in which this is possible too! :-)
Some refactoring is needed in the example provided. You could create a function to do a piece of the work you have to do. It could begin like this:
function doHeavyWork(start) {
var total = 1000000000;
var fragment = 1000000;
var end = start + fragment;
// Do heavy work
for (var i = start; i < end; i++) {
//
}
Once the work is finished, function should determine if next work piece must be done, or if execution has finished:
if (end == total) {
// If we reached the end, stop and change status
document.getElementById("btn").innerHTML = "done!";
} else {
// Otherwise, process next fragment
setTimeout(function() {
doHeavyWork(end);
}, 0);
}
}
Your main dowork() function would be like this:
function dowork() {
// Set "working" status
document.getElementById("btn").innerHTML = "working";
// Start heavy process
doHeavyWork(0);
}
Full working code at http://jsfiddle.net/WsmUh/19/ (seems to behave gently on Firefox).
If you don't want to use setTimeout then you are left with WebWorker - this will require HTML5 enabled browsers however.
This is one way you can use them -
Define your HTML and an inline script (you don't have to use an inline script, you can just as well give a URL to an existing separate JS file):
<input id="start" type="button" value="Start" />
<div id="status">Preparing worker...</div>
<script type="javascript/worker">
postMessage('Worker is ready...');
onmessage = function(e) {
if (e.data === 'start') {
//simulate heavy work..
var max = 500000000;
for (var i = 0; i < max; i++) {
if ((i % 100000) === 0) postMessage('Progress: ' + (i / max * 100).toFixed(0) + '%');
}
postMessage('Done!');
}
};
</script>
For the inline script we mark it with type javascript/worker.
In the regular Javascript file -
The function that converts the inline script to a Blob-url that can be passed to a WebWorker. Note that this might not work in IE and you will have to use a regular file:
function getInlineJS() {
var js = document.querySelector('[type="javascript/worker"]').textContent;
var blob = new Blob([js], {
"type": "text\/plain"
});
return URL.createObjectURL(blob);
}
Setup worker:
var ww = new Worker(getInlineJS());
Receive messages (or commands) from the WebWorker:
ww.onmessage = function (e) {
var msg = e.data;
document.getElementById('status').innerHTML = msg;
if (msg === 'Done!') {
alert('Next');
}
};
We kick off with a button-click in this demo:
document.getElementById('start').addEventListener('click', start, false);
function start() {
ww.postMessage('start');
}
Working example here:
http://jsfiddle.net/AbdiasSoftware/Ls4XJ/
As you can see the user-interface is updated (with progress in this example) even if we're using a busy-loop on the worker. This was tested with an Atom based (slow) computer.
If you don't want or can't use WebWorker you have to use setTimeout.
This is because it is the only way (besides setInterval) that allows you to queue up an event. As you noticed, you will need to give it a few milliseconds, as this gives the UI engine "room to breathe", so to speak. As JS is single-threaded you cannot queue up events in other ways (requestAnimationFrame will not work well in this context).
Hope this helps.
Update: I don't think in the long term that you can be sure of avoiding Firefox's aggressive avoidance of DOM updates without using a timeout. If you want to force a redraw / DOM update, there are tricks available, like adjusting the offset of elements, or doing hide() then show(), etc., but there is nothing very pretty available, and after a while when those tricks get abused and slow down user experience, then browsers get updated to ignore those tricks. See this article and the linked articles beside it for some examples: Force DOM redraw/refresh on Chrome/Mac
The other answers look like they have the basic elements needed, but I thought it would be worthwhile to mention that my practice is to wrap all interactive DOM-changing functions in a "dispatch" function which handles the necessary pauses needed to get around the fact that Firefox is extremely aggressive in avoiding DOM updates in order to score well on benchmarks (and to be responsive to users while browsing the internet).
I looked at your JSFiddle and customized a dispatch function like the one that many of my programs rely on. I think it is self-explanatory, and you can just paste it into your existing JSFiddle to see how it works:
$("#btn").on("click", function() { dispatch(this, dowork, 'working', 'done!'); });
function dispatch(me, work, working, done) {
/* work function, working message HTML string, done message HTML string */
/* only designed for a <button></button> element */
var pause = 50, old;
if (!me || me.tagName.toLowerCase() != 'button' || me.innerHTML == working) return;
old = me.innerHTML;
me.innerHTML = working;
setTimeout(function() {
work();
me.innerHTML = done;
setTimeout(function() { me.innerHTML = old; }, 1500);
}, pause);
}
function dowork() {
for (var i = 1; i<1000000000; i++) {
//
}
}
Note: the dispatching function also blocks calls from happening at the same time, because it can seriously confuse users if status updates from multiple clicks are happening together.
Fake an ajax request
function longrunningfunction() {
document.getElementById("myspan").innerHTML = "doing some work";
document.getElementById("mybutt").disabled = true;
document.getElementById("mybutt").className = "buttonDisabled";
$.ajax({
url: "/",
complete: function () {
//long running task here
document.getElementById("myspan").innerHTML = "done";
}
});
}
Try this
function longRunningTask(){
// Do the task here
document.getElementById("mybutt").value = "done";
}
function longrunningfunction() {
document.getElementById("mybutt").value = "doing some work";
setTimeout(function() {
longRunningTask();
}, 1);
}
Some browsers don't handle the onclick HTML attribute well. It's better to attach that event to the JS object, like this:
<button id="mybutt" class="buttonEnabled">
<span id="myspan">do some work</span>
</button>
<script type="text/javascript">
window.onload = function(){
butt = document.getElementById("mybutt");
span = document.getElementById("myspan");
butt.onclick = function () {
span.innerHTML = "doing some work";
butt.disabled = true;
butt.className = "buttonDisabled";
//long running task here
span.innerHTML = "done";
};
};
</script>
I made a fiddle with working example http://jsfiddle.net/BZWbH/2/
Have you tried adding a listener for "onmousedown" to change the button text, and using the click event for the long-running function?
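A minimal sketch of that idea, reusing the element IDs from the question (untested against the original fiddle):
var butt = document.getElementById("mybutt");
var span = document.getElementById("myspan");

// mousedown fires (and the new text can paint) before click starts the heavy work
butt.addEventListener("mousedown", function () {
  span.innerHTML = "doing some work";
});

butt.addEventListener("click", function () {
  // long running task here
  span.innerHTML = "done";
});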
I slightly modified your code at jsfiddle:
$("#btn").on("click", dowork);
function dowork() {
document.getElementById("btn").innerHTML = "working";
setTimeout(function() {
for (var i = 1; i<1000000000; i++) {
//
}
document.getElementById("btn").innerHTML = "done!";
}, 100);
}
A timeout set to the more reasonable value of 100ms did the trick for me. Try it.
Try adjusting the latency to find the best value.
A DOM buffer also exists in the default browser on Android; long-running JavaScript only flushes the DOM buffer once. Use setTimeout(..., 50) to solve it.
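A minimal sketch of that pattern, reusing the element IDs from the earlier snippets:
document.getElementById("myspan").innerHTML = "doing some work";

// give the browser roughly 50ms to flush its DOM buffer before blocking the thread
setTimeout(function () {
  // long running task here
  document.getElementById("myspan").innerHTML = "done";
}, 50);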
I have adapted Estradiaz's double animation frame method for async/await:
async function waitForDisplayUpdate() {
await waitForNextAnimationFrame();
await waitForNextAnimationFrame();
}
function waitForNextAnimationFrame() {
return new Promise((resolve) => {
window.requestAnimationFrame(() => resolve());
});
}
async function main() {
const startTime = performance.now();
for (let i = 1; i <= 5; i++) {
setStatus("Step " + i);
await waitForDisplayUpdate();
wasteCpuTime(1000);
}
const elapsedTime = Math.round(performance.now() - startTime);
setStatus(`Completed in ${elapsedTime} ms`);
}
function wasteCpuTime(ms) {
const startTime = performance.now();
while (performance.now() < startTime + ms) {
if (Math.random() == 0) {
console.log("A very rare event has happened.");
}
}
}
function setStatus(s) {
document.getElementById("status").textContent = s;
}
document.addEventListener("DOMContentLoaded", main);
Status: <span id="status">Start</span>

Is it possible to play HTML5 video in reverse?

Can HTML5 <video> tag be played in reverse, or do I have to download 2 videos (forward and backward play)?
I'm looking for a solution that avoids a user from downloading 2 videos.
Without even going into HTML5 or Javascript, many video formats are streaming formats that are designed to be played forward. Playing it backwards would require decoding the whole stream, storing each raw frame on the disk to avoid clobbering memory, then rendering the frames backwards.
At least one person actually tried that using mplayer, though, so it can be done, at least in principle.
I managed to do it in an update method. Every frame I decrease video.currentTime by the time elapsed, and so far that is the best result I managed to get.
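Roughly, a sketch of such an update loop (the element id is hypothetical; seek granularity and smoothness depend on the browser and codec):
const video = document.getElementById("myVideo"); // hypothetical element
let lastTimestamp = null;

function stepBackwards(timestamp) {
  if (lastTimestamp !== null) {
    const elapsedSeconds = (timestamp - lastTimestamp) / 1000;
    // seek backwards by the wall-clock time that passed since the last frame
    video.currentTime = Math.max(0, video.currentTime - elapsedSeconds);
  }
  lastTimestamp = timestamp;
  if (video.currentTime > 0) {
    requestAnimationFrame(stepBackwards);
  }
}

video.addEventListener("loadedmetadata", () => {
  video.currentTime = video.duration; // start at the end
  requestAnimationFrame(stepBackwards);
});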
aMediaElement.playbackRate = -1;
UAs may not support this, though it is valid to set playbackRate to a negative value.
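If you want to probe for support first, a hedged feature check (behavior differs across engines; some throw, others clamp or silently ignore the value):
function supportsReversePlayback(mediaElement) {
  try {
    mediaElement.playbackRate = -1; // some engines throw NotSupportedError here
  } catch (e) {
    return false;
  }
  return mediaElement.playbackRate === -1; // others silently keep the old rate
}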
This snippet just shows how it could be done, but it takes some time to copy each frame, which depends heavily on the hardware.
It generates a canvas for each frame it has to play. When it reaches 100%, playback starts directly and plays backward, forward... and so on. The original video is also attached after the generation, but won't play automatically due to iframe rules.
It is fine for short videos and as a proof of concept.
Update:
I changed the order of the capture part so that the frame at max duration is skipped but the one at 0 is captured (forgot it before).
The frame at max duration caused a white canvas on every video I tried.
I also changed the playback to play in reverse order as soon as the last frame is reached, for an endless playback. So you can easily see that this playback is a bit CPU-intensive compared to hardware-accelerated videos.
fetch('https://i.imgur.com/BPQF5yy.mp4')
.then(res => res.blob())
.then(blob => {
return new Promise((res) => {
const fr = new FileReader();
fr.onload = e => res(fr.result);
fr.readAsDataURL(blob);
})
}).then(async(base64str) => {
const video = document.createElement("video");
video.src = base64str;
video.controls = true;
video.className = "w-50";
while (isNaN(video.duration))
await new Promise((r) => setTimeout(r, 50));
const FPS = 25;
const status = document.createElement("div"),
length = document.createElement("div");
length.innerText = `Generating ${Math.ceil(video.duration / (1 / FPS))} frames for a ${FPS} FPS playback (Video Duration = ${video.duration})`;
document.body.appendChild(length);
document.body.appendChild(status);
const frames = [],
copy = () => {
const c = document.createElement("canvas");
Object.assign(c, {
width: video.videoWidth,
height: video.videoHeight,
className: "w-50"
});
c.getContext("2d").drawImage(video, 0, 0);
return c;
};
// first seek outside of the loop this image won't be copied
video.currentTime = video.duration;
// this frame seems to be always white/transparent
while (video.currentTime) {
if (video.currentTime - 1 / FPS < 0)
video.currentTime = 0;
else
video.currentTime = video.currentTime - 1 / FPS;
await new Promise((next) => {
video.addEventListener('seeked', () => {
frames.push(copy());
status.innerText = (frames.length / (Math.ceil(video.duration / (1 / FPS))) * 100).toFixed(2) + '%';
next();
}, {
once: true
});
});
}
/*
* frames now contains all canvas elements created,
* I just append the first image and replace it on
* every tick with the next frame.
* using last.replaceWith(frames[currentPos]) guarantees a smooth playback.
*/
let i = 0, last = frames[0];
document.body.insertAdjacentHTML('beforeend', `<div class="w-50">Captured</div><div class="w-50">Video</div>`);
document.body.appendChild(frames[0]);
const interval = setInterval(() => {
if (frames[++i]) {
last.replaceWith(frames[i]);
last = frames[i];
} else {
frames.reverse();
i=0;
}
}, 1000 / FPS);
document.body.appendChild(video);
// won't :(
video.play();
});
/* Just for this example */
.w-50 {
width: 50%;
display: inline-block;
}
* {
margin: 0;
padding: 0;
font-family: Sans-Serif;
font-size: 12px;
}
I tried requestAnimationFrame, calculated the diff, and updated currentTime. This does not work; the video tag doesn't repaint fast enough.
Get the HTMLMediaElement's duration, then set an interval that runs every second, playing the media by setting .currentTime and decreasing that value by 1 each second. The media element must be fully downloaded for this to work. I've only tested this on 30-second clips and I'm unsure whether faster (less than 1 second) intervals are possible. Note that this method still plays the media forward within each second. Try increasing the media playback rate to make it feel more seamless.
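A minimal sketch of that approach (element id assumed; the clip must already be fully buffered, as noted above):
const media = document.getElementById("myVideo"); // hypothetical element
media.addEventListener("loadedmetadata", () => {
  let anchor = Math.floor(media.duration) - 1; // second-resolution playhead target
  media.currentTime = anchor;
  media.play();

  const timer = setInterval(() => {
    anchor -= 1; // decrease the target by one second on every tick
    if (anchor < 0) {
      clearInterval(timer);
      media.pause();
      return;
    }
    // the clip still plays forward within each second before jumping back
    media.currentTime = anchor;
  }, 1000);
});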
