I'm trying to play around with WebGLSyncs and I'm having a hard time getting a WebGLSync to be signaled.
The following reports the sync as unsignaled in every browser that supports WebGL2 (Chrome, Opera, Firefox):
function test() {
let canvas = document.createElement('canvas');
let gl = canvas.getContext('webgl2');
let sync = gl.fenceSync(gl.SYNC_GPU_COMMANDS_COMPLETE, 0);
gl.flush();
gl.finish();
let status = gl.getSyncParameter(sync, gl.SYNC_STATUS);
console.log(sync, status, status === gl.UNSIGNALED); // logs "true"
gl.deleteSync(sync);
}
I'm expecting this to work, since gl.finish() should wait until all GPU commands have been processed - but it looks like the sync fence was never signaled.
I would very much appreciate a minimal, working WebGLSync example that actually gets signaled. I searched GitHub for one but found nothing.
EDIT
Based on the answer from pleup, I put together this code sample which works fine in my environment (Windows + Chrome).
function sleep(ms) {
return new Promise(resolve => setTimeout(resolve, ms));
}
async function test() {
let canvas = document.createElement('canvas');
let gl = canvas.getContext('webgl2');
let sync = gl.fenceSync(gl.SYNC_GPU_COMMANDS_COMPLETE, 0);
gl.flush();
while (gl.getSyncParameter(sync, gl.SYNC_STATUS) === gl.UNSIGNALED) {
await sleep(100);
}
let status = gl.getSyncParameter(sync, gl.SYNC_STATUS);
console.log(sync, status, status === gl.SIGNALED);
gl.deleteSync(sync);
}
test()
You have to wait for a tick (a return to the event loop) to see the sync signaled. The sync status will never change within the same JS task.
https://www.khronos.org/registry/webgl/specs/latest/2.0/#3.7.14
In order to ensure consistent behavior across platforms, sync objects may only transition to the signaled state when the user agent's event loop is not executing a task. In other words:
A sync object must not become signaled until control has returned to the user agent's main loop.
Repeatedly fetching a sync object's SYNC_STATUS parameter in a loop, without returning control to the user agent, must always return the same value.
from the WebGL2 spec
5.39 Sync objects' results must not be made available in the current frame
In OpenGL ES 3.0, if the appropriate primitives (e.g. glFinish() or another synchronous API) are called, a sync object may be signaled in the same frame it was issued. In WebGL, in order to improve application portability, a sync object must never transition to the signaled state in the same frame the sync was issued. See the specification of getSyncParameter and clientWaitSync for discussion and rationale.
async function test() {
let canvas = document.createElement('canvas');
let gl = canvas.getContext('webgl2');
let sync = gl.fenceSync(gl.SYNC_GPU_COMMANDS_COMPLETE, 0);
gl.flush();
gl.finish();
await waitFrame();
let status = gl.getSyncParameter(sync, gl.SYNC_STATUS);
console.log(sync, glEnumToString(gl, status));
gl.deleteSync(sync);
}
function waitFrame() {
return new Promise((resolve) => {
requestAnimationFrame(resolve);
});
}
function glEnumToString(gl, v) {
for (const k in gl) {
if (gl[k] === v) {
return k;
}
}
return `0x${v.toString(16)}`;
}
test();
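If you need this often, the same idea can be wrapped in a promise-returning helper. This is only a minimal sketch (the helper name and poll interval are my own choices): it calls gl.clientWaitSync with a timeout of 0 so the call never blocks, and re-checks on later tasks until the sync reports ALREADY_SIGNALED or CONDITION_SATISFIED.
function syncToPromise(gl, sync, pollMs = 10) {
  return new Promise((resolve, reject) => {
    function check() {
      // timeout of 0: returns immediately with the current state of the sync
      const res = gl.clientWaitSync(sync, 0, 0);
      if (res === gl.WAIT_FAILED) {
        reject(new Error('clientWaitSync failed'));
      } else if (res === gl.TIMEOUT_EXPIRED) {
        // not signaled yet - poll again on a later task so the sync can transition
        setTimeout(check, pollMs);
      } else {
        // ALREADY_SIGNALED or CONDITION_SATISFIED
        resolve();
      }
    }
    check();
  });
}
// usage (after gl.flush()): await syncToPromise(gl, sync);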
Related
I have a WebSocket that receives data from a server every 100 to 200 ms (I have tried both a shared web worker and doing everything in the main.js file).
When new JSON data arrives, my main.js runs filter_json_run_all(json_data), which updates Tabulator.js tables and Dygraph.js graphs with some custom colour coding based on whether values are increasing or decreasing.
1) WebSocket JSON data (every 100 ms or less) -> 2) run filter_json_run_all(json_data) (takes 150 to 200 ms) -> 3) repeat 1 & 2 forever
Because filter_json_run_all takes longer than the arrival interval, a backlog of operations builds up and the timestamp of the incoming JSON data quickly falls behind the actual time (json_time 15:30:12 vs actual time 15:31:30).
This causes users on different PCs to drift out of sync with each other, depending on when they opened or refreshed the website.
It is caused only by the long filter_json_run_all() function; if all I did was console.log(json_data), everyone would stay perfectly in sync.
I would be very grateful for any ideas on how to prevent this blocking / backlog of incoming WebSocket JSON data caused by a slow-running JavaScript function :)
I tried a shared web worker, which works, but it doesn't get around main.js being blocked by filter_json_run_all(). I don't think I can move filter_json_run_all() into the worker, since all the graph and table objects are defined in main.js, and I also have callbacks for when I click on a table to update a value manually (bidirectional WebSocket).
If you have any ideas or tips at all I will be very grateful :)
worker.js:
const connectedPorts = [];
// Create socket instance.
var socket = new WebSocket(
'ws://'
+ 'ip:port'
+ '/ws/'
);
// Send initial package on open.
socket.addEventListener('open', () => {
const package = JSON.stringify({
"time": 123456,
"channel": "futures.tickers",
"event": "subscribe",
"payload": ["BTC_USD", "ETH_USD"]
});
socket.send(package);
});
// Send data from socket to all open tabs.
socket.addEventListener('message', ({ data }) => {
const package = JSON.parse(data);
connectedPorts.forEach(port => port.postMessage(package));
});
/**
* When a new thread is connected to the shared worker,
* start listening for messages from the new thread.
*/
self.addEventListener('connect', ({ ports }) => {
const port = ports[0];
// Add this new port to the list of connected ports.
connectedPorts.push(port);
/**
* Receive data from main thread and determine which
* actions it should take based on the received data.
*/
port.addEventListener('message', ({ data }) => {
const { action, value } = data;
// Send message to socket.
if (action === 'send') {
socket.send(JSON.stringify(value));
// Remove port from connected ports list.
} else if (action === 'unload') {
const index = connectedPorts.indexOf(port);
connectedPorts.splice(index, 1);
}
});
Main.js - this is only part of filter_json_run_all, which continues on for another 6 or 7 Tabulator & Dygraph objects. I wanted to give an idea of some of the operations called with setTimeout() etc.
function filter_json_run_all(json_str){
const startTime = performance.now();
const data_in_array = json_str //JSON.parse(json_str.data);
// if ('DATETIME' in data_in_array){
// var milliseconds = (new Date()).getTime() - Date.parse(data_in_array['DATETIME']);
// console.log("milliseconds: " + milliseconds);
// }
if (summary in data_in_array){
if("DATETIME" in data_in_array){
var time_str = data_in_array["DATETIME"];
element_time.innerHTML = time_str;
}
// summary Data
const summary_array = data_in_array[summary];
var old_sum_arr_krw = [];
var old_sum_arr_irn = [];
var old_sum_arr_ntn = [];
var old_sum_arr_ccn = [];
var old_sum_arr_ihn = [];
var old_sum_arr_ppn = [];
var filtered_array_krw_summary = filterByProperty_summary(summary_array, "KWN")
old_sum_arr_krw.unshift(Table_summary_krw.getData());
Table_summary_krw.replaceData(filtered_array_krw_summary);
//Colour table
color_table(filtered_array_krw_summary, old_sum_arr_krw, Table_summary_krw);
var filtered_array_irn_summary = filterByProperty_summary(summary_array, "IRN")
old_sum_arr_irn.unshift(Table_summary_inr.getData());
Table_summary_inr.replaceData(filtered_array_irn_summary);
//Colour table
color_table(filtered_array_irn_summary, old_sum_arr_irn, Table_summary_inr);
var filtered_array_ntn_summary = filterByProperty_summary(summary_array, "NTN")
old_sum_arr_ntn.unshift(Table_summary_twd.getData());
Table_summary_twd.replaceData(filtered_array_ntn_summary);
//Colour table
color_table(filtered_array_ntn_summary, old_sum_arr_ntn, Table_summary_twd);
// remove formatting on fwds curves
setTimeout(() => {g_fwd_curve_krw.updateOptions({
'file': dataFwdKRW,
'labels': ['Time', 'Bid', 'Ask'],
strokeWidth: 1,
}); }, 200);
setTimeout(() => {g_fwd_curve_inr.updateOptions({
'file': dataFwdINR,
'labels': ['Time', 'Bid', 'Ask'],
strokeWidth: 1,
}); }, 200);
// remove_colors //([askTable_krw, askTable_inr, askTable_twd, askTable_cny, askTable_idr, askTable_php])
setTimeout(() => { askTable_krw.getRows().forEach(function (item, index) {
row = item.getCells();
row.forEach(function (value_tmp){value_tmp.getElement().style.backgroundColor = '';}
)}); }, 200);
setTimeout(() => { askTable_inr.getRows().forEach(function (item, index) {
row = item.getCells();
row.forEach(function (value_tmp){value_tmp.getElement().style.backgroundColor = '';}
)}); }, 200);
color_table Function
function color_table(new_arr, old_array, table_obj){
// If length is not equal
if(new_arr.length!=old_array[0].length)
console.log("Diff length");
else
{
// Comparing each element of array
for(var i=0;i<new_arr.length;i++)
//iterate old dict dict
for (const [key, value] of Object.entries(old_array[0][i])) {
if(value == new_arr[i][key])
{}
else{
// console.log("Different element");
if(key!="TENOR")
// console.log(table_obj)
table_obj.getRows()[i].getCell(key).getElement().style.backgroundColor = 'yellow';
if(key!="TIME")
if(value < new_arr[i][key])
//green going up
//text_to_speech(new_arr[i]['CCY'] + ' ' +new_arr[i]['TENOR']+ ' getting bid')
table_obj.getRows()[i].getCell(key).getElement().style.backgroundColor = 'Chartreuse';
if(key!="TIME")
if(value > new_arr[i][key])
//red going down
table_obj.getRows()[i].getCell(key).getElement().style.backgroundColor = 'Crimson';
}
}
}
}
Potential fudge / solution, thanks Aaron :):
function limiter(fn, wait){
let isCalled = false,
calls = [];
let caller = function(){
if (calls.length && !isCalled){
isCalled = true;
if (calls.length >2){
calls.splice(0,calls.length-1)
//remove zero' upto n-1 function calls from array/ queue
}
calls.shift().call();
setTimeout(function(){
isCalled = false;
caller();
}, wait);
}
};
return function(){
calls.push(fn.bind(this, ...arguments));
// let args = Array.prototype.slice.call(arguments);
// calls.push(fn.bind.apply(fn, [this].concat(args)));
caller();
};
}
This is then defined as a constant for a web worker to call:
const filter_json_run_allLimited = limiter(data => { filter_json_run_all(data); }, 300); // 300ms for examples
Web worker calls the limited function when new web socket data arrives:
// Event to listen for incoming data from the worker and update the DOM.
webSocketWorker.port.addEventListener('message', ({ data }) => {
// Limited function
filter_json_run_allLimited(data);
});
If anyone knows how websites like TradingView or other real-time, high-performance data-streaming sites achieve low-latency visualisation updates, please comment or reply below :)
I'm reticent to take a stab at answering this for real without knowing what's going on in color_table. My hunch, based on the behavior you're describing is that filter_json_run_all is being forced to wait on a congested DOM manipulation/render pipeline as HTML is being updated to achieve the color-coding for your updated table elements.
I see you're already taking some measures to prevent some of these DOM manipulations from blocking this function's execution (via setTimeout). If color_table isn't already employing a similar strategy, that'd be the first thing I'd focus on refactoring to unclog things here.
It might also be worth throwing these DOM updates for processed events into a simple queue, so that if slow browser behavior creates a rendering backlog, the function actually responsible for invoking pending DOM manipulations can elect to skip outdated render operations to keep the UI acceptably snappy.
Edit: a basic queueing system might involve the following components:
The queue, itself (this can be a simple array, it just needs to be accessible to both of the components below).
A queue appender, which runs during filter_json_run_all, simply adding objects to the end of the queue representing each DOM manipulation job you plan to complete using color_table or one of your setTimeout callbacks. These objects should contain the operation to be performed (i.e. the function definition, uninvoked) and the parameters for that operation (i.e. the arguments you're passing into each function).
A queue runner, which runs on its own interval and invokes pending DOM manipulation tasks from the front of the queue, removing them as it goes. Since this operation has access to all of the objects in the queue, it can also take steps to optimize/combine similar operations to minimize the amount of repainting it's asking the browser to do before subsequent code can be executed. For example, if you've got several color_table operations that color the same cell multiple times, you can simply perform that operation once, with the data from the last color_table item in the queue involving that cell. Additionally, you can further optimize your interaction with the DOM by invoking the aggregated DOM manipulation operations inside a requestAnimationFrame callback, which ensures that scheduled reflows/repaints happen only when the browser is ready; this is preferable from a performance perspective to DOM manipulation queueing via setTimeout/setInterval. A minimal sketch of such a queue follows.
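Here is a rough sketch of that queueing idea (all names below are illustrative, not taken from your code): filter_json_run_all only enqueues jobs keyed by the cell or table they touch, and a requestAnimationFrame loop drains the queue, so only the newest job per key actually reaches the DOM.
const renderQueue = new Map(); // key -> latest pending DOM job for that key

// queue appender: call this from filter_json_run_all instead of touching the DOM directly
function enqueueRender(key, job) {
  renderQueue.set(key, job); // overwrites any stale job queued for the same key
}

// queue runner: drains whatever is pending, once per animation frame
function runRenderQueue() {
  for (const job of renderQueue.values()) {
    job(); // e.g. () => cell.getElement().style.backgroundColor = 'yellow'
  }
  renderQueue.clear();
  requestAnimationFrame(runRenderQueue);
}
requestAnimationFrame(runRenderQueue);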
I have set a timeout for sending back an error. The problem is that if I need to clear the timeout with clearTimeout() it does indeed kill it - I can see that because errTimeout's _kill value shows true in the debugger. But for some reason Node still keeps running the script until the timeOutPeriod is over. I guess it won't really create issues in production, because the calling function will receive the returned value, but it still bugs me that it keeps waiting instead of ending the script.
return new Promise((resolve,reject) => {
function checkResponse () {
//creates a timeout that returns an error if data not receieved after the specified time.
let errTimeout = setTimeout(reject, config.timeOutPeriod);
//the +1 is there because we originally reduced one, we need to use the physical number now.
if(layerKeycodes.length !== (rows+1) * (columns+1)){
//if the array is not complete, check again in 100 ms
setTimeout(checkResponse, 100);
} else {
//clear the error timeout
clearTimeout(errTimeout);
//send the layerKeycodes to the calling function
resolve(layerKeycodes);
}
}
// kick off the first check
checkResponse();
});
It looks like this code is something you're trying to fit into getLayerKeycodes() from this other question to somehow know when all the data has been received from your keyboard hardware.
I'll illustrate how you can plug into that without using timers. Here's what you started with in that other question:
Your original function
const getLayerKeycodes = (keyboard, layer, rows, columns) => {
//array that stores all the keycodes according to their order
let layerKeycodes = [];
//rows and columns start the count at 0 in low level, so we need to decrease one from the actual number.
columns --;
rows --;
//loop that asks about all the keycodes in a layer
const dataReceived = (err, data) => {
if(err) {
return err;
}
// push the current keycode to the array
// The keycode is always returned as the fifth object.
layerKeycodes.push(data[5]);
console.log(layerKeycodes);
};
for (let r = 0 , c = 0;c <= columns; r ++){
//callback to fire once data is receieved back from the keyboard.
if(r > rows){
c++;
//r will turn to 0 once the continue fires and the loop executes again
r = -1;
continue;
}
//Start listening and call dataReceived when data is received
keyboard[0].read(dataReceived);
//Ask keyboard for information about keycode
// [always 0 (first byte is ignored),always 0x04 (get_keycode),layer requested,row being checked,column being checked]
keyboard[0].write([0x01,0x04,layer,r,c]);
}
console.log(layerKeycodes);
}
Manually created promise to resolve upon completion of all rows/columns
And, you can incorporate the completion detection code inside of the dataReceived() function without any timers and without reworking much of the rest of your logic like this:
const getLayerKeycodes = (keyboard, layer, rows, columns) => {
return new Promise((resolve, reject) => {
//array that stores all the keycodes according to their order
const layerKeycodes = [];
const totalCells = rows * columns;
let abort = false;
//rows and columns start the count at 0 in low level, so we need to decrease one from the actual number.
columns--;
rows--;
// function that gets with keyboard data
function dataReceived(err, data) {
if (err) {
abort = true; // set flag to stop sending more requests
reject(err);
return;
}
// push the current keycode to the array
// The keycode is always returned as the fifth object.
layerKeycodes.push(data[5]);
// now see if we're done with all of them
if (layerKeycodes.length >= totalCells) {
resolve(layerKeycodes);
}
}
// loop that asks about all the keycodes in a layer
for (let r = 0, c = 0; c <= columns; r++) {
// stop sending more requests if we've already gotten an error
if (abort) {
break;
}
//callback to fire once data is receieved back from the keyboard.
if (r > rows) {
c++;
//r will turn to 0 once the continue fires and the loop executes again
r = -1;
continue;
}
//Start listening and call dataReceived when data is received
keyboard[0].read(dataReceived);
//Ask keyboard for information about keycode
// [always 0 (first byte is ignored),always 0x04 (get_keycode),layer requested,row being checked,column being checked]
keyboard[0].write([0x01, 0x04, layer, r, c]);
}
});
}
A simplified version by promisifying the read function
And, here's a bit simpler version that promisifies the read function so we can use await on it and then just use an async function and a dual nested for loop for simpler loop mechanics.
const util = require('util');
async function getLayerKeycodes(keyboard, layer, rows, columns) {
// promisify the keyboard.read()
const readKeyboard = util.promisify(keyboard[0].read).bind(keyboard[0]);
//array that stores all the keycodes according to their order
const layerKeycodes = [];
// loop that asks about all the keycodes in a layer
for (let rowCntr = 0; rowCntr < rows; rowCntr++) {
for (let colCntr = 0; colCntr < columns; colCntr++) {
// Start listening and collect the promise
const readPromise = readKeyboard();
// Ask keyboard for information about keycode
// [always 0 (first byte is ignored),always 0x04 (get_keycode),layer requested,row being checked,column being checked]
keyboard[0].write([0x01, 0x04, layer, rowCntr, colCntr]);
// wait for data to come in
const data = await readPromise;
// push the current keycode to the array
// The keycode is always returned as the fifth object.
layerKeycodes.push(data[5]);
}
}
return layerKeycodes;
}
This also makes sure that we send a write, then wait for the data from that write to come back before we send the next write, which seems like a potentially safer way to handle the hardware. Your original code fires all the writes at once, which might work, but it seems like the reads could come back in any order. This guarantees sequential order in the layerKeycodes array, which seems safer (I'm not sure whether that matters with this data or not).
Error handling in this last version is somewhat automatically handled by the async function and the promises. If the read returns an error, then the readPromise will automatically reject which will abort our loop and in turn reject the promise that the async function returned. So, we don't have to do the abort checking that the previous function with the manually created promise had.
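For completeness, here's roughly how the promisified version might be called (a sketch only - keyboard, layer, rows and columns are whatever your surrounding code already provides):
async function readLayer() {
  try {
    const layerKeycodes = await getLayerKeycodes(keyboard, layer, rows, columns);
    console.log('received', layerKeycodes.length, 'keycodes:', layerKeycodes);
  } catch (err) {
    // a failed keyboard read rejects the promise and lands here
    console.error('reading layer keycodes failed:', err);
  }
}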
Now, of course, I don't have the ability to run any of these to test them so it's possible there are some typos somewhere, but hopefully you can work through any of those and see the concept for how these work.
There is a clear memory leak in my code that causes my used memory to go from 5 GB to 15.7 GB in a span of 40-60 seconds, then crashes my program with an OOM error. I believe this happens when I am creating tensors to form the dataset and not when I am training the model. My data consists of 25,000 images stored locally. As such, I used the built-in tensorflow.js function tf.data.generator(generator) described here to create the dataset. I believe this is the best and most efficient way to create a large dataset, as mentioned here.
Example
I used a helper class to create my dataset by passing in the path to the images
class Dataset{
constructor(dirPath){
this.paths = this.#generatePaths(dirPath);
}
// Generate file paths for all images to be read as buffer
#generatePaths = (dirPath) => {
const dir = fs.readdirSync(dirPath, {withFileTypes: true})
.filter(dirent => dirent.isDirectory())
.map(folder => folder.name)
let imagePaths = [];
dir.forEach(folder => {
fs.readdirSync(path.join(dirPath, folder)).filter(file => {
return path.extname(file).toLocaleLowerCase() === '.jpg'
}).forEach(file => {
imagePaths.push(path.resolve(path.join(dirPath, folder, file)))
})
})
return imagePaths;
}
// Convert image buffer to a Tensor object
#generateTensor = (imagePath) => {
const buffer = fs.readFileSync(imagePath);
return tf.node.decodeJpeg(buffer, 3)
.resizeNearestNeighbor([128, 128])
.toFloat()
.div(tf.scalar(255.0))
}
// Label the data with the corresponding class
#labelArray(index){return Array.from({length: 2}, (_, k) => k === index ? 1 : 0)};
// Javascript generator function passed to tf.data.generator()
* #imageGenerator(){
for(let i=0; i<this.paths.length; ++i){
let image;
try {
image = this.#generateTensor(this.paths[i]);
} catch (error) {
continue;
}
console.log(tf.memory());
yield image;
}
}
// Javascript generator function passed to tf.data.generator()
* #labelGenerator(){
for(let i=0; i<this.paths.length; ++i){
const classIndex = (path.basename(path.dirname(this.paths[i])) === 'Cat' ? 0 : 1);
const label = tf.tensor1d(this.#labelArray(classIndex), 'int32')
console.log(tf.memory());
yield label;
}
}
// Load data
loadData = () => {
console.log('\n\nLoading data...')
const xs = tf.data.generator(this.#imageGenerator.bind(this));
const ys = tf.data.generator(this.#labelGenerator.bind(this));
const ds = tf.data.zip({xs, ys}).batch(32).shuffle(32);
return ds;
}
}
And I am creating my dataset like this:
const trainDS = new Dataset(trainPath).loadData();
Question
I am aware of built-in tfjs methods to manage memory such as tf.tidy() and tf.dispose(). However, I was unable to implement them in such a way to stop the memory leak, as the tensors are generated by the tf.data.generator function.
How would I go about successfully disposing the tensors from memory after they are yielded by the generators?
Every tensor you create, you need to dispose of - there is no garbage collection like you're used to in JS. That's because tensors are not kept in JS memory (they can be in GPU memory, a WASM module, etc.), so the JS engine cannot track them. They are more like pointers than normal variables.
For example, in your code:
return tf.node.decodeJpeg(buffer, 3)
.resizeNearestNeighbor([128, 128])
.toFloat()
.div(tf.scalar(255.0))
each chained operation creates an interim tensor that never gets disposed.
Read it this way:
const decoded = tf.node.decodeJpeg(buffer, 3)
const resized = decoded.resizeNearestNeighbor([128, 128])
const casted = resized.toFloat();
const normalized = casted.div(tf.scalar(255.0))
return normalized;
so you have 4 large tensors allocated somewhere
what you're missing is
tf.dispose([decoded, resized, casted]);
and later, when you're done with the image, also tf.dispose(image), which disposes normalized,
and the same goes for everything else that is a tensor.
I am aware of built-in tfjs methods to manage memory such as tf.tidy() and tf.dispose(). However, I was unable to implement them in such a way to stop the memory leak, as the tensors are generated by the tf.data.generator function.
you say you're aware, but you're doing exactly the same thing by creating interim tensors that you never dispose.
you can help yourself by wrapping such functions in a tf.tidy() that creates a local scope so everything that is not returned gets automatically released.
for example:
#generateTensor = (imagePath) => tf.tidy(() => {
const buffer = fs.readFileSync(imagePath);
return tf.node.decodeJpeg(buffer, 3)
.resizeNearestNeighbor([128, 128])
.toFloat()
.div(tf.scalar(255.0));
});
which means the interim tensors will get disposed of, but you still need to dispose the return value once you're done with it.
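As a quick sanity check, a rough pattern (illustrative only, reusing the buffer from the code above) is to dispose the surviving tensor explicitly once nothing needs it any more, and watch tf.memory().numTensors stay flat between iterations:
const normalized = tf.tidy(() => {
  const decoded = tf.node.decodeJpeg(buffer, 3);              // interim
  const resized = decoded.resizeNearestNeighbor([128, 128]);  // interim
  return resized.toFloat().div(255.0);                        // only this survives tidy
});
// ... hand `normalized` to the model / dataset ...
tf.dispose(normalized);              // release it once it is no longer needed
console.log(tf.memory().numTensors); // should not keep climbing between iterations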
I've got a bunch of integration tests using headless chrome. Because restarting the browser on an entirely new profile is so expensive the harness tries to "clean up" the browser state (flush caches, clear cookies and storage, ...) on teardown.
However there's a recurring issue that during the cleanup phase some async operations resolve and try to do whatever they do in a now nonsensical state.
There are two issues here:
async stack trace support in CDT is listed as experimental and doesn't appear at all in the response (possibly because it has to be enabled via some hidden flag)
I have no idea what's still running at that point, and can't really even debug what breaks due to (1)
Is there any way to improve the situation except by trawling through heisenbugs as they occur, trying to slowly make my way up the async call stacks through ever more logging until the root cause is found?
First we create a hook so we can capture all XHR requests. You'll have to execute this before any of your other scripts load - probably put it in your boot/prepare script before running tests.
Below I have implemented a start and a stop button. Start makes 300 XHR requests the "normal" way. If you press stop, you can cancel them all. Ideally you'd put the stop handler code in a beforeunload event.
If you don't want to stop them, you can analyze their state, requested URLs, etc. from one neat array where you keep track of everything within code.
This example works because the browser can only make "so" many requests at the same time; the rest wait in the queue as pending until a slot comes free. I used 300 requests because I don't know a large/slow source to request from that isn't CORS protected, and this gives us humans enough time to press the stop button (I hope).
function addXMLRequestCallback(callback){
var oldSend, i;
if( XMLHttpRequest.callbacks ) {
// we've already overridden send() so just add the callback
XMLHttpRequest.callbacks.push( callback );
} else {
// create a callback queue
XMLHttpRequest.callbacks = [callback];
// store the native send()
oldSend = XMLHttpRequest.prototype.send;
// override the native send()
XMLHttpRequest.prototype.send = function(){
// process the callback queue
// the xhr instance is passed into each callback but seems pretty useless
// you can't tell what its destination is or call abort() without an error
// so only really good for logging that a request has happened
// I could be wrong, I hope so...
// EDIT: I suppose you could override the onreadystatechange handler though
for( i = 0; i < XMLHttpRequest.callbacks.length; i++ ) {
XMLHttpRequest.callbacks[i]( this );
}
// call the native send()
oldSend.apply(this, arguments);
}
}
}
/**
* adding some debug data to the XHR objects. Note, don't depend on this,
* this is against good practice; ideally you'll have your own wrapper
* to deal with xhr objects and meta data.
* The same way you can extend the XHR object to catch post data etc...
*/
var xhrProto = XMLHttpRequest.prototype,
origOpen = xhrProto.open,
origSend = xhrProto.send;
xhrProto.open = function (method, url) {
this._url = url;
return origOpen.apply(this, arguments);
};
xhrProto.send = function (data) {
this._data = data;
return origSend.apply(this, arguments);
};
+function() {
var xhrs = [],
i,
statuscount = 0,
status = document.getElementById('status'),
DONE = 4;
addXMLRequestCallback((xhr) => {
xhrs.push(xhr);
});
document.getElementById('start').addEventListener('click',(e) => {
statuscount = 0;
var data = JSON.stringify({
'user': 'person',
'pwd': 'password',
'organization': 'place',
'requiredkey': 'key'
});
for(var i = 0;i < 300; i++) {
var oReq = new XMLHttpRequest();
oReq.addEventListener("load", (e) => {
statuscount++;
status.value=statuscount;
});
oReq.open("GET", 'https://code.jquery.com/jquery-3.4.1.js');
oReq.send(data);
}
});
document.getElementById('cancel').addEventListener('click', (event) => {
for(i = 0; i < xhrs.length; i++) {
if(xhrs[i].readyState !== DONE) {
console.log(xhrs[i]._url, xhrs[i]._data , 'is not done');
}
}
/** Cancel everything */
for(i = 0; i < xhrs.length; i++) {
if(xhrs[i]) {
xhrs[i].abort();
}
}
});
}();
<button id="start">start requests</button>
<button id="cancel">cancel requests</button>
<progress id="status" value="0" max="300"></progress>
Code of addXMLRequestCallback courtesy of meouw from this answer
Code of xhrProto keeping debug variables courtesy of Joel Richard from this answer
I have a button which runs a long running function when it's clicked. Now, while the function is running, I want to change the button text, but I'm having problems in some browsers like Firefox, IE.
html:
<button id="mybutt" class="buttonEnabled" onclick="longrunningfunction();"><span id="myspan">do some work</span></button>
javascript:
function longrunningfunction() {
document.getElementById("myspan").innerHTML = "doing some work";
document.getElementById("mybutt").disabled = true;
document.getElementById("mybutt").className = "buttonDisabled";
//long running task here
document.getElementById("myspan").innerHTML = "done";
}
Now this has problems in Firefox and IE (in Chrome it works OK),
so I thought I'd put it into a setTimeout:
function longrunningfunction() {
document.getElementById("myspan").innerHTML = "doing some work";
document.getElementById("mybutt").disabled = true;
document.getElementById("mybutt").className = "buttonDisabled";
setTimeout(function() {
//long running task here
document.getElementById("myspan").innerHTML = "done";
}, 0);
}
but this doesn't work in Firefox either! The button gets disabled and changes colour (due to the new CSS class), but the text does not change.
I have to set the delay to 50 ms instead of just 0 ms to make it work (change the button text). I find that fragile at best: I could understand it working with just a 0 ms delay, but what would happen on a slower computer - would Firefox need 100 ms in the setTimeout there? I tried many values - 1 ms, 10 ms, 20 ms - and it won't refresh; only 50 ms works.
So I followed the advice in this topic:
Forcing a DOM refresh in Internet explorer after javascript dom manipulation
so I tried:
function longrunningfunction() {
document.getElementById("myspan").innerHTML = "doing some work";
var a = document.getElementById("mybutt").offsetTop; //force refresh
//long running task here
document.getElementById("myspan").innerHTML = "done";
}
but it doesn't work (Firefox 21). Then I tried:
function longrunningfunction() {
document.getElementById("myspan").innerHTML = "doing some work";
document.getElementById("mybutt").disabled = true;
document.getElementById("mybutt").className = "buttonDisabled";
var a = document.getElementById("mybutt").offsetTop; //force refresh
var b = document.getElementById("myspan").offsetTop; //force refresh
var c = document.getElementById("mybutt").clientHeight; //force refresh
var d = document.getElementById("myspan").clientHeight; //force refresh
setTimeout(function() {
//long running task here
document.getElementById("myspan").innerHTML = "done";
}, 0);
}
I even tried clientHeight instead of offsetTop, but nothing - the DOM does not get refreshed.
Can someone offer a reliable, preferably non-hacky solution?
thanks in advance!
As suggested here I also tried
$('#parentOfElementToBeRedrawn').hide().show();
to no avail
Force DOM redraw/refresh on Chrome/Mac
TL;DR:
looking for a RELIABLE cross-browser method to force a DOM refresh WITHOUT using setTimeout (preferred, because the delay needed varies with the type of long-running code, the browser and the computer speed - setTimeout needs anywhere from 50 to 100 ms depending on the situation)
jsfiddle: http://jsfiddle.net/WsmUh/5/
Webpages are updated based on a single thread controller, and half the browsers don't update the DOM or styling until your JS execution halts, giving computational control back to the browser. That means if you set some element.style.[...] = ... it won't kick in until your code finishes running (either completely, or because the browser sees you're doing something that lets it intercept processing for a few ms).
You have two problems: 1) your button has a <span> in it. Remove that, just set .innerHTML on the button itself. But this isn't the real problem of course. 2) you're running very long operations, and you should think very hard about why, and after answering the why, how:
If you're running a long computational job, cut it up into timeout callbacks (or, in 2019, await/async - see the note at the end of this answer). Your examples don't show what your "long job" actually is (a spin loop doesn't count), but you have several options depending on the browsers you target, with one GIANT footnote: don't run long jobs in JavaScript, period. JavaScript is a single-threaded environment by specification, so any operation you want to do should be able to complete in milliseconds. If it can't, you're literally doing something wrong.
If you need to calculate difficult things, offload it to the server with an AJAX operation (universal across browsers, often giving you a) faster processing for that operation and b) a good 30 seconds of time that you can asynchronously not-wait for the result to be returned) or use a webworker background thread (very much NOT universal).
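For the AJAX route, a minimal sketch (the endpoint and payload below are placeholders, not a real API) looks like this - the page keeps rendering and responding while the server does the heavy lifting:
async function offloadHeavyWork(payload) {
  document.getElementById("myspan").innerHTML = "doing some work";
  // the await suspends this function without blocking the page
  const response = await fetch("/api/heavy-work", {   // placeholder endpoint
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify(payload)
  });
  const result = await response.json();
  document.getElementById("myspan").innerHTML = "done";
  return result;
}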
If your calculation takes long but not absurdly so, refactor your code so that you perform parts, with timeout breathing space:
function doLongCalculation(callbackFunction) {
var partialResult = {};
// part of the work, filling partialResult
setTimeout(function(){ doSecondBit(partialResult, callbackFunction); }, 10);
}
function doSecondBit(partialResult, callbackFunction) {
// more 'part of the work', filling partialResult
setTimeout(function(){ finishUp(partialResult, callbackFunction); }, 10);
}
function finishUp(partialResult, callbackFunction) {
var result;
// do last bits, forming final result
callbackFunction(result);
}
A long calculation can almost always be refactored into several steps, either because you're performing several steps, or because you're running the same computation a million times, and can cut it up into batches. If you have (exaggerated) this:
var results = [];
for(var i=0; i<1000000; i++) {
// computation is performed here
if(...) results.push(...);
}
then you can trivially cut this up into a timeout-relaxed function with a callback
function runBatch(start, end, terminal, results, callback) {
var i;
for(var i=start; i<end; i++) {
// computation is performed here
if(...) results.push(...); }
if(i>=terminal) {
callback(results);
} else {
var inc = end-start;
setTimeout(function() {
runBatch(start+inc, end+inc, terminal, results, callback);
},10);
}
}
function dealWithResults(results) {
...
}
function doLongComputation() {
runBatch(0,1000,1000000,[],dealWithResults);
}
TL;DR: don't run long computations, but if you have to, make the server do the work for you and just use an asynchronous AJAX call. The server can do the work faster, and your page won't block.
The JS examples of how to deal with long computations in JS at the client are only here to explain how you might deal with this problem if you don't have the option to do AJAX calls, which 99.99% of the time will not be the case.
edit
also note that your bounty description is a classic case of The XY problem
2019 edit
In modern JS the await/async concept vastly improves upon timeout callbacks, so use those instead. Any await lets the browser know that it can safely run scheduled updates, so you write your code in a "structured as if it's synchronous" way, mark your functions as async, and then await their output whenever you call them:
async function doLongCalculation() {
let firstbit = await doFirstBit();
let secondbit = await doSecondBit(firstbit);
let result = await finishUp(secondbit);
return result;
}
async function doFirstBit() {
//...
}
async function doSecondBit() ...
...
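To make that concrete, here is a small self-contained sketch (my own example, not from the original answer) in which each chunk of work explicitly yields back to the browser before continuing, so the page can repaint between chunks:
const nextTask = () => new Promise(resolve => setTimeout(resolve));

async function doLongCalculation(items) {
  let sum = 0;
  for (let i = 0; i < items.length; i++) {
    sum += Math.sqrt(items[i]);          // stand-in for the real per-item work
    if (i % 10000 === 0) {
      document.title = `processed ${i}`; // visible progress
      await nextTask();                  // yield so the browser can render and handle events
    }
  }
  return sum;
}

doLongCalculation(Array.from({ length: 1000000 }, (_, i) => i)).then(console.log);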
SOLVED IT!! No setTimeout()!!!
Tested in Chrome 27.0.1453, Firefox 21.0, Internet Explorer 9.0.8112
$("#btn").on("mousedown",function(){
$('#btn').html('working');}).on('mouseup', longFunc);
function longFunc(){
//Do your long running work here...
for (i = 1; i<1003332300; i++) {}
//And on finish....
$('#btn').html('done');
}
DEMO HERE!
As of 2019 one uses a double requestAnimationFrame to skip a frame, instead of creating a race condition using setTimeout.
function doRun() {
document.getElementById('app').innerHTML = 'Processing JS...';
requestAnimationFrame(() =>
requestAnimationFrame(function(){
//blocks render
confirm('Heavy load done')
document.getElementById('app').innerHTML = 'Processing JS... done';
}))
}
doRun()
<div id="app"></div>
As a usage example, think of calculating pi with a Monte Carlo method in an endless loop:
using a for loop to mock while(true), since a real endless loop breaks the page
function* piMonteCarlo(r = 5, yield_cycle = 10000){
let total = 0, hits = 0, x=0, y=0, rsqrd = Math.pow(r, 2);
while(true){
total++;
if(total === Number.MAX_SAFE_INTEGER){
break;
}
x = Math.random() * r * 2 - r;
y = Math.random() * r * 2 - r;
(Math.pow(x,2) + Math.pow(y,2) < rsqrd) && hits++;
if(total % yield_cycle === 0){
yield 4 * hits / total
}
}
}
let pi_gen = piMonteCarlo(5, 1000), pi = 3;
for(let i = 0; i < 1000; i++){
// mocks
// while(true){
// basically last value will be rendered only
pi = pi_gen.next().value
console.log(pi)
document.getElementById('app').innerHTML = "PI: " + pi
}
<div id="app"></div>
And now think about using requestAnimationFrame for updates in between ;)
function* piMonteCarlo(r = 5, yield_cycle = 10000){
let total = 0, hits = 0, x=0, y=0, rsqrd = Math.pow(r, 2);
while(true){
total++;
if(total === Number.MAX_SAFE_INTEGER){
break;
}
x = Math.random() * r * 2 - r;
y = Math.random() * r * 2 - r;
(Math.pow(x,2) + Math.pow(y,2) < rsqrd) && hits++;
if(total % yield_cycle === 0){
yield 4 * hits / total
}
}
}
let pi_gen = piMonteCarlo(5, 1000), pi = 3;
function rAFLoop(calculate){
return new Promise(resolve => {
requestAnimationFrame( () => {
requestAnimationFrame(() => {
typeof calculate === "function" && calculate()
resolve()
})
})
})
}
let stopped = false
async function piDOM(){
while(stopped==false){
await rAFLoop(() => {
pi = pi_gen.next().value
console.log(pi)
document.getElementById('app').innerHTML = "PI: " + pi
})
}
}
function stop(){
stopped = true;
}
function start(){
if(stopped){
stopped = false
piDOM()
}
}
piDOM()
<div id="app"></div>
<button onclick="stop()">Stop</button>
<button onclick="start()">start</button>
As described in the "Script taking too long and heavy jobs" section of Events and timing in-depth (an interesting read, by the way):
[...] split the job into parts which get scheduled after each other. [...] Then there is a “free time” for the browser to respond between parts. It can render and react to other events. Both the visitor and the browser are happy.
I am sure that there are many times when a task cannot be split into smaller tasks, or fragments. But I am sure that there will be many other times when this is possible too! :-)
Some refactoring is needed in the example provided. You could create a function to do a piece of the work you have to do. It could begin like this:
function doHeavyWork(start) {
var total = 1000000000;
var fragment = 1000000;
var end = start + fragment;
// Do heavy work
for (var i = start; i < end; i++) {
//
}
Once the fragment is finished, the function should determine whether the next piece of work must be done, or whether execution has finished:
if (end == total) {
// If we reached the end, stop and change status
document.getElementById("btn").innerHTML = "done!";
} else {
// Otherwise, process next fragment
setTimeout(function() {
doHeavyWork(end);
}, 0);
}
}
Your main dowork() function would be like this:
function dowork() {
// Set "working" status
document.getElementById("btn").innerHTML = "working";
// Start heavy process
doHeavyWork(0);
}
Full working code at http://jsfiddle.net/WsmUh/19/ (seems to behave gently on Firefox).
If you don't want to use setTimeout then you are left with a WebWorker - this requires an HTML5-capable browser, however.
This is one way you can use them -
Define your HTML and an inline script (you don't have to use an inline script; you can just as well give a URL to an existing separate JS file):
<input id="start" type="button" value="Start" />
<div id="status">Preparing worker...</div>
<script type="javascript/worker">
postMessage('Worker is ready...');
onmessage = function(e) {
if (e.data === 'start') {
//simulate heavy work..
var max = 500000000;
for (var i = 0; i < max; i++) {
if ((i % 100000) === 0) postMessage('Progress: ' + (i / max * 100).toFixed(0) + '%');
}
postMessage('Done!');
}
};
</script>
For the inline script we mark it with type javascript/worker.
In the regular Javascript file -
The function that converts the inline script to a Blob URL that can be passed to a WebWorker. Note that this might not work in IE, where you will have to use a regular file:
function getInlineJS() {
var js = document.querySelector('[type="javascript/worker"]').textContent;
var blob = new Blob([js], {
"type": "text\/plain"
});
return URL.createObjectURL(blob);
}
Setup worker:
var ww = new Worker(getInlineJS());
Receive messages (or commands) from the WebWorker:
ww.onmessage = function (e) {
var msg = e.data;
document.getElementById('status').innerHTML = msg;
if (msg === 'Done!') {
alert('Next');
}
};
We kick off with a button-click in this demo:
document.getElementById('start').addEventListener('click', start, false);
function start() {
ww.postMessage('start');
}
Working example here:
http://jsfiddle.net/AbdiasSoftware/Ls4XJ/
As you can see the user-interface is updated (with progress in this example) even if we're using a busy-loop on the worker. This was tested with an Atom based (slow) computer.
If you don't want to or can't use a WebWorker, you have to use setTimeout.
This is because it is the only way (aside from setInterval) that allows you to queue up an event. As you noticed, you need to give it a few milliseconds, as this gives the UI engine "room to breathe", so to speak. Since JS is single-threaded, you cannot queue up events any other way (requestAnimationFrame will not work well in this context).
Hope this helps.
Update: I don't think in the long term that you can be sure of avoiding Firefox's aggressive avoidance of DOM updates without using a timeout. If you want to force a redraw / DOM update, there are tricks available, like adjusting the offset of elements, or doing hide() then show(), etc., but there is nothing very pretty available, and after a while when those tricks get abused and slow down user experience, then browsers get updated to ignore those tricks. See this article and the linked articles beside it for some examples: Force DOM redraw/refresh on Chrome/Mac
The other answers look like they have the basic elements needed, but I thought it would be worthwhile to mention that my practice is to wrap all interactive DOM-changing functions in a "dispatch" function which handles the necessary pauses needed to get around the fact that Firefox is extremely aggressive in avoiding DOM updates in order to score well on benchmarks (and to be responsive to users while browsing the internet).
I looked at your JSFiddle and customized a dispatch function like the one that many of my programs rely on. I think it is self-explanatory, and you can just paste it into your existing JSFiddle to see how it works:
$("#btn").on("click", function() { dispatch(this, dowork, 'working', 'done!'); });
function dispatch(me, work, working, done) {
/* work function, working message HTML string, done message HTML string */
/* only designed for a <button></button> element */
var pause = 50, old;
if (!me || me.tagName.toLowerCase() != 'button' || me.innerHTML == working) return;
old = me.innerHTML;
me.innerHTML = working;
setTimeout(function() {
work();
me.innerHTML = done;
setTimeout(function() { me.innerHTML = old; }, 1500);
}, pause);
}
function dowork() {
for (var i = 1; i<1000000000; i++) {
//
}
}
Note: the dispatching function also blocks calls from happening at the same time, because it can seriously confuse users if status updates from multiple clicks are happening together.
Fake an ajax request
function longrunningfunction() {
document.getElementById("myspan").innerHTML = "doing some work";
document.getElementById("mybutt").disabled = true;
document.getElementById("mybutt").className = "buttonDisabled";
$.ajax({
url: "/",
complete: function () {
//long running task here
document.getElementById("myspan").innerHTML = "done";
}
});
}
Try this
function longRunningTask(){
// Do the task here
document.getElementById("mybutt").value = "done";
}
function longrunningfunction() {
document.getElementById("mybutt").value = "doing some work";
setTimeout(function() {
longRunningTask();
}, 1);
}
Some browsers don't handle the onclick HTML attribute well. It's better to attach the event to the JS object instead, like this:
<button id="mybutt" class="buttonEnabled">
<span id="myspan">do some work</span>
</button>
<script type="text/javascript">
window.onload = function(){
butt = document.getElementById("mybutt");
span = document.getElementById("myspan");
butt.onclick = function () {
span.innerHTML = "doing some work";
butt.disabled = true;
butt.className = "buttonDisabled";
//long running task here
span.innerHTML = "done";
};
};
</script>
I made a fiddle with a working example: http://jsfiddle.net/BZWbH/2/
Have you tried adding a listener for "onmousedown" to change the button text, and using the click event for the long-running function?
I slightly modified your code at jsfiddle:
$("#btn").on("click", dowork);
function dowork() {
document.getElementById("btn").innerHTML = "working";
setTimeout(function() {
for (var i = 1; i<1000000000; i++) {
//
}
document.getElementById("btn").innerHTML = "done!";
}, 100);
}
Setting the timeout to a more reasonable value of 100 ms did the trick for me. Try it.
Try adjusting the latency to find the best value.
A DOM buffer also exists in the default browser on Android: long-running JavaScript only flushes the DOM buffer once, so use setTimeout(..., 50) to work around it.
I have adapted Estradiaz's double animation frame method for async/await:
async function waitForDisplayUpdate() {
await waitForNextAnimationFrame();
await waitForNextAnimationFrame();
}
function waitForNextAnimationFrame() {
return new Promise((resolve) => {
window.requestAnimationFrame(() => resolve());
});
}
async function main() {
const startTime = performance.now();
for (let i = 1; i <= 5; i++) {
setStatus("Step " + i);
await waitForDisplayUpdate();
wasteCpuTime(1000);
}
const elapsedTime = Math.round(performance.now() - startTime);
setStatus(`Completed in ${elapsedTime} ms`);
}
function wasteCpuTime(ms) {
const startTime = performance.now();
while (performance.now() < startTime + ms) {
if (Math.random() == 0) {
console.log("A very rare event has happened.");
}
}
}
function setStatus(s) {
document.getElementById("status").textContent = s;
}
document.addEventListener("DOMContentLoaded", main);
Status: <span id="status">Start</span>