javascript sleep function by Promise in loop - javascript

I intend to open a series of URLs in Firefox, each one opened 10 minutes after the previous one. Here is my code, which should be executed in the Firebug console:
function sleep (time) {
return new Promise((resolve) => setTimeout(resolve, time));
}
var urls = ["https://www.google.com/","https://www.bing.com/","https://www.reddit.com/"];
for(var i = 0; i < urls.length; i++)
sleep(600000 * i).then(() => {
window.open(urls[i]);
})
But it didn't work. Could anyone help me? Thank you~

The sleep function executes asynchronously, and the for loop finishes before any of the sleep callbacks run.
Because i is declared with var, every callback sees its final value of 3, so window.open receives urls[3], which is undefined.
Have a look:
function sleep (time) {
return new Promise((resolve) => setTimeout(resolve, time));
}
var urls = ["https://www.google.com/","https://www.bing.com/","https://www.reddit.com/"];
for(var i = 0; i < urls.length; i++)
sleep(600*i).then(() => {
console.log(i);
})
One solution is to use the let keyword.
With let, each iteration of the loop gets its own enclosed binding of i, so every callback sees the value it was created with.
function sleep (time) {
return new Promise((resolve) => setTimeout(resolve, time));
}
var urls = ["https://www.google.com/","https://www.bing.com/","https://www.reddit.com/"];
for(let i = 0; i < urls.length; i++)
sleep(6000*i).then(() => {
window.open(urls[i]);
})
jsFiddle solution.

Promises work very well with async/await functions.
The following declares a new asynchronous function and immediately invokes it; the async function runs independently of the code that called it. Its body reads very easily because it reads like a synchronous function:
function sleep(time) {
return new Promise((resolve) => setTimeout(resolve, time))
}
(async function() {
const urls = ["https://www.google.com/","https://www.bing.com/","https://www.reddit.com/"]
for (let url of urls) {
await sleep(1000)
console.log(url)
}
})()
Here's a version using Promise chaining:
function sleep(time) {
return new Promise((resolve) => setTimeout(resolve, time))
}
const urls = ["https://www.google.com/","https://www.bing.com/","https://www.reddit.com/"];
let p = Promise.resolve();
for (let url of urls) {
p = p.then( function () {
return sleep(1000);
} );
p = p.then( function () {
console.log(url);
return Promise.resolve();
} );
}
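If you also need to know when the whole sequence has finished, you can attach one more handler to the tail of the chain (a small addition to the snippet above):
p.then(function () {
  console.log('all urls processed');
});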

The problem is that i will be 3 in all three cases, so you need to save i, for example:
function sleep (time, i) {
return new Promise((resolve) => setTimeout(() => resolve(i), time));
}
var urls = ["https://www.google.com/","https://www.bing.com/","https://www.reddit.com/"];
for(var i = 0; i < urls.length; i++)
sleep(1 * i, i).then((index) => {
console.log(urls[index]);
})
But even this will not help: once the first new tab is opened and your original tab becomes inactive, the browser will throttle or stop the timers running in it.

I think an interval plus entries() would be better suited for this; here is an example with ES6:
const urls = [
  'https://www.google.com/',
  'https://www.bing.com/',
  'https://www.reddit.com'
]
const entries = urls.entries()
const loop = setInterval(() => {
  const {value, done} = entries.next()
  // entries() yields [index, url] pairs, so open the url part
  done ? clearInterval(loop) : open(value[1])
}, 600 * 10) // 6 seconds for the demo; use 1000 * 60 * 10 for 10 minutes

You could use setInterval() or setTimeout() instead of sleep to achieve this.
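For example, a minimal sketch with setTimeout, staggering the window.open calls by 10 minutes each (the urls array is the same as in the question):
var urls = ["https://www.google.com/","https://www.bing.com/","https://www.reddit.com/"];
var tenMinutes = 10 * 60 * 1000;
urls.forEach(function (url, i) {
  // schedule each url i * 10 minutes from now
  setTimeout(function () {
    // note: pop-up blockers may prevent window.open calls that are not
    // triggered directly by a user action
    window.open(url);
  }, tenMinutes * i);
});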

Related

Can someone tell me why my promise queue doesn't work?

I'm trying to write a simple promise queue function. The function will process 50 tasks with 10 concurrent promises:
const sendTasks = () => {
let tasks = 50;
const concurrentCount = 10;
let promisePool = 0;
while (tasks > 0) {
console.log(`current tasks: ${tasks}`);
while (promisePool < concurrentCount && tasks > 0) {
console.log("create promise");
tasks--;
promisePool++;
const promise = new Promise((resolve, reject) => {
setTimeout(() => {
resolve("haha");
}, Math.floor(Math.random() * 3) * 1000);
});
promise.then((value) => {
console.log(value);
promisePool--;
});
console.log(`current promisePool: ${promisePool}`);
}
}
return "done";
};
But when I execute it, the promises never seem to resolve, and it stays stuck in the tasks > 0 while loop. Can someone explain to me why the promises never resolve?
A while loop inside a synchronous function will never yield control flow to Promise .thens, or to anything else. You will need to restructure the code to wait for the promises to resolve without fully terminating the sendTasks function and also without blocking the engine.
One approach is to push each Promise to an array, then await a Promise.any on that array. Have the Promises remove themselves from the array when they're finished, and recursively push more Promises to the array. Then return when no more Promises exist in the array.
const sendTasks = async () => {
let tasks = 50;
const concurrentCount = 10;
let promisePool = 0;
let promises = [];
const enqueueNext = () => {
if (!tasks) return;
// creating this variable just for the sake of logging
const thisTask = tasks--;
console.log("create promise", thisTask);
const prom = new Promise((resolve) => {
setTimeout(() => {
promises = promises.filter(p => p !== prom);
console.log('resolving', thisTask);
resolve("haha");
// recursive asynchronous call; init the next promise, if there is one
enqueueNext();
}, Math.floor(Math.random() * 3) * 1000);
});
promises.push(prom);
};
for (let i = 0; i < concurrentCount; i++) {
enqueueNext();
}
while (promises.length) {
await Promise.any(promises);
}
console.log("done");
};
sendTasks();
JavaScript is single-threaded except when it explicitly isn’t (web workers, Node multiprocessing, … – not promises), so your while (tasks > 0) { is a busy loop that never returns control to the event loop and never gives the timers a chance to fire.
You need to give a function (.then) or continuation (async/await) to your promises so that you can yield back to the event loop and be told when to continue processing.
const sendTasks = async () => {
let tasks = 50;
const concurrentCount = 10;
const promisePool = new Set();
while (tasks > 0) {
console.log(`current tasks: ${tasks}`);
while (promisePool.size < concurrentCount && tasks > 0) {
console.log("create promise");
tasks--;
const promise = new Promise((resolve, reject) => {
setTimeout(() => {
resolve("haha");
}, Math.floor(Math.random() * 3) * 1000);
});
promise.then((value) => {
console.log(value);
promisePool.delete(promise);
});
promisePool.add(promise);
console.log(`current promisePool: ${[...promisePool]}`);
}
await Promise.race(promisePool);
}
// all tasks have been created here, but not necessarily completed
await Promise.all(promisePool);
return "done";
};
sendTasks().then(console.log);

I want to await a function to complete then run another function

This is the code I am trying with:
const arr = ['a' , 'b' ,'c', 'd']
const func = async () => {
let i = 0
let interval = setInterval(() => {
let x = arr[i++ % arr.length]
console.log(x)
if (i === 4 ) {
clearInterval(interval)
}
}, 2000)
}
const another_func = () => {
console.log('logic')
}
const main = async () => {
await func()
another_func()
}
main()
Output:
logic
a
b
c
d
When I run this, "logic" gets printed before all the elements of the array.
What should I do to print all the elements of the array first, and only then run the other function and print "logic"?
For that, you need to use Promise. Here is my solution:
const arr = ['a', 'b', 'c', 'd']
const func = () => new Promise((resolve, reject) => {
let i = 0
let interval = setInterval(() => {
let com = arr[i++ % arr.length]
console.log(com)
if (i === 4) {
clearInterval(interval);
resolve('success');
}
}, 2000)
})
const another_func = () => {
console.log('logic')
}
const main = async () => {
await func()
another_func()
}
main()
Your first async function doesn't use await, which is already a sign of a problem. setInterval schedules the execution of the callback, but the setInterval call itself immediately returns, so your async function returns, and the implicit promise it returns is resolved. So main is awaiting a promise that is immediately resolved.
You can typically get a correct implementation by promisifying setTimeout, i.e. you define a helper function that returns a promise which will resolve after a given delay. With that in place you can create a for loop with await in the first async function:
// Helper function that promisifies setTimeout
const delay = ms => new Promise(resolve => setTimeout(resolve, ms));
const arr = ['a' , 'b' ,'c', 'd'];
const func = async () => {
for (let i = 0; i < 4; i++) {
await delay(1000);
let x = arr[i % arr.length];
console.log(x);
}
}
const another_func = () => {
console.log('logic');
}
const main = async () => {
await func();
another_func();
}
main();

Add delay between each item in an array when looping over array

I have an async function that gets called that loops over an array and calls a function for each item.
In this example, the function is hitting an API endpoint and I need to wait for one item to finish before moving onto the next.
However, what currently happens is that each function gets called at roughly the same time, which is causing issues with the API response. So I need to wait 1 second between each request.
This is what I currently have
const delayedLoop = async () => {
const delay = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
const myAsyncFunc = async (i) => {
console.log(`item ${i}`);
await delay(0);
return true;
};
const arr = ['one', 'two', 'three'];
const promises = arr.map(
(_, i) =>
new Promise((resolve) =>
setTimeout(async () => {
await myAsyncFunc(i);
resolve(true);
}, 1000),
),
);
return Promise.all(promises);
}
const myFunc = async () => {
console.log('START');
await delayedLoop();
console.log('FINISH');
}
myFunc();
What happens is:
Logs START
waits 1 second
Logs all item ${i} together (without delay in between)
Immediately logs FINISH
What I want to happen is:
Logs START
waits 1 second
Logs item 1
waits 1 second
Logs item 2
waits 1 second
Logs item 3
Immediately logs FINISH
See JSFiddle to see it in action
You can do it like this, using a simple for loop:
const delayedLoop = async () => {
const delay = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
const myAsyncFunc = async (i) => {
console.log(`item ${i}`);
return true;
};
const arr = ['one', 'two', 'three'];
for(let i=0; i < arr.length; i++) {
await myAsyncFunc(i);
await delay(1000);
}
}
const myFunc = async () => {
console.log('START');
await delayedLoop();
console.log('FINISH');
}
myFunc();
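If you also need each call's result, a small variation of the same idea collects them and skips the delay after the last item (a sketch reusing the helpers from the answer above; the delayedLoopWithResults name is just for illustration):
const delayedLoopWithResults = async () => {
  const delay = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
  const myAsyncFunc = async (i) => {
    console.log(`item ${i}`);
    return true;
  };
  const arr = ['one', 'two', 'three'];
  const results = [];
  for (let i = 0; i < arr.length; i++) {
    results.push(await myAsyncFunc(i));
    // only wait between items, not after the last one
    if (i < arr.length - 1) await delay(1000);
  }
  return results;
};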

How to run "x" promises in parallel Javascript

I have a function "promiseFunction" which returns a promise, which is resolved at a later time.
I need to call this function many times; however, I only want a set number of executions of this function to happen at once.
The function calls some external single-threaded C code on my computer. If I call too many instances of this code at once I crash my system, but if I call it sequentially, one at a time, it's very slow, as only one thread of my CPU is doing any work.
So I came up with the code below, but it doesn't work. It will call the first 10 promises in parallel, but it slowly starts to call fewer and fewer promises at once, until it's only calling one promise at a time.
var totalNumberOfPromises = // total number times to run promiseFunction;
var promiseCounter = 0; // keep track of which promise call this is
for(w=0;w<10;w++){ // run 10 promises at a time
promiseFunction().then(function(resolve) {
loadNewPromise();
})
promiseCounter++;
}
function loadNewPromise(){
if(promiseCounter<totalNumberOfPromises){
promiseFunction().then(function(resolve) { loadNewPromise(); });
}else{
alert("Finished");
}
promiseCounter++;
}
Is there anything wrong with the code above that causes this behavior? And is there a standard way of doing this?
Here's a function I prepared earlier (I've used this for a few years now for just such a thing):
const multiQueue = length => {
length = (isNaN(length) || length < 1) ? 1 : length;
const q = Array.from({length}, () => Promise.resolve());
let index = 0;
const add = cb => {
index = (index + 1) % length;
return (q[index] = q[index].then(() => cb()));
};
return add;
};
// demo usage
const q = multiQueue(10);
let inFlight = 0;
let maxInFlight = 0;
const promiseFunction = (i) => {
inFlight++;
maxInFlight = Math.max(inFlight, maxInFlight);
const obj = {inFlight, maxInFlight, i};
return new Promise(resolve => {
setTimeout(() => {
inFlight--;
resolve(Object.assign(obj, {t:performance.now()}));
}, 10 );
})
};
for (let i = 0; i < 40; i++) {
q(() => promiseFunction(i)).then(v => console.log(JSON.stringify(v)));
}
You can see that there are at most 10 "inFlight" requests at any time.
How about something like this? If you construct your queue as an Array of functions that return a Promise, you can splice chunks out of it and process each chunk with a Promise.all.
const fakePromise = (id) => {
return new Promise((resolve) => {
setTimeout(() => {
console.log(`Resolving promise ${id}`)
resolve(id)
}, 100)
})
}
const queue = Array(100).fill().map((_, i) => {
return () => fakePromise(i)
})
const batchProcessPromises = (promises, batchSize) => {
if (promises && promises.length) {
return Promise.all(
promises.splice(0, batchSize)
.map(promise => promise())
)
.then(() => {
console.log('Batch complete')
return batchProcessPromises(promises, batchSize)
})
}
console.log('Batch complete')
return Promise.resolve()
}
batchProcessPromises(queue, 10)
.then(() => {
console.log('Time to get one with my day')
})
How do you plan to construct all your promises? This function mutates the original queue, so you would need to ensure that the array being passed into batchProcessPromises isn't shared. To get around this you could potentially use the spread operator like so:
batchProcessPromises([...queue], 10)
.then(() => {
console.log('Time to get one with my day', queue)
})
Fiddle here https://jsfiddle.net/stwilz/2bpdcxo6/24/

ES6 Promise replacement of async.eachLimit / async.mapLimit

With the async library, if I need to apply an asynchronous function to 1000 items, I can do that with:
async.mapLimit(items, 10, (item, callback) => {
foo(item, callback);
});
so that only 10 items are processed at the same time, limiting overhead and allowing control.
With ES6 promise, while I can easily do:
Promise.all(items.map((item) => {
return bar(item);
}));
that would process all 1000 items at the same time, which may cause a lot of problems.
I know Bluebird has ways to handle that, but I am looking for an ES6 solution.
If you don't care about the results, then it's quick to whip one up:
Promise.eachLimit = async (funcs, limit) => {
let rest = funcs.slice(limit);
await Promise.all(funcs.slice(0, limit).map(async func => {
await func();
while (rest.length) {
await rest.shift()();
}
}));
};
// Demo:
var wait = ms => new Promise(resolve => setTimeout(resolve, ms));
async function foo(s) {
await wait(Math.random() * 2000);
console.log(s);
}
(async () => {
let funcs = "ABCDEFGHIJKLMNOPQRSTUVWXYZ".split("").map(s => () => foo(s));
await Promise.eachLimit(funcs, 5);
})();
A key performance property is running the next available function as soon as any function finishes.
Preserving results
Preserving the results in order makes it a little less elegant perhaps, but not too bad:
Promise.mapLimit = async (funcs, limit) => {
let results = [];
await Promise.all(funcs.slice(0, limit).map(async (func, i) => {
results[i] = await func();
while ((i = limit++) < funcs.length) {
results[i] = await funcs[i]();
}
}));
return results;
};
// Demo:
var wait = ms => new Promise(resolve => setTimeout(resolve, ms));
async function foo(s) {
await wait(Math.random() * 2000);
console.log(s);
return s.toLowerCase();
}
(async () => {
let funcs = "ABCDEFGHIJKLMNOPQRSTUVWXYZ".split("").map(s => () => foo(s));
console.log((await Promise.mapLimit(funcs, 5)).join(""));
})();
There's nothing built in, but you can of course group them yourself into promise chains, and use a Promise.all on the resulting array of chains:
const items = /* ...1000 items... */;
const concurrencyLimit = 10;
const promise = Promise.all(items.reduce((promises, item, index) => {
// What chain do we add it to?
const chainNum = index % concurrencyLimit;
let chain = promises[chainNum];
if (!chain) {
// New chain
chain = promises[chainNum] = Promise.resolve();
}
// Add it
promises[chainNum] = chain.then(_ => foo(item));
return promises;
}, []));
Here's an example, showing how many concurrent promises there are any given time (and also showing when each "chain" is complete, and only doing 200 instead of 1,000):
const items = buildItems();
const concurrencyLimit = 10;
const promise = Promise.all(items.reduce((promises, item, index) => {
const chainNum = index % concurrencyLimit;
let chain = promises[chainNum];
if (!chain) {
chain = promises[chainNum] = Promise.resolve();
}
promises[chainNum] = chain.then(_ => foo(item));
return promises;
}, []).map(chain => chain.then(_ => console.log("Chain done"))));
promise.then(_ => console.log("All done"));
function buildItems() {
const items = [];
for (let n = 0; n < 200; ++n) {
items[n] = n;
}
return items;
}
var outstanding = 0;
function foo(item) {
++outstanding;
console.log("Starting " + item + " (" + outstanding + ")");
return new Promise(resolve => {
setTimeout(_ => {
--outstanding;
console.log("Resolving " + item + " (" + outstanding + ")");
resolve(item);
}, Math.random() * 500);
});
}
I should note that if you want to track the result of each of those, you'd have to modify the above; it doesn't try to track the results (!). :-)
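For example, one way to do that (a sketch, not part of the original answer, reusing items, concurrencyLimit, and foo from the snippet above) is to have each chain write its result into a shared array at the item's original index, so the final promise resolves with the results in input order:
const results = [];
const allDone = Promise.all(items.reduce((promises, item, index) => {
  const chainNum = index % concurrencyLimit;
  let chain = promises[chainNum];
  if (!chain) {
    chain = promises[chainNum] = Promise.resolve();
  }
  // record each result at the item's original index so order is preserved
  promises[chainNum] = chain.then(_ => foo(item)).then(result => {
    results[index] = result;
  });
  return promises;
}, [])).then(_ => results);
allDone.then(allResults => console.log("Got " + allResults.length + " results"));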
Using Array.prototype.splice
while (funcs.length) {
await Promise.all( funcs.splice(0, 100).map(f => f()) )
}
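For completeness, a self-contained sketch of that pattern (the funcs array, the wait helper, and the batch size of 10 are placeholders for illustration):
const wait = ms => new Promise(resolve => setTimeout(resolve, ms));
const funcs = "ABCDEFGHIJKLMNOPQRSTUVWXYZ".split("").map(s => async () => {
  await wait(Math.random() * 500);
  console.log(s);
});
(async () => {
  // splice off a batch, run it in parallel, and only then start the next batch
  while (funcs.length) {
    await Promise.all(funcs.splice(0, 10).map(f => f()));
  }
  console.log("all batches done");
})();
Note that, unlike the Promise.eachLimit version above, a fixed batch waits for its slowest task before the next batch starts.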
This is the closest one to async.eachLimit:
Promise.eachLimit = async (coll, limit, asyncFunc) => {
let ret = [];
const splitArr = coll.reduce((acc,item,i)=> (i%limit) ? acc :[...acc,coll.slice(i,i+limit)],[])
for(let i =0; i< splitArr.length;i++){
ret[i]=await Promise.all(splitArr[i].map(ele=>asyncFunc(ele)));
}
return ret;
}
const wait = ms => new Promise(resolve => setTimeout(resolve, ms));
async function foo(s) {
await wait(Math.random() * 2000);
console.log(s);
return s.toLowerCase();
}
(async () => {
let arr = "ABCDEFGHIJKLMNOPQRSTUVWXYZ".split("");
console.log((await Promise.eachLimit(arr, 5, foo)));
})();
