I am modernizing some code. It has a piece to load database implemented as:
// Load customers through an async.queue: insertCustomer is the worker,
// DATABASE_PARALLELISM bounds how many insertions run concurrently.
var customerQueue = async.queue(insertCustomer, DATABASE_PARALLELISM);
// Fires once the queue has no more pending work; kicks off the next load.
// NOTE(review): assigning a function to `drain` is async v2 style — in
// async v3 `drain` is a method (customerQueue.drain(fn)); confirm which
// version of the package is installed.
customerQueue.drain = function() {
logger.info('all customers loaded');
airportCodeMappingQueue.push(airportCodeMappings);
}
The function insertCustomer used to be written with callbacks. I changed it to async/await as part of the code modernization.
Now, think that I wrote an equivalent of async.queue as:
// Hand-rolled replacement for async.queue.
// NOTE(review): push() invokes insertCustomer immediately, so every insert
// starts the moment it is queued — the array holds already-running
// promises, and processQueue cannot limit concurrency after the fact.
let customerQueueElements = [];
var customerQueue = {};
customerQueue.push = (customers) => {
customers.forEach(customer => {
customerQueueElements.push(insertCustomer(customer))
});
}
// Awaits the queued promises, intended to go in chunks of `parallelism`.
const processQueue = async (queue, parallelism) => {
for (let i = 0; i < queue.length; i += parallelism) {
for (let j = 0; j < parallelism; j++) {
// NOTE(review): `q` is re-created on every inner iteration, so each
// Promise.all awaits at most a single promise — the intended batch of
// `parallelism` promises per await never forms.
let q = []
if (queue[i + j]) {
q.push(queue[i + j])
}
await Promise.all(q)
}
}
}
I am now able to do await processQueue(customerQueueElements, DATABASE_PARALLELISM), but the syntax is awkward, and I have to keep a visible named variable for each queue.
What would be a good way of handling this?
Also, drain() should be hooked-up to then, right ?
@Bergi is correct as far as the direction goes. I put together a work-in-progress version:
// Factory for a minimal async.queue-style work queue.
//
// Fixes two defects in the previous version:
//  1. All state lived on the shared `module` object, so every instance
//     created by this factory shared one queue/worker/parallelism. Each
//     call now closes over its own state, giving truly independent queues.
//  2. The batch array was re-created inside the inner loop, so Promise.all
//     always awaited a single promise and the parallelism setting did
//     nothing. Batches of up to `parallelism` tasks are now awaited together.
function createQueue() {
  let internalQueue = []   // items waiting to be processed
  let func = undefined     // worker function, set via queue()
  let parallelism = 1      // max tasks awaited together

  // Drain the queue in batches of `parallelism`, then reset and notify.
  const process = async () => {
    for (let i = 0; i < internalQueue.length; i += parallelism) {
      const batch = []
      for (let j = 0; j < parallelism; j++) {
        if (internalQueue[i + j]) {
          batch.push(func(internalQueue[i + j]))
        }
      }
      await Promise.all(batch)
    }
    internalQueue = []
    instance.drain()
  }

  const instance = {
    // Configure the worker and concurrency; mirrors async.queue(func, n).
    queue: (f, p = 1) => {
      func = f
      parallelism = p
      return instance
    },
    // Enqueue one item and process the queue.
    push: async (item) => {
      internalQueue.push(item)
      await process()
    },
    // Called whenever the queue empties; callers overwrite it (async v2 style).
    drain: () => {},
  }
  return instance
}

// Guarded so the file also loads in environments without CommonJS globals.
if (typeof module !== 'undefined') {
  module.exports = createQueue
}
It is not exactly correct, yet. The signatures are similar to the async package, but my version shares the queue among all instances.
Need to figure out an easy way of creating an instance for each function with a separate e.g. "local" queue. Then, it will basically work like the original one.
Will update as I have progress.
Related
This question already has answers here:
Why is my variable unaltered after I modify it inside of a function? - Asynchronous code reference
(7 answers)
Closed 5 months ago.
I pushed the fetched JSON data into arrays, and the console shows each array as empty even though the objects appear inside when expanded.
But when printing an element by index, console.log shows undefined.
What's wrong?
// Accumulators for the fetched pokemon data.
let pokeName = [];
let pokeId = [];
let pokeImg = [];
let pokeType = [];
let pokeMove = [];
// Fire 20 fetches; each .then pushes into the arrays only when its
// response arrives, some time after this loop has already finished.
for (let i=1; i< 21; i++) {
fetch(`https://pokeapi.co/api/v2/pokemon/${i}/`)
.then(Response => Response.json())
.then(data => {
pokeId.push(data['id']);
pokeName.push(data['name']);
const dataTypes = data['types'];
pokeType.push(dataTypes[0]['type']['name']);
const dataMoves = data['moves'];
pokeMove.push(dataMoves[0]['move']['name']);
pokeImg.push(data['sprites']['front_default']);
});
}
// NOTE(review): these logs run synchronously, before any fetch has
// resolved — the arrays are still empty here, which is the bug being
// asked about.
console.log(pokeName);
console.log(pokeId);
console.log(pokeImg);
console.log(pokeMove);
console.log(pokeType);
As I mentioned in my comment, your console.log() code runs before the fetch has returned its promise, and thus no data has been set yet.
Your plain for loop fires every fetch without waiting for any of them. Move the loop into an async function and await each fetch (a for...of or for await...of loop works well here); then the code following the loop inside that function runs only after all of the awaited promises have resolved (any code outside the function still runs without waiting).
// Accumulators for the fetched pokemon data.
let pokeName = [];
let pokeId = [];
let pokeImg = [];
let pokeType = [];
let pokeMove = [];
let pokeCount = [];
for (let i = 1; i < 21; i++) pokeCount = [...pokeCount, i];

// Fetch every pokemon sequentially; each iteration awaits the previous
// request, so _RunAfterFetch only runs after all data has arrived.
const _FetchPokes = async () => {
  for (const poke of pokeCount) {
    const response = await fetch(`https://pokeapi.co/api/v2/pokemon/${poke}/`);
    const data = await response.json();
    pokeId.push(data['id']);
    pokeName.push(data['name']);
    pokeType.push(data['types'][0]['type']['name']);
    pokeMove.push(data['moves'][0]['move']['name']);
    pokeImg.push(data['sprites']['front_default']);
  }
  // This will run AFTER the data has been fetched
  _RunAfterFetch();
};

// Fixed: the original `const _RunAfterFetch = () {` was missing the `=>`
// and did not parse at all.
const _RunAfterFetch = () => {
  // Add any code you want to run here
  // This will run AFTER the pokemon data has been fetched
  console.log(pokeName[1 - 1]);
};

_FetchPokes();
Your console logs are running before your fetch calls are finishing. You can use async/await to solve the issue.
Something like this should work:
// Accumulators for the fetched pokemon data.
let pokeName = [];
let pokeId = [];
let pokeImg = [];
let pokeType = [];
let pokeMove = [];

(async function () {
  for (let i = 1; i < 21; i++) {
    const data = await fetch(`https://pokeapi.co/api/v2/pokemon/${i}/`);
    // Fixed: .json() returns a Promise, so it must be awaited — without
    // the await, every property lookup below yielded undefined.
    const json = await data.json();
    pokeId.push(json['id']);
    pokeName.push(json['name']);
    const dataTypes = json['types'];
    pokeType.push(dataTypes[0]['type']['name']);
    const dataMoves = json['moves'];
    pokeMove.push(dataMoves[0]['move']['name']);
    pokeImg.push(json['sprites']['front_default']);
  }
  // Fixed: the logs must run inside the async IIFE; at their old
  // top-level position they executed before any fetch had completed.
  console.log(pokeName);
  console.log(pokeId);
  console.log(pokeImg);
  console.log(pokeMove);
  console.log(pokeType);
})();
here is the alternate way to fetch data
// Fetch a single pokemon by id and return the parsed JSON body.
const getPokemon = async (id) => {
  const response = await fetch(`https://pokeapi.co/api/v2/pokemon/${id}/`);
  return await response.json();
};

// Fetch pokemon 1..9 one at a time, logging each result in order.
const getSerializedData = async () => {
  let i = 1;
  while (i < 10) {
    const data = await getPokemon(i);
    console.log(data);
    i += 1;
  }
};

getSerializedData();
This question already has answers here:
Waiting for promise in for loop
(2 answers)
Closed 4 years ago.
I have a state this.state = { PathBased: [], ...etc }
and a function containing a for loop ,after looping this.state.data I want to add every returned element of the for loop to PathBased state ,
I can't find how to do it , could you please help
by the way
console.log(this.state.data) output is 5
// Reads one base64-encoded file per state.data entry and tries to append
// each result to this.state.PathBased.
// NOTE(review): readFile is async — this loop only *starts* the reads.
// Inside .then, the spread uses a bare `PathBased` identifier (presumably
// this.state.PathBased was meant — confirm), and the log below runs
// before any read completes, hence length 0.
async readFromLocal (){
let dirs = RNFetchBlob.fs.dirs;
let path = [];
let leng = this.state.data.length;
if(leng > 0)
{
for( var j = 0 ; j < leng; j++)
{
path[j] = dirs.PictureDir+"/folder/"+this.state.data[j].title;
const xhr = RNFetchBlob.fs.readFile(path[j], 'base64')
.then ((data) => {this.setState({PathBased:[...PathBased,data]})})
.catch ((e) => {e.message});
}
}
console.log(this.state.PathBased.length) // output = 0
}
You could make use of Promise.all and set the final result in the PathBased state. Also since setState is async you need to check the updated state in setState callback.
async readFromLocal (){
let dirs = RNFetchBlob.fs.dirs;
let path = [];
let promises = [];
let leng = this.state.data.length;
if(leng > 0) {
for( let j = 0 ; j < leng; j++) {
path[j] = dirs.PictureDir+"/folder/"+this.state.data[j].title;
promises.push(RNFetchBlob.fs.readFile(path[j], 'base64'))
}
}
Promise.all(promises)
.then(data => this.setState({PathBased: data}, () => {console.log(this.state.PathBased)})
)
.catch(e => {console.log(e.message)});
}
I want to conditionally break out of a loop like this..
// Attempts to stop the loop early based on an async result.
// NOTE(review): `res = 0 ? i = 3 : null` assigns to `res` the value of
// the ternary (0 is falsy, so always null) — it never tests `result`;
// and the .then callback runs after the loop has already finished, so
// `i = 3` could never stop it anyway. This is the bug being asked about.
for(let i = 0; i < 3; i++) {
exampleFunction().then(result => {
res = 0 ? i = 3 : null
})
}
I want exampleFunction to run at least 3 times unless it gets the desired result, in which case I want it to stop running.
exampleFunction runs asynchronously. A straightforward way to get it working is to use async/await.
// Loop up to 3 times, stopping early once exampleFunction yields 0.
const iterateWithExampleFunction = async () => {
  for (let i = 0; i < 3; i++) {
    console.log('before', i);
    // Fixed idiom: await the promise directly instead of mixing
    // `await ... .then(...)` in a single expression.
    const result = await exampleFunction();
    if (result === 0) {
      i = 3; // force loop termination, exactly as the original did
    }
    console.log('after', i);
  }
};

// Stand-in async task; always reports the "desired" result.
const exampleFunction = async () => {
  return 0;
};

iterateWithExampleFunction();
You can have a count on the outer scope and then do the async call.
// Retry counter shared across recursive invocations.
let count = 0;
// Kick off one async call; retry (up to 3 times) while the result is
// still non-zero.
function executeCall() {
  exampleFunction().then((result) => {
    // do something with the result
    const shouldRetry = result !== 0 && count !== 3;
    if (shouldRetry) {
      count += 1;
      executeCall();
    }
  });
}
Just await the result, then break:
// Await each result in turn; stop as soon as one comes back as 0.
(async function() {
  let attempt = 0;
  while (attempt < 3) {
    const result = await exampleFunction();
    if (result === 0) break;
    attempt += 1;
  }
})();
Hopefully this gives you some ideas
// Retry `f` until it produces a value of at least 0.5.
// Gives up (throws) once `threshold` attempts are exhausted.
async function poll(f, threshold = 3) {
  if (!threshold) {
    throw new Error("Value not found within configured amount of retries");
  }
  const result = await f();
  // Either the value qualifies, or recurse with one fewer attempt left.
  return result >= 0.5 ? result : poll(f, threshold - 1);
}

// Sample task: a random value in [0, 1) to poll against.
async function task() {
  return Math.random();
}

poll(task).then(console.log).catch(console.error);
I have a very large array (10K items) and I want to split it (I did it according to this: https://stackoverflow.com/a/8495740/2183053 and it worked), but I need to pass the tempArray to request and wait for the response, which will then be passed to savetodb.
Can someone help me out please. To be clear, I want to split the large Array and pass the separated arrays to request function then pass to save to db and continue this process until all the arrays are cleared.
the following is the code i did:
//i used waterfall because it worked in waiting to finish the first task before starting another
// Reads jsonFile.json, chunks the author list, and feeds the chunks
// through runRequest1 into savetoDb.
// NOTE(review): the async.apply(...) line below is missing a trailing
// comma, so this task array does not parse as written.
async.waterfall([
async.apply(handleFile, './jsonFile.json')
runRequest1,
savetoDb
], function(err) {
console.log('waterfall1 complete')
})
// Loads the file and hands author-name chunks to the next waterfall step.
function handleFile(data, callback) {
var name
var authorNames = []
require('fs').readFile(data, 'utf8', function(err, data) {
if (err) throw err
var content = _.get(JSON.parse(data), [2, 'data'])
for (var x = 0; x < content.length; x++) {
authorNames.push(JSON.stringify(content[x]['author']))
}
//the large array is authorNames and im splitting it below:
// NOTE(review): this loop schedules callback() once per chunk, but a
// waterfall callback must be invoked exactly once — every call after
// the first is an error. `i`, `j` and `temparray` are implicit globals,
// and `chunk` must be defined elsewhere — confirm.
for (i = 0, j = authorNames.length; i < j; i += chunk) {
temparray = authorNames.slice(i, i + chunk);
setTimeout(function() {
callback(null, temparray)
}, 2000);
}
})
}
You need a Promise to handle async things in Node.js:
// Look up the receipts for every account and resolve once all of the
// individual queries have completed (rejects on the first failure).
let findAllReceipts = function (accountArray) {
  const lookups = accountArray.map((account) =>
    new Promise((resolve, reject) => {
      receiptTable.find({accountNo: account.accountNo}, function (err, data) {
        if (err) {
          reject(new Error('findPrice ERROR : ' + err));
        } else {
          resolve(data);
        }
      });
    })
  );
  return Promise.all(lookups);
};
I added some promise.
// Usage: resolve the chunked author list, then persist it.
const data = await handleFile('./jsonFile.json');
// save to db
// Reads the JSON file and returns the author names split into chunks.
// NOTE(review): `chunk` is not defined in this snippet (the chunk size
// must come from surrounding code) and `_` is presumably lodash — both
// need to be in scope for this to run; `i`/`j` are implicit globals and
// `name` is declared but never used.
async function handleFile(filePath) {
let arrayWillReturn = [];
var name
var authorNames = []
let data = await getFileData(filePath)
// Take the `data` array of the third element of the parsed JSON.
var content = _.get(JSON.parse(data), [2, 'data'])
for (var x = 0; x < content.length; x++) {
authorNames.push(JSON.stringify(content[x]['author']))
}
//the large array is authorNames and im splitting it below:
for (i = 0, j = authorNames.length; i < j; i += chunk) {
arrayWillReturn.push(authorNames.slice(i, i + chunk));
}
return arrayWillReturn;
}
// Promisified fs.readFile.
// Fixed: the original passed an undefined variable `type` as the
// encoding, which threw a ReferenceError on every call; the encoding is
// now an explicit parameter defaulting to utf8 (backward compatible).
async function getFileData(fileName, encoding = 'utf8') {
return new Promise(function (resolve, reject) {
fs.readFile(fileName, encoding, (err, data) => {
err ? reject(err) : resolve(data);
});
});
}
I'm answering my own question for future reference. The only thing that I added to make my code work is a Promise. It sounds easy, but it took me a while to grasp and implement it — and it worked and was worth it. Thank you so much for the responses.
I have a generator, which generates lots of, but not infinitely many values. I want to call a high-latency function on each of them with n of them concurrently.
I tried to use async.eachLimit like this. But it prints only 3 numbers, not all of them.
async = require('async');
// Yields 100 consecutive integers starting at `start`.
function* generator(start) {
for (var i = start; i < start + 100; ++i) {
yield i;
}
}
// NOTE(review): the iteratee must accept and invoke a completion
// callback (or be an async function) — as written, eachLimit never
// learns that an item finished, so it stalls after the first `limit`
// items (3 here), which is the reported symptom.
async.eachLimit(generator(17), 3, function(value) {
console.log(value);
});
What’s the right way to achieve this?
I think it's erroring out on you because you're not passing in an async function:
'use strict';
const async = require('async');
// Async generator yielding start .. start+99.
async function* generator(start) {
  for (var i = start; i < start + 100; ++i) {
    yield i;
  }
}

(async () => {
  // With an async iteratee, eachLimit knows when each item completes and
  // can consume the async generator 3 items at a time.
  await async.eachLimit(generator(17), 3, async (value) => {
    console.log(value);
  });
// Fixed: the original handler `e => console.error` only *referenced*
// console.error and never invoked it, silently discarding errors.
})().catch((e) => console.error(e));
Works for me;