loop synchronously through array while calling asynchronous function - javascript

My problem: I have an array of users and a user_hash, and I need to find the matching user. I came up with the following solution:
//results is the array user objects
//safety_first.getUserHash is a function that calculates a user_hash, it retrieves the user-record based on the id and then combines values of some fields and a key and turns this into a hash.
// Asker's original approach: replace the for-loop with recursion so the
// search can stop as soon as a hash matches. The shared index `i` is
// advanced only in the no-match branch, so each recursive call checks
// the next candidate.
if (results.length > 0)
{
var i = 0;
// Checks one user; recurses to the next on mismatch.
// callback receives the matching user_id, or false when exhausted.
function checkUserhash(user_id, user_hash, callback) {
safety_first.getUserHash(user_id, function(check_user_hash) {
// NOTE(review): loose == comparison — prefer === unless type
// coercion between the hashes is intended.
if (user_hash == check_user_hash)
{
callback(user_id);
}
else
{
// More candidates left: advance the shared index and recurse.
if ((i+1) < results.length)
{
i++;
checkUserhash(results[i].id, user_hash, callback);
}
else
{
// Exhausted all users without a match; false is the sentinel.
callback(false);
}
}
});
}
checkUserhash(results[i].id, user_hash, function(user_id) {
if (user_id)
{
console.log("MATCH: "+user_id);
}
else
{
console.log("NO MATCH");
}
});
}
I first tried to do this in a for-loop, but because checkUserhash is called asynchronously I could not break the loop when the match was found.
I'm looking for other possible solutions, please share your thoughts.
regards, Pieter

You can map over your user array to create an array of promises. Use Promise.all to wait for those promises to resolve and then iterate over the responses to check whether the hash matches the id.
In this example I've used async/await. I've also mocked up a hashing function routine so you can see it in action. Simply change the resolve from 'id' to 'id + 1' and rerun the demo to see the Match become No match.
Hope this is some use.
// Mock of the real hashing service: resolves with the id itself after a
// simulated 1-second lookup, so a user "matches" when hash === id.
const safety_first = {
  getUserHash(id) {
    return new Promise((fulfil) => {
      // Simulate network/crypto latency before handing back the "hash".
      setTimeout(() => {
        fulfil(id);
      }, 1000);
    });
  },
};
// Resolve every pending hash lookup, then report - per user - whether the
// returned hash equals that user's id. `Promise.all` preserves order, so
// index i lines each user up with its hash result.
async function checkUserHashes(userList, promises) {
  try {
    const res = await Promise.all(promises);
    for (const [i, { id }] of userList.entries()) {
      const verdict = id === res[i] ? 'Match' : 'No match';
      console.log(`${id}|${res[i]} - ${verdict}`);
    }
  } catch (e) {
    console.log(e);
  }
}
const userList = [
  { id: 1, userHash: 1 },
  { id: 2, userHash: 2 },
];
// Kick off one hash lookup per user; `map` yields the matching
// array of validation promises.
const promises = userList.map((user) => safety_first.getUserHash(user.id));
// Hand both the users and their in-flight lookups to the checker.
checkUserHashes(userList, promises);
Update: if you want to break out of the loop when a match has been found that's a little easier:
// Mock hash service for the early-exit demo: only id === 1 hashes to
// itself, so the second object (index 1) is the one that matches.
const safety_first = {
  getUserHash(id) {
    return new Promise((resolve) => {
      setTimeout(() => {
        const hash = id === 1 ? id : id + 1;
        resolve(hash);
      }, 1000);
    });
  },
};
// Check users one at a time, stopping at (and returning) the first whose
// id equals its computed hash; returns null when nothing matches.
// Fixed: the original destructured an unused `index` from
// `userList.entries()` with `let` — a plain `for...of` over the list
// with `const` is all that is needed.
async function checkUserHashes(userList) {
  for (const obj of userList) {
    // Await each hash check individually so we can stop early.
    const res = await safety_first.getUserHash(obj.id);
    // First match wins — later users are never even looked up.
    if (obj.id === res) return obj;
  }
  // Exhausted the list without a match.
  return null;
}
// Demo data: with the mock above, only the second entry (id 1) matches.
const userList = [{ id: 0, userHash: 1 }, { id: 1, userHash: 2 }];
// IIFE so `await` can be used (written before top-level await existed).
(async () => {
// NOTE(review): despite the name, this holds the whole matching user
// object (or null), not just an id.
const id = await checkUserHashes(userList);
console.log(id);
})();

Related

How can I retry a function with try/catch in react js?

I have a function with try catch inside. I call to the API about response. This response returns me a Result object with some data. But sometimes I'd like to retry the function if I have no name and have bundle_id.
I'd like to retry this functions max 3 times. If no result, I'd like to throw an error.
I have sth like this, but it doesn't work for me.
// Fetch product details and map them to the e-commerce shape.
// NOTE(review): this is the asker's non-working code. `retryCounter` is a
// local, re-initialised to 0 on every call — including the recursive
// one — so `retryCounter <= 3` is always true and the retry cap never
// applies. The counter must live outside the function or be passed along
// as a parameter for the limit to work.
const getECommerceProduct = async ({
item,
getParent = false,
variantInfo,
}) => {
if (!item.id) return null;
let retryCounter = 0;
try {
const response = await getEvaService(Core.GetProductDetail, {
ID: item.id,
});
const itemResponse = response?.Result;
if (!itemResponse) return null;
// Retry the function if the item has no name and has a bundle product.
// The bundle product will give back more information about the product.
const retry = !!itemResponse.bundle_id && !itemResponse.product_name;
if (retry && !getParent && retryCounter <= 3) {
// NOTE(review): this increment is lost — the recursive call starts
// over with a fresh retryCounter of 0.
retryCounter += 1;
return getECommerceProduct({
item: {
id: itemResponse.bundle_id,
quantity: item.quantity,
variantInfo,
},
getParent: true,
});
}
return transformProductToECommerce(itemResponse, item);
} catch (e) {
console.error(e);
return null;
}
};
Could you help me?
You could use a state to track the attempt count.
// Track how many attempts have been made; changing it re-runs the effect.
// Fixed: the original line was missing `const`, which makes the
// destructuring assignment an implicit global (or a ReferenceError in
// strict mode / ES modules).
const [retry_count, setRetryCount] = useState(0);
useEffect(() => {
  if (retry_count < 3) {
    // .... put fetch code here
  }
}, [retry_count]);
Increment the retry_count every time by putting it in the catch block within your fetch function
// Re-run the fetch whenever retry_count changes; stop after 3 failures.
React.useEffect(() => {
// NOTE(review): this helper destructures `{ item, ... }` from its
// argument, but it is invoked below with no argument at all, so it
// rejects immediately — each rejection bumps retry_count until the
// guard stops it. A real `item` must be passed in.
const getECommerceProduct = async ({ item, getParent = false, variantInfo }) => {
if (!item.id) return null;
const response = await getEvaService(Core.GetProductDetail, {
ID: item.id,
});
const itemResponse = response?.Result;
if (!itemResponse) return null;
return transformProductToECommerce(itemResponse, item);
};
if (retry_count < 3) {
// On failure, incrementing retry_count re-triggers this effect.
getECommerceProduct().catch((e) => {
console.error(e);
setRetryCount(retry_count + 1);
return null;
});
}
}, [retry_count]);
With the useEffect, this means that every time the retry_count is incremented (in this case an error happens), the function within the useEffect call will be performed.

How to make async/wait inside of for loop?

I'm using async await inside of for loop as below.
// Asker's sequential version: one awaited request per result row.
// NOTE(review): two bugs as written — `client` is a `const` scoped to the
// try block, so reading it after the catch is a ReferenceError; and
// `rider` is never defined anywhere (it should be the response variable).
for (let i = 0; i < result.length; i += 1) {
try{
const client = await axios.get(
`${process.env.user}/client/${result[i].id}`
);
} catch(error){
console.log(error)
}
if (client.data.success === true) {
result[i].Name = rider.data.client.Name;
result[i].PhoneNumber = rider.data.client.Number;
}
}
But I want to rewrite this using 'new Promise' and 'Promise.all' so the requests run concurrently.
But I don't know how to do this correctly while still handling errors well.
Could you recommend some advice for this? Thank you for reading it.
This can be a basic solution, i think
// Build one request promise per result row, patch the row when its
// request succeeds, and wait for the whole batch at the end.
let playList = []
for (let i = 0; i < result.length; i += 1) {
  playList.push(axios.get(
    `${process.env.user}/client/${result[i].id}`
  ).then(res => {
    if (res.data.success === true) {
      // Fixed: the response here is `res` — `rider` was an undefined
      // leftover carried over from the question's snippet.
      result[i].Name = res.data.client.Name;
      result[i].PhoneNumber = res.data.client.Number;
    }
  }).catch(ex => console.log(ex)));
}
// Per-request .catch above means one failure cannot sink the batch.
await Promise.all(playList)
This can also be done by using a foreach loop.
The for/foreach loop can be simplified by using a map function.
Js map function is equivalent of c# select linq function.
The fat arrow in js map function is not bound to return a value unlike c# select inner function which must return a value.
// Run every request concurrently via map + Promise.all; each row is
// patched in place when its own request succeeds.
await Promise.all(result.map(async r => {
  let client;
  try {
    client = await axios.get(`${process.env.user}/client/${r.id}`);
  } catch (error) {
    console.log(error)
  }
  // Fixed: guard with ?. — if the request above failed, `client` is
  // still undefined and a plain property access would throw.
  if (client?.data.success === true) {
    // Fixed: read from the awaited response; `rider` was undefined.
    r.Name = client.data.client.Name;
    r.PhoneNumber = client.data.client.Number;
  }
}));
Try this
// Fixed: the original line was missing the closing parenthesis on `map`,
// which is a syntax error.
var promises = result.map(r => axios.get(`${process.env.user}/client/${r.id}`));
Promise.all(promises).then(function(values) {
  console.log('All promises done');
});
The idea is that if you can await something, it is a promise: you can await it to get the resolved value, or call the function without await to get the promise itself.
Example:
// Illustrative only: the `...` body is pseudocode — a real implementation
// would pass an executor function to the Promise constructor.
function Foo()
{
return new Promise(...); // Promise of int for example
}
you can do
var p = Foo(); //you will get promise
Or
var v = await Foo(); // you will get int value when promise resolved
This is how you do it with async/await + Promise.all:
// Fire every request concurrently and wait for all of them to settle.
const myResult = await Promise.all(result.map(({ id }) => {
return axios.get(`${process.env.user}/client/${id}`);
}));
// deal with the result of those requests
// NOTE(review): the arrow body below is a placeholder comment, so this
// line is not valid JavaScript as written — fill it in before running.
const parsed = myResult.map(data => /* your code here */);
Here is an example using Array.map to call your function along with Promise.all. I wrapped the axios request in a function so if one of your request fails, it wont stop every other requests. If you don't mind stopping when you got an issue, look at others answers to your question.
// Stand-in for axios.get: resolves after 300ms with a canned success
// payload, so the demo below never touches the network.
function fakeRequest() {
  const payload = {
    data: {
      success: true,
      client: {
        Name: 'Todd',
        Number: 5,
      },
    },
  };
  return new Promise((resolve) => {
    setTimeout(() => resolve(payload), 300);
  });
}
// Demo driver: issue all three fake requests concurrently, copy the
// client fields onto each row, then print the mutated rows.
(async () => {
  const result = [{}, {}, {}];
  const updates = result.map(async (row) => {
    try {
      const client = await fakeRequest();
      if (client.data.success === true) {
        row.Name = client.data.client.Name;
        row.PhoneNumber = client.data.client.Number;
      }
    } catch (err) {
      console.log(err)
    }
  });
  await Promise.all(updates);
  console.log(result);
})();

Wrap a resultset callback function with a generator/iterator

I'm working on converting a legacy callback-based API into an async library. But I just can't wrap my head around getting a "resultset" to work as a generator (Node 10.x).
The original API works like this:
// The legacy callback API being wrapped: prepare() yields a resultset,
// whose fetchRows takes a per-row callback plus a completion callback.
// NOTE(review): the `err` arguments are never checked in this sketch.
api.prepare((err, rs) => {
rs.fetchRows(
(err, row) => {
// this callback is called as many times as rows exist
console.log("here's a row:", row);
},
() => {
console.log("we're done, data exausted");
}
);
});
But here is how I want to use it:
// Desired ergonomics: a promise-based prepare, then either sync-looking
// iteration (not actually possible for async data)...
const wrapped = new ApiWrapper(api);
const rs = await wrapped.prepare({});
for (let row of rs.rows()) {
console.log("here's a row:", row);
}
// ...or explicit pull-based consumption, one awaited row at a time.
let row;
while(row = await rs.next()) {
console.log("here's a row:", row);
}
I thought I had it under control with generators, but it looks like you cannot use yield inside a callback. It actually seems logical if you think about it.
// First (non-working) wrapper attempt, kept to illustrate the problem.
class ApiWrapper {
constructor(api) {
this.api = api;
}
// Standard callback-to-promise adapter; this part is fine.
prepare() {
return new Promise((resolve, reject) => {
this.api.prepare((err, rs) => {
if (err) {
reject(err);
} else {
resolve(rs);
}
});
});
}
// Broken by design of the language: `yield` is only legal directly in
// the generator's own body, not inside a nested callback function, so
// this does not even parse.
*rows() {
this.api.fetchRows((err, row) => {
if (err) {
throw err;
} else {
yield row; // nope, not allowed here
}
});
}
// Placeholder — the pull-based variant was never written.
next() { ... }
}
So what alternatives do I have?
Important: I don't want to store anything in an array then iterate that, we're talking giga-loads of row data here.
Edit
I'm able to simulate the behavior I want using stream.Readable but it warns me that it's an experimental feature. Here's a simplified array-based version of the issue I'm trying to solve using stream:
const stream = require('stream');
// Build an object-mode Readable whose read() emits three fixed rows and
// then signals end-of-stream with a null push.
function gen(){
  const source = new stream.Readable({
    objectMode: true,
    read() {
      for (const row of [11, 22, 33]) {
        this.push({ value: row });
      }
      // Pushing null tells the stream there is no more data.
      this.push(null)
    }
  });
  return source;
}
// Streams are async-iterable, so for-await consumes the rows lazily.
// NOTE(review): top-level for-await only parses in an ES module context.
for await (let row of gen()) {
console.log(row);
}
// { value: 11 }
// { value: 22 }
// { value: 33 }
(node:97157) ExperimentalWarning: Readable[Symbol.asyncIterator] is an experimental feature. This feature could change at any time
I finally realized I needed something similar to Go's channels that were async/await compatible. Basically the answer is to synchronize an async iterator and a callback, making them wait for each other as next() iterations are consumed.
The best (Node) native solution I found was to use a stream as an iterator, which is supported in Node 10.x but tagged experimental. I also tried to implement it with the p-defer NPM module, but that turned out to be more involved than I expected. Finally I ran across the https://www.npmjs.com/package/@nodeguy/channel module, which was exactly what I needed:
// Fixed: the package name is scoped with `@` — `#nodeguy/channel` was a
// rendering artifact of the original post and would not resolve.
const Channel = require('@nodeguy/channel');
class ApiWrapper {
  // ...
  rows() {
    const channel = new Channel();
    const iter = {
      [Symbol.asyncIterator]() {
        return this;
      },
      async next() {
        // Blocks until the producer callback pushes the next row;
        // an undefined shift means the channel was closed.
        const val = await channel.shift();
        if (val === undefined) {
          return { done: true };
        } else {
          return { done: false, value: val };
        }
      }
    };
    // Each push awaits the consumer's shift, throttling the producer.
    // NOTE(review): assumes fetchRows returns a promise that settles when
    // all rows are delivered — confirm against the real legacy API.
    this.api.fetchRows(async (err, row) => {
      await channel.push(row);
    }).then(() => channel.close());
    return iter;
  }
}
// then later
// Consume rows lazily; iteration ends when the channel closes.
for await (let row of rs.rows()) {
console.log(row)
}
Note how each iterating function core, next() and rows(), have a await that will throttle how much data can be pushed across the channel, otherwise the producing callback could end up pushing data uncontrollably into the channel queue. The idea is that the callback should wait for data to be consumed by the iterator next() before pushing more.
Here's a more self-contained example:
// Fixed: scoped package name uses `@`, not `#` (markup artifact).
const Channel = require('@nodeguy/channel');
// Self-contained demo of the channel-backed async iterator: a producer
// pushes 11, 22, 33 while the consumer pulls them via next().
function iterating() {
  const channel = Channel();
  const iter = {
    [Symbol.asyncIterator]() {
      return this;
    },
    async next() {
      console.log('next');
      const val = await channel.shift();
      if (val === undefined) {
        return { done: true };
      } else {
        return { done: false, value: val };
      }
    }
  };
  // Each push awaits a matching shift, so production is throttled by
  // consumption (back-pressure) — see the interleaved output below.
  [11, 22, 33].forEach(async it => {
    await channel.push(it);
    console.log('pushed', it);
  });
  console.log('returned');
  return iter;
}
// Drive the demo: pull values until the iterator reports done.
(async function main() {
for await (let it of iterating()) {
console.log('got', it);
}
})();
/*
returned
next
pushed 11
got 11
next
pushed 22
got 22
next
pushed 33
got 33
next
*/
Like I said, Streams and/or Promises can be used to implement this, but the Channel module solves some of the complexity that make it more intuitive.
The original question has two nested callback taking async functions
api.prepare((err,res) => ...)
rs.fetchRows((err,res) => ...)
The first one runs the callback only once so just promisifying it as follows is sufficient.
// Promise-wrap a node-style `f(callback)` where callback is (err, result).
// Fixed two defects: the name now matches the call site further down
// (`promisified`, not `promisfied`), and the wrapper hands `f` a single
// (err, result) callback. The original `f(x, v)` passed the reject
// function AS the callback, so every completion — success included —
// rejected the promise.
function promisified(f){
  return new Promise((resolve, reject) => {
    f((err, result) => (err ? reject(err) : resolve(result)));
  });
}
However, the second function will invoke its callback multiple times, and we wish to generate an async iterable from it so that we can consume it in a for await...of loop.
This is also possible by employing async generators as follows;
// Adapt the multi-shot fetchRows callback into an async generator by
// repeatedly swapping in a fresh promise for the callback to settle.
// NOTE(review): the while(true) never terminates — nothing signals
// completion — and a row that arrives while the consumer is not awaiting
// replaces _row before it is yielded, so rows can be dropped. Treat this
// as a sketch, not production code.
async function* rowEmitterGenerator(rs){
let _v, // previous resolve
_x, // previous reject
_row = new Promise((v,x) => (_v = v, _x = x));
rs.fetchRows((err, row) => ( err ? _x(err) : _v(row)
, _row = new Promise((v,x) => (_v = v, _x = x))
));
while(true){
try {
yield _row;
}
catch(e){
console.log(e);
}
}
}
Then putting all together in a top level await context;
// Promisify the one-shot prepare, then stream rows through the generator.
// NOTE(review): `api.prepare` is passed unbound — if prepare reads
// `this`, pass `api.prepare.bind(api)` instead.
const rows = await promisified(api.prepare),
rowEmitter = rowEmitterGenerator(rows);
for await (let row of rowEmitter){
console.log(`Received row: ${row}`);
// do something with the row
}

JavaScript: Returning array from recursive function

I have made a class which builds some data from api:
const http = require("http");
// Asker's class: pages through an API and accumulates items.
// NOTE(review): fetchVideos never RETURNS the http promise, so callers
// have no way to wait for completion — which is exactly why getVideos
// logs 0 and returns an empty array before the fetch finishes.
class VideoService {
constructor() {
this.items = [];
}
// Fetch one page, push its items, recurse while a nextPageToken exists.
// NOTE(review): `token` is accepted but never used in the URL, and
// `http.getJSON` is not part of Node's http module — presumably a
// project wrapper; verify.
fetchVideos(token = "") {
const url = `https://www.example.com`;
http.getJSON(url).then((results) => {
results.items.forEach((item, index) => {
const vid = item.snippet.resourceId.videoId;
this.items.push({
title: item.title,
date: item.publishedAt
});
console.log(this.items.length); // here length increases, works here
});
if (typeof results.nextPageToken !== "undefined") {
return this.fetchVideos(results.nextPageToken);
}
});
}
// Broken: runs before the async fetch above has completed.
getVideos() {
this.fetchVideos();
console.log(this.items.length); // this returns 0 instead of all items fetched
return this.items;
}
}
module.exports = VideoService;
In another file, I am using it like this:
const videoService = require("../shared/videoService");
const videos = (new videoService()).getVideos();
console.log(videos);
The last console.log call always returns empty array instead of all data collected in items property of the above class.
Can anybody tell what I am missing here?
This happens because in your function fetchVideos(), you are making an http call which will be processed asynchronously. You can try to process it this way.
fetchVideos(token = "") {
const url = `https://www.example.com`;
return http.getJSON(url).then((results) => {
results.items.forEach((item, index) => {
const vid = item.snippet.resourceId.videoId;
this.items.push({
title: item.title,
date: item.publishedAt
});
console.log(this.items.length); // here length inreases, works here
});
if (typeof results.nextPageToken !== "undefined") {
return this.fetchVideos(results.nextPageToken);
}
else return new Promise((resolve, reject)=>{
resolve();
});
});
}
getVideos() {
return this.fetchVideos().then(function(){
console.log(this.items.length); // this returns 0 instead of all items fetched
return this.items;
});
}
I suggest reading about promises and asynchronicity in javascript. Check this link:
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise

Function to return non-null value not working properly when saving data to MongoDB

I get an array of objects from an API call and then I filter the values depending on two keys: story_title and title. If both values are null then the object is filtered. Then I loop through the filtered array of objects to save certain data to mongodb (using mongoose) from the filtered array. The problem is that I want to save the document with one title key, so I created a function to check if story_title or title is null and return the non-null value.
The function is not working properly because the function in title, inside the for loop, is returning some null values.
// NOTE(review): this is the asker's buggy helper — the branches are
// inverted. When story_title is null/empty it RETURNS that null value
// instead of falling back to title; swapping the two returns fixes it.
function pickTitle(title, story_title) {
if (!story_title) {
return story_title;
} else {
return title
}
}
// Asker's scheduled job: fetch posts, keep those missing a title field,
// and upsert each into MongoDB.
// NOTE(review): `data` is assigned without declaration (implicit global);
// `posts.filter` is synchronous, so the `await` on it does nothing; and
// the filter keeps posts where EITHER field is missing — both may still
// be null, which is why pickTitle can receive two nulls.
everyHour: async () => {
try {
data = await axios.get(url);
let posts = data.data.hits;
const filteredPosts = await posts.filter((elem) => {
return (!elem.title || !elem.story_title)
});
for (let filtered of filteredPosts) {
Post.updateOne({
_id: filtered.objectID,
author: filtered.author,
// NOTE(review): pickTitle is synchronous — awaiting it is a no-op.
title: await pickTitle(filtered.title, filtered.story_title),
created_at: filtered.created_at,
},
{$setOnInsert: filtered},
{upsert: true},
function(err, numAffected) {
if (err) {
//console.log("err")
} else {
//console.log(numAffected)
}
})
.then(res => {
//console.log(res)
})
.catch(err => {
//console.log(err)
});
}
} catch(error) {
console.log(error);
}
}
There are a few issues here. I'll step through them in some comments in the code, as well as having the "solution" directly below the commented code.
You are awaiting calls that are not asynchronous... don't do that. Array.filter is not an asynchronous operation
Your function pickTitle is being awaited, when it's not asynchronous
You're resolving promises inside a loop, which is generally considered bad practice. Add your promises to an array, and resolve them all with Promise.all()
Lastly, your "filter" logic filters on NULL title OR story_title. That means only one needs to hold true. It's possible that both could be NULL though. Thus your pickTitle function returns a null value if both happen to be NULL. If you want to always have at least one of them contain a value, you need to change the way your array.filter works.
// Return whichever title is present: prefer story_title, falling back
// to title when story_title is missing.
// Fixed: the answer's version repeated the asker's inversion — when
// story_title was null it returned that null instead of title.
const pickTitle = (title, story_title) => {
  if (!story_title) {
    return title;
  }
  return story_title;
};
// Reviewer's rewrite of everyHour: filter synchronously, queue every
// updateOne promise, and resolve the whole batch with Promise.all.
// NOTE(review): `data` is still assigned without declaration (implicit
// global), and `filteredPostAnotherWay` is computed but never used —
// `filteredPosts` is what feeds the loop.
async () => {
try {
data = await axios.get(url);
const posts = data.data.hits;
// const filteredPosts = await posts.filter(elem => (!elem.title || !elem.story_title)); <-- you are awaiting on a filter... don't do that
const filteredPosts = posts.filter(elem => (!elem.title || !elem.story_title));
const filteredPostAnotherWay = posts.filter(post => { // <-- This might be more of what you want...
let allowed = false;
if (!post.title) {
allowed = true;
}
if (!post.story_title) {
allowed = true;
}
return allowed;
});
// Collect the pending writes so they can be awaited as one batch.
const postUpdates = [];
for (const filtered of filteredPosts) {
// Post.updateOne({ <-- You are resolving promises inside a for loop. While it may work, it's generally not advised to do this. Resolve everything with promise.all instead....
// _id: filtered.objectID,
// author: filtered.author,
// title: await pickTitle(filtered.title, filtered.story_title),
// created_at: filtered.created_at,
// story_url: filtered.story_url
// },
// { $setOnInsert: filtered },
// { upsert: true },
// (err, numAffected) => {
// if (err) {
// // console.log("err")
// } else {
// // console.log(numAffected)
// }
// })
// .then(res => {
// // console.log(res)
// })
// .catch(err => {
// // console.log(err)
// });
postUpdates.push(
Post.updateOne({
_id: filtered.objectID,
author: filtered.author,
// title: await pickTitle(filtered.title, filtered.story_title), // <-- You are awaiting a non asynchronous function... why?
title: pickTitle(filtered.title, filtered.story_title),
created_at: filtered.created_at,
story_url: filtered.story_url
},
{ $setOnInsert: filtered },
{ upsert: true },
(err, numAffected) => {
if (err) {
// console.log("err")
} else {
// console.log(numAffected)
}
})
);
}
return Promise.all(postUpdates);
} catch (error) {
console.log(error);
}
};

Categories