How to slow down making requests? - javascript

I have a problem with requesting too fast. When my script makes too many requests per second, Chrome throws the error net::ERR_INSUFFICIENT_RESOURCES.
I want to make one request per 20 ms.
How can I achieve that?
This is how my main function looks at the moment...
const symbolsArr = reader.result.split("\n").map((str) => str.trim()); // symbolsArr is just a regular array, except its source is a .txt file
function loopFunction(error_symbol) {
  for (const symbol of symbolsArr) {
    setTimeout(getData(symbol), 5000); // I tried to use setTimeout but it doesn't help
  }
  return console.log(error_symbol);
}
loopFunction(error_symbols);
And my fetcher...
error_symbols = [];

function getData(symbol) {
  fetch(
    `https://cors-anywhere.herokuapp.com/` +
      `https://eodhistoricaldata.com/api/fundamentals/${symbol}?api_token= (don't look at my secret token :))`,
    {
      method: "get",
    }
  )
    .then((res) => {
      if (res.ok) {
        return res.json();
      } else {
        throw new Error(`Symbol ${symbol} is 'empty'`);
      }
    })
    .then((data) => {
      console.log(data);
      var myJSON = JSON.stringify(data);
      saveFile(myJSON, `${symbol}-json.txt`, "text/plain");
    })
    .catch((error) => {
      error_symbols.push(symbol);
      throw error + symbol;
    });
}
Pretty simple, I have to somehow put a cooldown on the fetcher.

try this
const symbolsArr = reader.result.split("\n").map((str) => str.trim()); // symbolsArr is just a regular array, except its source is a .txt file

async function loopFunction(error_symbol) {
  for (const symbol of symbolsArr) {
    await getData(symbol); // wait for this request to settle first
    await new Promise((resolve) => setTimeout(resolve, 20)); // then cool down ~20 ms
  }
  return console.log(error_symbol);
}
loopFunction(error_symbols);
The Fetch API uses promises to handle requests, so you need to await each call to make the loop wait for one request to finish before starting the next. Note that setTimeout on its own returns a timer id, not a promise, so awaiting it directly does nothing; wrap it in a promise you can await.
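Note that await only pauses on a promise, and the getData in the question never returns its fetch chain, so the loop above still wouldn't wait for each request to finish. A minimal sketch of the missing piece (same names as the question; the CORS proxy and API token are omitted here):
function getData(symbol) {
  // return the fetch chain so `await getData(symbol)` actually waits
  return fetch(`https://eodhistoricaldata.com/api/fundamentals/${symbol}`)
    .then((res) => {
      if (!res.ok) throw new Error(`Symbol ${symbol} is 'empty'`);
      return res.json();
    })
    .catch(() => error_symbols.push(symbol));
}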

Related

Synchronize API calls in a for loop in Vue.js

I am trying to get a synchronized flow inside a for loop, where each iteration makes an API call.
The flow is summarized as follows:
A Demo1() function contains a for loop that executes one item at a time, making an API call in each iteration.
A MainFunction() resumes after the for loop has finished and makes the final API call.
The supporting API calls for the same function are shown in the code and seem fairly self-explanatory.
The code structure is as follows:
<script>
...
async MainFunction() {
  if (dataPresent) {
    let clientFieldDetails = await this.Demo1()
      .then(() => {
        // This does the final API call based on the results fetched from the for loop
        this.processFinalAPICall();
      })
      .catch(err => {
        console.log("Something went wrong... ", err);
      });
  } else {
    console.log("No data..");
  }
},
async Demo1() {
  // Traversing around each fieldInfo
  // this.dataPresent.items.forEach(item => {
  // Replacing with a normal for loop
  for (item of this.dataPresent.items) {
    if (item.model) {
      // Upload item model API call
      this.uploadItem(item.model)
        .then(response => {
          // PUT API call
          this.putItemModel().then(response => {
            var result = response.data;
            // Add itemModel fetched from API response
            models.push(result);
            console.log(result);
          });
        })
        .catch(err => console.log("Axios err: ", err));
    } else {
      // Inside item price details
      console.log("inside item price");
      // API call for fetching price info
      this.getitemPriceInfo(item.id);
    }
  }
},
getitemPriceInfo(itemid) {
  if (itemid) {
    // API call for fetching price info
    this.getPriceinEuro(itemid);
    itemsPrice.push(result);
  } else {
    this.getPriceWorldWide().then(response => {
      if (response.data === "item not created") {
        // Create new item API call
        this.createNewItem(item.id);
      } else {
        var result = response.data;
        // Fetched API response for itemsPrice
        itemsPrice.push(result);
        console.log(result);
        // Update item API call
        this.updatePriceItem(item.id);
      }
    });
  }
},
// EDIT: Adding API call
async getPriceinEuro(itemId) {
  await this.$axios
    .get("/auth/getEuroPrice", {
      params: {
        itemID: itemId
      }
    })
    .then(response => {
      console.log(" Resp :" + response.data);
      let result = response.data;
      // This gives me the price
      itemsPrice.push(result);
    });
},
processFinalAPICall() {
  // get itemsPrice and models price pushed
  var apiBodyModel = [];
  this.models.forEach(model => {
    var uri = {
      url: "/modelInfo/" + model
    };
    apiBodyModel.push(uri);
  });
  var apiBodyPrice = [];
  this.itemsPrice.forEach(price => {
    var uri = {
      url: "/priceInfo/" + price
    };
    apiBodyPrice.push(uri);
  });
  // Create a POST body request from the above data and POST
  ....
}
...
</script>
The code currently runs through the for loop without waiting for the API calls to finish: it executes processFinalAPICall() first and then the API calls. I am not sure about async/await; if I used it wrong, please excuse me. How do I get the for loop to execute first and only then initiate processFinalAPICall() from MainFunction?
Please let me know if I am doing it the right way. Thanks :)
I am using Node version 8.11 due to our project dependencies.
EDITED: Added the API function call for reference
I'm fairly sure that the problem you have lies in your Demo1 function, which, very broadly, looks like this:
async Demo1() {
  [ARRAY OF ITEMS].forEach(item => {
    this.uploadItem(item)
      .then(response => {
        // do some stuff
      })
      .catch(err => /* log errors */);
  });
}
You don't await the upload here, so when you call Demo1() in MainFunction(), it'll go through the uploads without waiting for the previous one to finish first. I think the easiest way to get around this would be to use a for-of loop instead, since you pass a function to .forEach, and that just complicates things.
So instead of [ARRAY OF ITEMS].forEach(item => { ..., you can do this:
async Demo1() {
  for (let item of [ARRAY OF ITEMS]) {
    await this.uploadItem(item);
  }
}
And your modified Demo1 function would look like this:
async Demo1() {
  // Traversing around each fieldInfo
  for (let item of this.dataPresent.items) {
    if (item.model) {
      // Upload item model API call
      await this.uploadItem(item.model);
      // PUT API call
      let response = await this.putItemModel();
      var result = response.data;
      // Add itemModel fetched from API response
      models.push(result);
      console.log(result);
    } else {
      // Inside item price details
      console.log("inside item price");
      // API call for fetching price info
      this.getitemPriceInfo(item.id);
    }
  }
}
Note that I haven't tested this, because I don't have the full code to plug it into, so I can't fully guarantee that it'll work. If it doesn't, let me know, so I can help fix it.
Here's a bad mockup of this solution.
I guess it's a simple changeover from what @marsnebulasoup added in his mockup solution. From your comments I tried to replace the upload method with an Axios call. You are missing the return keyword before the Axios call, i.e.,
upload(item) {
  console.log("Item value: " + item);
  return axios.get("https://jsonplaceholder.typicode.com/posts/" + item)
    .then(response => {
      console.log("Done processing API call (Times:" + item);
      console.log(response.data);
    })
    .catch(console.log);
}
A simple code changeover, based on the previous answer by @marsnebulasoup, is designed in Replit:
EDIT: Added correct reference:
Code Reference: https://replit.com/join/comkahdm-gagangowda89
Cheers.

How can multiple unconnected async events await a single promise

I have a system where I need an id from a server to handle events. I should only fetch the id if/when the first event happens, but after that, I need to use the same id for each subsequent event. I know how to use async/await etc., so I have some code like this
var id = "";
async function handleEvent(e) {
if (! id ) {
let response = await fetch(URL)
if (response.ok) {
let json = await response.json();
id = json.id ;
}
}
// use id to handle event
}
But my problem is that I could receive multiple events before I receive a response, so I get multiple overlapping calls to fetch a new id.
How can I have multiple asynchronous calls to handleEvent, with the first one processing the fetch and any subsequent call waiting for it to complete to access the result?
Create a function to ensure you only make one request for the id using a lazy promise.
const URL = 'whatever'

let idPromise // so lazy 🦥

const getId = async () => {
  const response = await fetch(URL)
  if (!response.ok) {
    throw response
  }
  return (await response.json()).id
}

const initialise = () => {
  if (!idPromise) {
    idPromise = getId()
  }
  return idPromise
}

// and assuming you're using a module system
export default initialise
Now all you have to do is prefix any other call with initialise() to get the ID, which will only happen once.
import initialise from 'path/to/initialise'

async function handleEvent(e) {
  const id = await initialise()
  // do stuff with the ID
}
The currently accepted response relies on a global variable, which is not ideal. Another option is to use a class.
class IDManager {
  getId(URL) {
    if (this.id) {
      return this.id;
    }
    // cache the promise itself, and parse the body so callers get usable data
    this.id = fetch(URL).then(response => response.json());
    return this.id;
  }
}
Then when you call getId, you simply await the result. If no previous request has been made, a network request will be sent. If there is already a pending request, every call will await the same result. If the promise is already resolved, you will get the result immediately.
const idManager = new IDManager();

async function handleEvent() {
  const id = await idManager.getId(URL);
  // Do stuff with the ID.
}
It's not clear why your function is parameterized by "e".
I would write it in a more straightforward manner:
async function request(URL) {
  let response = await fetch(URL);
  if (response.ok) {
    let json = await response.json();
    return json.id;
  }
  return false;
}
Then if the sequence of your calls matters, write them one by one.
If not, then you could use Promise.all (or maybe Promise.allSettled) to run them all at once.
https://developer.mozilla.org/ru/docs/Web/JavaScript/Reference/Global_Objects/Promise/all
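For example, a minimal sketch of the Promise.allSettled variant, reusing the request function above (Promise.allSettled needs Node 12.9+ or a modern browser):
async function requestAll(urls) {
  // start every request at once; allSettled never rejects,
  // so one failed URL doesn't discard the other results
  const settled = await Promise.allSettled(urls.map((u) => request(u)));
  return settled
    .filter((r) => r.status === 'fulfilled')
    .map((r) => r.value);
}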
The solution to this turned out to be a little different from the previous answers. I thought I would post how I made it work in the end. The answers from @phil and @vaelin really helped me figure this out.
Here was my solution...
class IDManager {
  async fetchID(resolve, reject) {
    const response = await fetch(URL, {});
    const id = await response.json();
    resolve(id);
  }

  async getID() {
    if (this.id === undefined) {
      if (this.promise === undefined) {
        var self = this;
        this.promise = new Promise(this.fetchID).then(function (id) { self.id = id; });
      }
      await this.promise;
    }
    return this.id;
  }
}
The problem was that awaiting the fetch in the getID call took a couple of seconds. During that time there were often multiple calls to getID, each of which would have initiated another fetch. I avoided that by wrapping the fetch and response.json calls in another promise which is created instantly, so the duplicates are avoided.
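For what it's worth, the same promise-caching idea can be written without the extra Promise constructor by storing the fetch chain itself; a hedged sketch (??= is logical nullish assignment and needs a modern runtime; URL as in the snippets above):
class IDManager {
  getID() {
    // cache the promise, not the value: concurrent callers share one fetch
    this.promise ??= fetch(URL).then((response) => response.json());
    return this.promise;
  }
}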

Run HTTP requests in chunks

I want to run about 100k HTTP requests in configurable chunks, with a configurable timeout between chunks. The requests are based on data provided in a .csv file.
It doesn't work because I am getting a TypeError, but when I remove the () after f, it doesn't work either.
I would be very grateful for a little help. Probably the biggest problem is that I don't really understand exactly how promises work; I tried multiple solutions but wasn't able to achieve what I want.
The timeout feature will probably give me even more headaches, so I would appreciate any tips for that too.
Can you please help me understand why it doesn't work?
Here is the snippet:
const rp = require('request-promise');
const fs = require('fs');
const { chunk } = require('lodash');

const BATCH_SIZE = 2;
const QUERY_PARAMS = ['clientId', 'time', 'changeTime', 'newValue'];

async function update(id, time, query) {
  const options = {
    method: 'POST',
    uri: `https://requesturl/${id}?query=${query}`,
    body: {
      "prop": {
        "time": time
      }
    },
    headers: {
      "Content-Type": "application/json"
    },
    json: true
  };
  return async () => { return await rp(options) };
}
async function batchRequestRunner(data) {
  const promises = [];
  for (row of data) {
    row = row.split(',');
    promises.push(update(row[0], row[1], QUERY_PARAMS.join(',')));
  }
  const batches = chunk(promises, BATCH_SIZE);
  for (let batch of batches) {
    try {
      Promise.all(
        batch.map(async f => { return await f(); })
      ).then((resp) => console.log(resp));
    } catch (e) {
      console.log(e);
    }
  }
}
async function main() {
  const input = fs.readFileSync('./input.test.csv').toString().split("\n");
  const requestData = input.slice(1);
  await batchRequestRunner(requestData);
}

main();
Clarification for the first comment:
I have a csv file which looks like this:
clientId,startTime
123,13:40:00
321,13:50:00
the file size is ~100k rows
the file contains information on how to update the time for a particular clientId in the database. I don't have access to the database, but I have access to an API which allows updating entries in the database.
I cannot make 100k calls at once because my network is limited (I work remotely because of coronavirus), it consumes a lot of memory, and the API can also be limited and may crash if I make all the requests at once.
What I want to achieve:
Load the csv into memory and convert it to an array
Handle the API requests in chunks: for example, take the first two rows from the array, make the API calls based on them, wait 1000 ms, take another two rows, and continue processing until the end of the array (csv file)
Well, it seems like this is a somewhat classic case where you want to process an array of values with some asynchronous operation and, to avoid consuming too many resources or overwhelming the target server, you want no more than N requests in flight at the same time. This is a common problem for which there are pre-built solutions. My go-to solution is a small piece of code called mapConcurrent(). It's analogous to array.map(), but it assumes a promise-returning asynchronous callback, and you pass it the max number of items that should ever be in flight at the same time. It then returns a promise that resolves to an array of results.
Here's mapConcurrent():
// takes an array of items and a function that returns a promise
// returns a promise that resolves to an array of results
function mapConcurrent(items, maxConcurrent, fn) {
  let index = 0;
  let inFlightCntr = 0;
  let doneCntr = 0;
  let results = new Array(items.length);
  let stop = false;

  return new Promise(function(resolve, reject) {
    function runNext() {
      let i = index;
      ++inFlightCntr;
      fn(items[index], index++).then(function(val) {
        ++doneCntr;
        --inFlightCntr;
        results[i] = val;
        run();
      }, function(err) {
        // set flag so we don't launch any more requests
        stop = true;
        reject(err);
      });
    }

    function run() {
      // launch as many as we're allowed to
      while (!stop && inFlightCntr < maxConcurrent && index < items.length) {
        runNext();
      }
      // if all are done, then resolve parent promise with results
      if (doneCntr === items.length) {
        resolve(results);
      }
    }

    run();
  });
}
Your code can then be structured to use it like this:
function update(id, time, query) {
  const options = {
    method: 'POST',
    uri: `https://requesturl/${id}?query=${query}`,
    body: {
      "prop": {
        "time": time
      }
    },
    headers: {
      "Content-Type": "application/json"
    },
    json: true
  };
  return rp(options);
}

function processRow(row) {
  let rowData = row.split(",");
  return update(rowData[0], rowData[1], rowData[2]);
}

function main() {
  const input = fs.readFileSync('./input.test.csv').toString().split("\n");
  const requestData = input.slice(1);
  // process this entire array with up to 5 requests "in-flight" at the same time
  mapConcurrent(requestData, 5, processRow).then(results => {
    console.log(results);
  }).catch(err => {
    console.log(err);
  });
}
You can obviously adjust the number of concurrent requests to whatever number you want. I set it to 5 here in this example.
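If you specifically want the batch-plus-pause behaviour described in the question (send a chunk, wait a configurable delay, send the next), here is a hedged sketch built on lodash's chunk and the processRow() above; it assumes a runtime with Promise.allSettled (Node 12.9+):
const { chunk } = require('lodash');

const delay = (ms) => new Promise((resolve) => setTimeout(resolve, ms));

async function batchRequestRunner(data, batchSize = 2, pauseMs = 1000) {
  const results = [];
  for (const batch of chunk(data, batchSize)) {
    // run one chunk at a time; allSettled keeps going past failures
    const settled = await Promise.allSettled(batch.map(processRow));
    results.push(...settled);
    await delay(pauseMs); // configurable timeout between chunks
  }
  return results;
}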

use fetch to load paginated data recursively

I would like to fetch data from this API: "https://swapi.co/api/planets". The data is paginated like
https://swapi.co/api/planets/?page=1, https://swapi.co/api/planets/?page=2....
I want to build an API which accepts url, page, and callback as arguments. It will fetch all of the data page by page, until it reaches the page specified in the arguments.
E.g.
loadData('https://swapi.co/api/planets', 5, cb)
this will load data from page1, page2, page3, page4 and page5.
Here is my attempt; however, it doesn't work. Can someone please point me in the right direction?
function fn(url, page, pages, cb) {
  return new Promise((resolve, reject) => {
    const endpoint = `${url}/?page=${page}`;
    fetch(endpoint).then(response => {
      if (response.status !== 200) {
        throw `${response.status} ${response.statusText}`;
      }
      response.json().then(data => {
        cb(data);
        if (page <= pages) {
          fn(url, page + 1, pages, cb)
            .then(resolve)
            .catch(reject);
        } else {
          resolve();
        }
      });
    });
  });
}

function cb(data) {
  console.log(data);
}

fn('https://swapi.co/api/planets', 1, 3, cb).then(() => {});
Follow-up question: if instead of passing in a callback as the argument, I want to pass in an array to collect the data loaded, how should I tweak this API?
As has been said before, you probably don't need to wait for each call to finish before doing the next, but if you really want to, I find it much easier to parse (as a human) using async/await.
I've done a quick basic version in this snippet for you to see, with no error handling or checking that the data is right. Maybe this approach might suit you better?
async function getPage(page) {
  let response = await fetch(`https://swapi.co/api/planets/?page=${page}`);
  let data = await response.json();
  return data;
}

async function getPages(startPage, endPage) {
  let currentPage = startPage;
  let finalArr = [];
  while (currentPage <= endPage) {
    const pageData = await getPage(currentPage);
    console.log('adding page', currentPage);
    finalArr.push(...pageData.results);
    currentPage++;
  }
  console.log(finalArr);
}

getPages(1, 3);
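As for the follow-up about collecting the data into an array instead of passing a callback: a small, untested tweak of the snippet above that returns the accumulated array (name getPagesToArray is mine):
async function getPagesToArray(startPage, endPage) {
  const finalArr = [];
  for (let page = startPage; page <= endPage; page++) {
    const pageData = await getPage(page);
    finalArr.push(...pageData.results); // accumulate instead of calling back
  }
  return finalArr;
}

// usage: const planets = await getPagesToArray(1, 3);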
Since each request does not depend on the previous one, I would use the Promise.all method instead of requesting each page only after the previous request has completed.
You can do something like:
function loadData(url, pages, cb) {
  const urls = [];
  for (let i = 1; i <= pages; i++) {
    urls.push(`${url}/?page=${i}`);
  }
  return Promise.all(urls.map(url => fetch(url).then(response => response.json())))
    .then(responses => {
      var results = responses.reduce((accum, response) => [...accum, ...response.results], []);
      cb(results);
      return results;
    });
}

Perform asynchronous actions semi-synchronously in Axios

I have the following code:
/**
 * Fetch stats from api
 */
fetchStats() {
  this._isFetching = true;
  // fetch stats after building url and replacing invalid characters
  return new Promise(async (resolve, reject) => {
    await API.fetchStats(this.rsn)
      .then(jres => {
        this.skills = jres.main.skills;
        this._isFetching = false;
        resolve('success');
      })
      .catch(err => {
        console.log(err);
        console.log('error retrieving stats');
        this._isFetching = false;
        reject('Failed to retrieve stats');
      })
      .finally(() => {
        this._isFetching = false;
      });
  });
}
I thought making it async with await would make it wait until it got the response before continuing. Returning the promise is something I added in testing to see if I could make it synchronous.
Then my code that consumes this method:
memberCollection.forEach(async el => {
  await el.player.fetchStats()
    .then(() => {
      console.log(`Refreshed ${el.player.rsn}'s account`);
    })
    .catch(console.log(`Failed to refresh ${el.player.rsn}'s account`));
});
My thinking was that it would wait till it got a response and then console.log either a successful or a failed refresh. What I instead see is a whole bunch of success messages followed by a string of failure messages, indicating that it runs both the then and the catch callbacks in the forEach. Does anyone know how I can make this work?
My issue is that Axios keeps timing out (my speculation is that it is due to the number of requests being sent off and the fact that there is a 5-10 sec delay as it pulls from the db). If I navigate to the API URL manually it works, and if I just do one member (as opposed to forEach) it works fine. So I'm trying to limit the number of requests fired off at once. I have tried setting my Axios timeout to 10, 20, and 60 seconds, but it made no improvement.
Solution code:
const asyncForEach = async (arr, cb) => {
  for (let i = 0; i < arr.length; i++) {
    let el = arr[i];
    try {
      let res = await cb(el);
    } catch (err) { console.log(err) };
    if (el.player && el.player.rsn) console.log(`Processed ${el.player.rsn}`);
  }
  console.log('done processing in asyncForEach');
}
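For reference, a sketch of how the helper replaces the original forEach (memberCollection and fetchStats as defined in the question):
asyncForEach(memberCollection, (el) => el.player.fetchStats());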
This is not linked to Axios but to async/await.
Consider:
function slow(i) {
  return new Promise((ok, ko) => {
    return setTimeout(_ => ok(i), 1000);
  });
}

async function asyncForEach(arr, cb) {
  for (var i = 0; i < arr.length; ++i) {
    let el = arr[i];
    let res = await cb(el);
    console.log('async', res, new Date);
  }
}
/*
#foreach does not wait, but async and reduce are spaced by one second
foreach 4 2019-10-14T13:43:47.059Z
foreach 5 2019-10-14T13:43:47.071Z
foreach 6 2019-10-14T13:43:47.071Z
async 1 2019-10-14T13:43:47.071Z
async 2 2019-10-14T13:43:48.073Z
async 3 2019-10-14T13:43:49.074Z
reduce 7 2019-10-14T13:43:50.076Z
reduce 8 2019-10-14T13:43:51.078Z
reduce 9 2019-10-14T13:43:52.080Z
*/
async function main() {
  await [4, 5, 6].forEach(async el => {
    let res = await slow(el);
    console.log('foreach', res, new Date);
  });
  await asyncForEach([1, 2, 3], slow);
  await [7, 8, 9].reduce((acc, el) => acc.then(async _ => {
    let res = await slow(el);
    console.log('reduce', res, new Date);
    return;
  }), Promise.resolve());
}

main();
As you can see from the timestamps, forEach does not wait for slow to finish;
however, asyncForEach does wait on each iteration.
What you may want to do is either:
write a for loop as done in asyncForEach, or
use standard promises (stacking them):
[1, 2, 3].reduce((acc, el) => acc.then(_ => {
  return slow(el);
}), Promise.resolve());
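The reduce pattern also composes with await if you wrap it; a hedged sketch (name runSequentially is mine, slow as defined above) that lets a caller wait for the whole sequence:
async function runSequentially(items, task) {
  // chain one promise per item; each task starts after the previous settles
  return items.reduce(
    (acc, el) => acc.then(() => task(el)),
    Promise.resolve()
  );
}

// usage: await runSequentially([1, 2, 3], slow);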
