Only when a POST request fails should the Link stop - javascript

Lobbies.json
{"Lobbies":[]}
jsonWorker.js
const fs = require('fs');

function lobbyUpdater(name, password) {
    let rawdata = fs.readFileSync('Lobbies.json');
    let data = JSON.parse(rawdata);
    // data.Lobbies.length = 0; // Remove this for production
    let newLobby = {"Name": name, "Password": password, "Players": 1, "Mode": "", "Word": ""};
    data.Lobbies.push(newLobby);
    fs.writeFile('Lobbies.json', JSON.stringify(data), 'utf8', function(err) {
        if (err) throw err;
    });
}

function lobbyAvailable(name) {
    let rawdata = fs.readFileSync('Lobbies.json');
    let data = JSON.parse(rawdata);
    for (let i = 0; i < data.Lobbies.length; i++) {
        if (data.Lobbies[i].Name.toUpperCase() === name.toUpperCase()) {
            return false;
        }
    }
    return true;
}

module.exports = {
    lobbyUpdater,
    lobbyAvailable
};
POST request handler in index.js
app.post('/newLobby', (req, res) => {
    console.log("Lobby Name:", req.body.lobbyName);
    console.log("Lobby Password:", req.body.lobbyPassword);
    const jsonWorker = require('./jsonWorker');
    if (jsonWorker.lobbyAvailable(req.body.lobbyName)) {
        jsonWorker.lobbyUpdater(req.body.lobbyName, req.body.lobbyPassword);
    }
    else {
        res.sendStatus(403);
        console.log("Stopped A Lobby From Being Created");
    }
});
React code for the Link that is being followed
<Link id="testLink" to="/Waiting-For-Players"><button id="submit" onClick={sendingRequest} className="WaitingForPlayers">Create Lobby</button></Link>
sendingRequest function
function sendingRequest(event) {
    event.preventDefault();
    $.post("http://localhost:4000/newLobby",
        {
            lobbyName: document.getElementById('lobbyName').value,
            lobbyPassword: document.getElementById('lobbyPassword').value
        },
        function (data, status) {
            console.log("Data", data);
        })
        .fail(function(jqXHR, textStatus, errorThrown) {
            console.log("Error received:", errorThrown);
            console.log("Stopping the post request.");
            return;
        })
        .then(function() {
            // Follow the Link component
            let linkToUse = document.getElementById("testLink");
            window.location.assign(linkToUse.to);
            console.log("Testing");
        });
}
My goal is to only have the Link stopped if the post request returns a 403 Forbidden, since that means the lobby name was already in the JSON; the link should go through otherwise. I believe the issue is that the .then callback isn't running, as its console.log never shows up, but I'm really not sure why it isn't working. Any help would be appreciated, thanks!
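One observation worth adding, as a minimal sketch rather than a definitive fix: the Express route above only sends a response in the 403 branch, so on the success path the jQuery promise never settles and neither handler runs. The sketch below assumes the route should answer with a 200 when the lobby is created and that the client should navigate only on success; the hard-coded navigation target is an assumption for illustration:
// index.js (sketch): always send a response so the client promise settles
app.post('/newLobby', (req, res) => {
    const jsonWorker = require('./jsonWorker');
    if (jsonWorker.lobbyAvailable(req.body.lobbyName)) {
        jsonWorker.lobbyUpdater(req.body.lobbyName, req.body.lobbyPassword);
        res.sendStatus(200); // assumed success response, not in the original route
    } else {
        res.sendStatus(403);
    }
});
// React side (sketch): navigate only when the POST succeeds, stay put on 403
function sendingRequest(event) {
    event.preventDefault();
    $.post("http://localhost:4000/newLobby", {
        lobbyName: document.getElementById('lobbyName').value,
        lobbyPassword: document.getElementById('lobbyPassword').value
    })
    .done(function () {
        window.location.assign("/Waiting-For-Players"); // assumed target for illustration
    })
    .fail(function (jqXHR) {
        console.log("Lobby creation rejected with status", jqXHR.status);
    });
}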

Related

JavaScript for loop variable is strange when writing to a file

I have this code in './utils/url.js'. It basically builds the application/x-www-form-urlencoded content form:
const ContentForm = () => {
    let params = new URLSearchParams();
    const randomString = Math.random().toString(36).substring(2, 15) + Math.random().toString(36).substring(2, 15);
    params.append('email', `${randomString}@gmail.com`);
    return params;
}
module.exports = ContentForm;
The email parameter is a random string.
and index.js:
const axios = require('axios').default;
const fs = require('fs');
const params = require('./utils/url')

for (let i = 0; i < 1000; i++) {
    const config = {
        headers: {
            'Content-Type': 'application/x-www-form-urlencoded'
        }
    }
    // sending post with data of web/application to the url http://somewhere.com/my-account/
    axios.post('http://somewhere.com/my-account/', params(), config)
        .then(function (response) {
            console.log("request successfully made")
        })
        .catch(function (error) {
            // seeing the error response code
            console.log(error.response.status);
        })
        .finally(function () {
            // always executed
            fs.writeFileSync('./r.txt', String(i));
        })
}
I want the 'i' variable written to ./r.txt; it effectively shows which request is being sent right now. The problem is that what ends up in the file is really strange:
look at the video of r.txt changes here
You are running 1000 asynchronous operations in a loop. You start them sequentially, but they all run in parallel. Then, as each one finishes, it calls fs.writeFileSync(), and it's a race to see which one calls it when. The order in which they finish is random, which is what I think your video shows.
You can sequence them to be in order using await like this:
const axios = require("axios").default;
const fs = require("fs");
const params = require("./utils/url");
async function run() {
for (let i = 0; i < 1000; i++) {
const config = {
headers: {
"Content-Type": "application/x-www-form-urlencoded",
},
};
// sending post with data of web/appliaction the url http://somewhere.com/my-account/
await axios
.post("http://somewhere.com/my-account/", params(), config, {})
.then(function(response) {
console.log("request succesfully made");
})
.catch(function(error) {
// seeing the error response code
console.log(error.response.status);
})
.finally(function() {
// always executed
fs.writeFileSync("./r.txt", String(i));
});
}
}
run().then(() => {
console.log("done");
}).catch(err => {
console.log(err);
});
Or, reorganized a bit to not mix await and .then() in the same function like this:
const axios = require("axios").default;
const fs = require("fs");
const params = require("./utils/url");
async function run() {
for (let i = 0; i < 1000; i++) {
const config = {
headers: {
"Content-Type": "application/x-www-form-urlencoded",
},
};
// sending post with data of web/appliaction the url http://somewhere.com/my-account/
try {
let response = await axios.post("http://somewhere.com/my-account/", params(), config, {});
console.log("request succesfully made");
} catch(error) {
console.log(error.response.status, error);
} finally {
fs.writeFileSync("./r.txt", String(i));
}
}
}
run().then(() => {
console.log("done");
}).catch(err => {
console.log(err);
});
async function writeToFile() {
    for (let i = 0; i < 1000; i++) {
        const config = {
            headers: {
                "Content-Type": "application/x-www-form-urlencoded",
            },
        };
        // sending post with data of web/application to the url http://somewhere.com/my-account/
        await axios
            .post("http://somewhere.com/my-account/", params(), config)
            .then(function (response) {
                console.log("request successfully made");
            })
            .catch(function (error) {
                // seeing the error response code
                console.log(error.response.status);
            })
            .finally(function () {
                // always executed
                fs.writeFileSync("./r.txt", String(i));
            });
    }
}

How can I integrate multiple functions into my post route and pass on objects

I want to integrate the Stripe API with my project. I am already collecting all of the needed data and sending it to my post route.
I want to make charges to a customer and already have all the functions to do so, if I were to invoke them one by one. How can I integrate all of the functions into my post route so everything is processed at once? Also, I don't know how to pass data from one function to another, so that in the end there is a chain of functions working on the same data. My post route and functions:
router.post("/checkout", async function (req, res, next) {
if (!req.session.cart) {
return res.redirect("/shopping-cart");
}
// You can return promise directly
let createCustomer = function () {
var param ={};
param.email = req.body.email;
param.name= req.body.name;
param.description ="";
return stripe.customers.create(param, function (err, customer) {
if (err) {
console.log("err:" + err);
}
if (customer) {
console.log("success: " + JSON.stringify(customer, null, 2));
} else {
console.log("something went wrong");
}
});
};
let createToken = function () {
let param ={};
param.card = {
number: req.body.card,
exp_month: req.body.exp_month,
exp_year: req.body.exp_year,
cvc: req.body.security
}
return stripe.tokens.create(param, function (err, token) {
if (err) {
console.log("err:" + err);
console.log(param);
}
if (token) {
console.log("success: " + JSON.stringify(token, null, 2));
console.log(req.body);
} else {
console.log("something went wrong");
}
});
};
let addCardToCustomer = function () {
console.log(createdCustomer);
return stripe.customers.createSource(customer.id, {source: token.id}, function (err, card) {
if (err) {
console.log("err:" + err);
console.log(param);
}
if (card) {
console.log("success: " + JSON.stringify(card, null, 2));
} else {
console.log("something went wrong");
}
});
};
try {
const createdCustomer = await createCustomer(); // promise 1
const createdToken = await createToken();
const addedCardToCustomer = await addCardToCustomer(createdCustomer,createdToken ); // await addCardToCustomer(createdCustumer); to pass created customer info to next request
// const chargeCustomerThroughCustomerID = await chargeCustomerThroughCustomerID(); // promise 3
// more things...
res.send("success");
} catch (e) {
console.log(`error ${e}`)
};
});
You can chain your promises or use async/await and do one task at a time, in the order you need. You can also pass data from one promise to another, as shown below.
// You can return promise directly
let createCustomer = function () {
    return stripe.customers.create(param);
};

let addCardToCustomer = function () {
    return stripe.customers.createSource(customer.id, {source: token.id});
};

// or use async / await
let chargeCustomerThroughCustomerID = async function () {
    const data = await stripe.charges.create(param).catch((e) => { console.log(`error ${e}`); throw e });
    // do something with data
    return data;
};

let chargeCustomerThroughTokenID = async function () {
    const data = await stripe.charges.create(param).catch((e) => { console.log(`error ${e}`); throw e });
    // do something with data
    return data;
};

router.post("/checkout", async function(req, res, next) { // Async
    if (!req.session.cart) {
        return res.redirect("/shopping-cart");
    }
    var cart = new Cart(req.session.cart);
    try {
        const createdCustomer = await createCustomer(); // promise 1
        const addedCardToCustomer = await addCardToCustomer(); // await addCardToCustomer(createdCustomer); to pass created customer info to next request
        const chargedCustomer = await chargeCustomerThroughCustomerID(); // promise 3
        // more things...
        res.send(...);
    }
    catch (e) {
        console.log(`error ${e}`);
    }
});
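Since the question also asks how to hand data from one step to the next, here is a small sketch of the same chain with the results passed explicitly as parameters; the parameter names and the 500 error response are illustrative assumptions, not part of the answer above:
let createCustomer = function (param) {
    return stripe.customers.create(param);
};
let createToken = function (card) {
    return stripe.tokens.create({ card });
};
// customer and token are the resolved results of the two calls above
let addCardToCustomer = function (customer, token) {
    return stripe.customers.createSource(customer.id, { source: token.id });
};
router.post("/checkout", async function (req, res, next) {
    try {
        const customer = await createCustomer({ email: req.body.email, name: req.body.name });
        const token = await createToken({
            number: req.body.card,
            exp_month: req.body.exp_month,
            exp_year: req.body.exp_year,
            cvc: req.body.security
        });
        await addCardToCustomer(customer, token);
        res.send("success");
    } catch (e) {
        console.log(`error ${e}`);
        res.status(500).send("error"); // illustrative error response
    }
});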

Express doesn't get return of other function querying MongoDB [duplicate]

This question already has answers here:
How do I return the response from an asynchronous call?
(41 answers)
Closed 3 years ago.
I'm working on a simple API key authentication; I just want to verify the given key against the user-provided key.
I have a separate file with the function that queries the database and returns true/false along with the user object.
But in my route.js file, the returned object is undefined, even though in my auth.js file it isn't.
I tried making the function in router.get an async function using express-promise-router and awaiting the result with var user = await auth.verify(req.params.uid, req.get("token")), but I don't really know how async works.
router.js
[...]
router.get('/list/:uid', function(req, res) {
    var user = auth.verify(req.params.uid, req.get("token"))
    console.log("User: " + user) // <-- Undefined
    if (user.status) {
        res.send("Success")
    } else {
        res.status(403)
        res.json({status: 403, error: "Unkown User / Token"})
    }
})
[...]
auth.js
var db = require('./db')
var ObjectId = require('mongodb').ObjectId;

module.exports = {
    verify: (uid, key) => {
        try {
            var collection = db.get().collection('users')
            const obj_id = new ObjectId(uid)
            const query = { _id: obj_id }
            collection.find(query).limit(1).toArray(function(err, user) {
                var status = 0;
                var usr = {};
                if (err) { throw err } else { status = 1 }
                if (user.length <= 0) { throw "NotExistingExc"; status = 0 } else {
                    usr = user[0];
                    if (key != usr.api) status = 0
                }
                var returnObj = {
                    status: status,
                    user: usr
                } /* --> Is {
                        status: 1,
                        user: {
                            _id: d47a2b30b3d2770606942bf0,
                            name: 'Sh4dow',
                            groups: [ 0 ],
                            api: 'YWFiMDI1MGE4NjAyZTg0MWE3N2U0M2I1NzEzZGE1YjE='
                        }
                    }
                */
                return returnObj;
            })
        } catch (e) {
            console.error(e)
            return {
                status: 0,
                user: {},
                error: e
            }
        }
    }
}
db.js (I don't know if it's needed)
var MongoClient = require('mongodb').MongoClient

var state = {
    db: null,
}

exports.connect = function(url, done) {
    if (state.db) return done()
    MongoClient.connect(url, { useNewUrlParser: true }, function(err, db) {
        if (err) return done(err)
        state.db = db
        done()
    })
}

exports.get = function() {
    return state.db.db("database")
}

exports.close = function(done) {
    if (state.db) {
        state.db.close(function(err, result) {
            state.db = null
            state.mode = null
            done(err)
        })
    }
}
I want to have the returnObj from auth.js available in the router.get of my route.js file.
Make auth.verify return a Promise which we can then await inside the router. You can just make the callback async; there is no need for express-promise-router.
router.get('/list/:uid', async function(req, res) {
    try {
        var user = await auth.verify(req.params.uid, req.get("token"))
        console.log("User: " + user)
        if (user.status) {
            res.send("Success")
        } else {
            res.status(403).json({status: 403, error: "Unkown User / Token"})
        }
    } catch (e) {
        console.error(e)
        res.status(/* */).json(/* */)
    }
})
auth
module.exports = {
    verify: (uid, key) => new Promise((resolve, reject) => {
        var collection = db.get().collection('users')
        const obj_id = new ObjectId(uid)
        const query = { _id: obj_id }
        collection.find(query).limit(1).toArray(function(err, user) {
            var status = 0;
            var usr = {};
            if (err) {
                reject(err)
                return
            } else {
                status = 1
            }
            if (user.length <= 0) {
                reject(new Error("NotExistingExc"))
                return
            } else {
                usr = user[0]
                if (key != usr.api) status = 0
            }
            var returnObj = {
                status: status,
                user: usr
            }
            resolve(returnObj);
        })
    })
}
In short, the reason you get undefined is that the code in auth.js is asynchronous. But you're really close. The toArray method in MongoDB returns a promise when called without a callback, so you need to make sure you return that promise and then consume it in the router correctly.
In auth.js, make sure verify returns a promise - just add return!
return collection.find(query).limit(1).toArray(...)
And then, change your usage of the verify to the async/await you originally tried:
router.get('/list/:uid', async function(req, res) {
    var user = await auth.verify(req.params.uid, req.get("token"))
    // More code here...
})
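For concreteness, here is a minimal sketch of what verify could look like when it relies on the promise returned by toArray (no callback passed); the status handling mirrors the original code and is an assumption, not part of the answer:
// auth.js (sketch): without a callback, toArray() returns a promise
module.exports = {
    verify: async (uid, key) => {
        const collection = db.get().collection('users')
        const users = await collection.find({ _id: new ObjectId(uid) }).limit(1).toArray()
        if (users.length <= 0) throw new Error("NotExistingExc")
        const usr = users[0]
        // status is 1 only when the supplied key matches the stored api key, as in the original
        return { status: key === usr.api ? 1 : 0, user: usr }
    }
}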

Different response on NodeJS and Postman [duplicate]

I want to open a page up in node and process the contents in my application. Something like this seems to work well:
var opts = {host: host, path: pathname, port: 80};
http.get(opts, function(res) {
    var page = '';
    res.on('data', function (chunk) {
        page += chunk;
    });
    res.on('end', function() {
        // process page
    });
});
This doesn't work, however, if the page returns an 301/302 redirect. How would I do that in a reusable way in case there are multiple redirects? Is there a wrapper module on top of the http to more easily handle processing http responses from a node application?
If all you want to do is follow redirects but still want to use the built-in HTTP and HTTPS modules, I suggest you use https://github.com/follow-redirects/follow-redirects.
yarn add follow-redirects
npm install follow-redirects
All you need to do is replace:
var http = require('http');
with
var http = require('follow-redirects').http;
... and all your requests will automatically follow redirects.
With TypeScript you can also install the types
npm install @types/follow-redirects
and then use
import { http, https } from 'follow-redirects';
Disclosure: I wrote this module.
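As a small usage sketch of the drop-in replacement described above (the URL is a placeholder; the example assumes the module mirrors the core http/https API, as stated):
const { https } = require('follow-redirects');
// any 301/302 hops are followed transparently before the response is handed back
https.get('https://example.com/possibly-redirecting', (res) => {
    let page = '';
    res.on('data', (chunk) => { page += chunk; });
    res.on('end', () => console.log(res.statusCode, page.length));
}).on('error', (err) => console.error(err));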
Is there a wrapper module on top of the http to more easily handle processing http responses from a node application?
request
Redirection logic in request
Make another request based on response.headers.location:
// "lib" stands for the http or https module, depending on the url
const request = function(url) {
    lib.get(url, (response) => {
        var body = [];
        if (response.statusCode == 302) {
            body = [];
            request(response.headers.location);
        } else {
            response.on("data", /*...*/);
            response.on("end", /*...*/);
        }
    }).on("error", /*...*/);
};
request(url);
Update:
Now you can follow all redirects with var request = require('request'); using the followAllRedirects param.
request({
    followAllRedirects: true,
    url: url
}, function (error, response, body) {
    if (!error) {
        console.log(response);
    }
});
Here is my (recursive) approach to download JSON with plain node, no packages required.
import https from "https";
function get(url, resolve, reject) {
https.get(url, (res) => {
// if any other status codes are returned, those needed to be added here
if(res.statusCode === 301 || res.statusCode === 302) {
return get(res.headers.location, resolve, reject)
}
let body = [];
res.on("data", (chunk) => {
body.push(chunk);
});
res.on("end", () => {
try {
// remove JSON.parse(...) for plain data
resolve(JSON.parse(Buffer.concat(body).toString()));
} catch (err) {
reject(err);
}
});
});
}
async function getData(url) {
return new Promise((resolve, reject) => get(url, resolve, reject));
}
// call
getData("some-url-with-redirect").then((r) => console.log(r));
Here is a function I use to fetch a URL that has a redirect:
const http = require('http');
const url = require('url');

function get({path, host}, callback) {
    http.get({
        path,
        host
    }, function(response) {
        if (response.headers.location) {
            var loc = response.headers.location;
            if (loc.match(/^http/)) {
                loc = url.parse(loc); // parse absolute redirect targets into host + path
                host = loc.host;
                path = loc.path;
            } else {
                path = loc;
            }
            get({host, path}, callback);
        } else {
            callback(response);
        }
    });
}
It works the same as http.get but follows redirects.
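A quick usage sketch for the helper above; the host and path are placeholders:
// collect the body of the final, post-redirect response
get({ host: 'example.com', path: '/' }, function(response) {
    var page = '';
    response.on('data', function(chunk) { page += chunk; });
    response.on('end', function() { console.log(page); });
});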
In the case of a PUT or POST request, if you receive status code 405 (Method Not Allowed), try this implementation with the "request" library and add the properties mentioned below.
followAllRedirects: true,
followOriginalHttpMethod: true
const options = {
    headers: {
        Authorization: TOKEN,
        'Content-Type': 'application/json',
        'Accept': 'application/json'
    },
    url: `https://${url}`,
    json: true,
    body: payload,
    followAllRedirects: true,
    followOriginalHttpMethod: true
};
console.log('DEBUG: API call', JSON.stringify(options));
request(options, function (error, response, body) {
    if (!error) {
        console.log(response);
    }
});
If you have an https server, change your url to use the https:// protocol.
I ran into a similar issue. My url used the http:// protocol and I wanted to make a POST request, but the server wanted to redirect it to https. It turns out that Node's http behavior is to send the redirected request with the GET method, which is not what I wanted.
What I did was change my url to the https:// protocol, and it works.
Possibly a little bit of a necromancing post here, but...
Here's a function that follows up to 10 redirects and detects infinite redirect loops. It also parses the result into JSON.
Note: it uses a callback helper (shown at the end of this post).
(TLDR; full working demo in context here or a remixed version here.)
function getJSON(url, cb) {
    var callback = errBack(cb);
    //var callback = errBack(cb, undefined, false); // replace previous line with this to turn off logging
    if (typeof url !== 'string') {
        return callback.error("getJSON:expecting url as string");
    }
    if (typeof cb !== 'function') {
        return callback.error("getJSON:expecting cb as function");
    }
    var redirs = [url],
        fetch = function(u) {
            callback.info("hitting:" + u);
            https.get(u, function(res) {
                var body = [];
                callback.info({statusCode: res.statusCode});
                if ([301, 302].indexOf(res.statusCode) >= 0) {
                    if (redirs.length > 10) {
                        return callback.error("excessive 301/302 redirects detected");
                    } else {
                        if (redirs.indexOf(res.headers.location) < 0) {
                            redirs.push(res.headers.location);
                            return fetch(res.headers.location);
                        } else {
                            return callback.error("301/302 redirect loop detected");
                        }
                    }
                } else {
                    res.on('data', function(chunk) {
                        body.push(chunk);
                        callback.info({onData: {chunkSize: chunk.length, chunks: body.length}});
                    });
                    res.on('end', function() {
                        try {
                            // convert to a single buffer
                            var json = Buffer.concat(body);
                            console.info({onEnd: {chunks: body.length, bodyLength: body.length}});
                            // parse the buffer as json
                            return callback.result(JSON.parse(json), json);
                        } catch (err) {
                            console.error("exception in getJSON.fetch:", err.message || err);
                            if (json.length > 32) {
                                console.error("json==>|" + json.toString('utf-8').substr(0, 32) + "|<=== ... (+" + (json.length - 32) + " more bytes of json)");
                            } else {
                                console.error("json==>|" + json.toString('utf-8') + "|<=== json");
                            }
                            return callback.error(err, undefined, json);
                        }
                    });
                }
            });
        };
    fetch(url);
}
Note: it uses the callback helper shown below.
You can paste this into the node console and it should run as is.
(Or, for a full working demo in context, see here.)
var
    fs = require('fs'),
    https = require('https');

function errBack(cb, THIS, logger) {
    var
        self,
        EB = function(fn, r, e) {
            if (logger === false) {
                fn.log = fn.info = fn.warn = fn.errlog = function() {};
            } else {
                fn.log = logger ? logger.log : console.log.bind(console);
                fn.info = logger ? logger.info : console.info.bind(console);
                fn.warn = logger ? logger.warn : console.warn.bind(console);
                fn.errlog = logger ? logger.error : console.error.bind(console);
            }
            fn.result = r;
            fn.error = e;
            return (self = fn);
        };
    if (typeof cb === 'function') {
        return EB(
            logger === false // optimization when not logging - don't log errors
                ? function(err) {
                    if (err) {
                        cb(err);
                        return true;
                    }
                    return false;
                }
                : function(err) {
                    if (err) {
                        self.errlog(err);
                        cb(err);
                        return true;
                    }
                    return false;
                },
            function() {
                return cb.apply(THIS, Array.prototype.concat.apply([undefined], arguments));
            },
            function(err) {
                return cb.apply(THIS, Array.prototype.concat.apply([typeof err === 'string' ? new Error(err) : err], arguments));
            }
        );
    } else {
        return EB(
            function(err) {
                if (err) {
                    if (typeof err === 'object' && err instanceof Error) {
                        throw err;
                    } else {
                        throw new Error(err);
                    }
                    return true; // redundant due to throw, but anyway.
                }
                return false;
            },
            logger === false
                ? self.log // optimization: resolves to noop when logger == false
                : function() {
                    self.info("ignoring returned arguments:", Array.prototype.concat.apply([], arguments));
                },
            function(err) {
                throw typeof err === 'string' ? new Error(err) : err;
            }
        );
    }
}
function getJSON(url, cb) {
    var callback = errBack(cb);
    if (typeof url !== 'string') {
        return callback.error("getJSON:expecting url as string");
    }
    if (typeof cb !== 'function') {
        return callback.error("getJSON:expecting cb as function");
    }
    var redirs = [url],
        fetch = function(u) {
            callback.info("hitting:" + u);
            https.get(u, function(res) {
                var body = [];
                callback.info({statusCode: res.statusCode});
                if ([301, 302].indexOf(res.statusCode) >= 0) {
                    if (redirs.length > 10) {
                        return callback.error("excessive 302 redirects detected");
                    } else {
                        if (redirs.indexOf(res.headers.location) < 0) {
                            redirs.push(res.headers.location);
                            return fetch(res.headers.location);
                        } else {
                            return callback.error("302 redirect loop detected");
                        }
                    }
                } else {
                    res.on('data', function(chunk) {
                        body.push(chunk);
                        console.info({onData: {chunkSize: chunk.length, chunks: body.length}});
                    });
                    res.on('end', function() {
                        try {
                            // convert to a single buffer
                            var json = Buffer.concat(body);
                            callback.info({onEnd: {chunks: body.length, bodyLength: body.length}});
                            // parse the buffer as json
                            return callback.result(JSON.parse(json), json);
                        } catch (err) {
                            // read with "bypass refetch" option
                            console.error("exception in getJSON.fetch:", err.message || err);
                            if (json.length > 32) {
                                console.error("json==>|" + json.toString('utf-8').substr(0, 32) + "|<=== ... (+" + (json.length - 32) + " more bytes of json)");
                            } else {
                                console.error("json==>|" + json.toString('utf-8') + "|<=== json");
                            }
                            return callback.error(err, undefined, json);
                        }
                    });
                }
            });
        };
    fetch(url);
}
var TLDs,TLDs_fallback = "com.org.tech.net.biz.info.code.ac.ad.ae.af.ag.ai.al.am.ao.aq.ar.as.at.au.aw.ax.az.ba.bb.bd.be.bf.bg.bh.bi.bj.bm.bn.bo.br.bs.bt.bv.bw.by.bz.ca.cc.cd.cf.cg.ch.ci.ck.cl.cm.cn.co.cr.cu.cv.cw.cx.cy.cz.de.dj.dk.dm.do.dz.ec.ee.eg.er.es.et.eu.fi.fj.fk.fm.fo.fr.ga.gb.gd.ge.gf.gg.gh.gi.gl.gm.gn.gp.gq.gr.gs.gt.gu.gw.gy.hk.hm.hn.hr.ht.hu.id.ie.il.im.in.io.iq.ir.is.it.je.jm.jo.jp.ke.kg.kh.ki.km.kn.kp.kr.kw.ky.kz.la.lb.lc.li.lk.lr.ls.lt.lu.lv.ly.ma.mc.md.me.mg.mh.mk.ml.mm.mn.mo.mp.mq.mr.ms.mt.mu.mv.mw.mx.my.mz.na.nc.ne.nf.ng.ni.nl.no.np.nr.nu.nz.om.pa.pe.pf.pg.ph.pk.pl.pm.pn.pr.ps.pt.pw.py.qa.re.ro.rs.ru.rw.sa.sb.sc.sd.se.sg.sh.si.sj.sk.sl.sm.sn.so.sr.st.su.sv.sx.sy.sz.tc.td.tf.tg.th.tj.tk.tl.tm.tn.to.tr.tt.tv.tw.tz.ua.ug.uk.us.uy.uz.va.vc.ve.vg.vi.vn.vu.wf.ws.ye.yt.za.zm.zw".split(".");
var TLD_url = "https://gitcdn.xyz/repo/umpirsky/tld-list/master/data/en/tld.json";
var TLD_cache = "./tld.json";
var TLD_refresh_msec = 15 * 24 * 60 * 60 * 1000;
var TLD_last_msec;
var TLD_default_filter = function(dom) { return dom.substr(0, 3) != "xn-" };

function getTLDs(cb, filter_func) {
    if (typeof cb !== 'function') return TLDs;
    var
        read, fetch,
        CB_WRAP = function(tlds) {
            return cb(
                filter_func === false
                    ? cb(tlds)
                    : tlds.filter(
                        typeof filter_func === 'function'
                            ? filter_func
                            : TLD_default_filter)
            );
        },
        check_mtime = function(mtime) {
            if (Date.now() - mtime > TLD_refresh_msec) {
                return fetch();
            }
            if (TLDs) return CB_WRAP(TLDs);
            return read();
        };
    fetch = function() {
        getJSON(TLD_url, function(err, data) {
            if (err) {
                console.log("exception in getTLDs.fetch:", err.message || err);
                return read(true);
            } else {
                TLDs = Object.keys(data);
                fs.writeFile(TLD_cache, JSON.stringify(TLDs), function(err) {
                    if (err) {
                        // ignore save error, we have the data
                        CB_WRAP(TLDs);
                    } else {
                        // get mtime for the file we just made
                        fs.stat(TLD_cache, function(err, stats) {
                            if (!err && stats) {
                                TLD_last_msec = stats.mtimeMs;
                            }
                            CB_WRAP(TLDs);
                        });
                    }
                });
            }
        });
    };
    read = function(bypassFetch) {
        fs.readFile(TLD_cache, 'utf-8', function(err, json) {
            try {
                if (err) {
                    if (bypassFetch) {
                        // after an http error, we fall back to the hardcoded basic list of tlds
                        // if the disk file is not readable
                        console.log("exception in getTLDs.read.bypassFetch:", err.message || err);
                        throw err;
                    }
                    // if the disk read failed, get the data from the CDN server instead
                    return fetch();
                }
                TLDs = JSON.parse(json);
                if (bypassFetch) {
                    // we need to update stats here as fetch called us directly
                    // instead of being called by check_mtime
                    return fs.stat(TLD_cache, function(err, stats) {
                        if (err) return fetch();
                        TLD_last_msec = stats.mtimeMs;
                        return CB_WRAP(TLDs);
                    });
                }
            } catch (e) {
                // after a JSON error, if we aren't in an http fail situation, refetch from the cdn server
                if (!bypassFetch) {
                    return fetch();
                }
                // after an http, disk, or json parse error, we fall back to the hardcoded basic list of tlds
                console.log("exception in getTLDs.read:", err.message || err);
                TLDs = TLDs_fallback;
            }
            return CB_WRAP(TLDs);
        });
    };
    if (TLD_last_msec) {
        return check_mtime(TLD_last_msec);
    } else {
        fs.stat(TLD_cache, function(err, stats) {
            if (err) return fetch();
            TLD_last_msec = stats.mtimeMs;
            return check_mtime(TLD_last_msec);
        });
    }
}

getTLDs(console.log.bind(console));

NodeJS make promise wait for completion of foreach loop

I have a NodeJS script that calls an API for users, gets several pieces of data for each user, and writes it all to a local file. I am trying to upload that file to a server once all of the data has been written to it. The problem is that the code that should upload the file runs before the file is fully populated. The code is written below. I can't figure out how to make the promise wait for the first function to complete.
var fs = require('fs');
var server = require('some-server');
var service = require('./some-service.js');
var moment = require('moment-timezone');
var csvWriter = require('csv-write-stream');
var writer = csvWriter({
    sendHeaders: false
});
var users = require('./some-users')

writer.pipe(fs.createWriteStream('myFile' + '.txt'))

service.login().then(function (response) {
    users.forEach(function (user) {
        service.getSpecificUser(user).then(function (response) {
            var myUser = JSON.parse(response)
            service.getDataForUser(user.Info).then(function (response) {
                var userData = JSON.parse(response);
                if (userData.IsValid) {
                    userData.AdditionalInfo.forEach(function (additionalInfo) {
                        service.getAdditionalInfo(myUser.Info, userData.data).then(function (response) {
                            //Collect additional info and combine final results to write into file
                            // write to output csv file
                            writer.write({
                                //write information that is of interest
                            })
                        }, function (error) {
                            console.log('error getting additional data', error);
                        })
                    })
                }
            }, function (error) {
                console.log('error getting user data', error)
            })
        }, function (error) {
            console.log('error', myUser, error)
        })
    });
}, function (error) {
    console.log('not logged', response);
}).then(function () {
    //perform uploading to server
    var fpath = 'path of file that contains downloaded data'
    console.log("Trying to upload to file: " + fpath)
    service.UploadFile(fpath, function (error, result, response) {
        if (!error) {
            console.log("Uploaded " + name);
        }
        else {
            console.log(error);
        }
    })
})
Any help would be appreciated.
You can substitute Promise.all() and Array.prototype.map() for .forEach(). The documentation for csv-write-stream appears to use .end() to complete the writes made with .write(), so it is called in the last .then().
service.login().then(function(response) {
    return Promise.all(users.map(function(user) {
        return service.getSpecificUser(user).then(function(response) {
            var myUser = JSON.parse(response)
            return service.getDataForUser(user.Info).then(function(response) {
                var userData = JSON.parse(response);
                if (userData.IsValid) {
                    return Promise.all(userData.AdditionalInfo.map(function(additionalInfo) {
                        return service.getAdditionalInfo(myUser.Info, userData.data).then(function(response) {
                            //Collect additional info and combine final results to write into file
                            // write to output csv file
                            writer.write({
                                //write information that is of interest
                            });
                        })
                    }))
                }
            })
        })
    }));
})
.then(function() {
    writer.end();
    //perform uploading to server
    var fpath = 'path of file that contains downloaded data'
    console.log("Trying to upload to file: " + fpath)
    service.UploadFile(fpath, function(error, result, response) {
        if (!error) {
            console.log("Uploaded " + name);
        } else {
            console.log(error);
        }
    })
})
.catch(function(e) {
    console.log(e)
})
