Script keeps crashing - javascript

I have some code. Its logic is: first get a file from a client (uploadFile), then upload it to another server (changeImage), then download the response (loadFile). It's all done with promises chained together, but it doesn't work - it keeps crashing. I've tried changing it a lot; in fact I've spent the whole day trying, with no result. In this version it crashes without any errors in the console. Can you help me?
<pre>
var fs = require('fs'),
http = require('http'),
url = require('url'),
multiparty = require('multiparty'),
request = require('request');
var server = new http.Server();
var ifs = require('os').networkInterfaces();
var result = Object.keys(ifs)
.map(x => [x, ifs[x].filter(x => x.family === 'IPv4')[0]])
.filter(x => x[1])
.map(x => x[1].address)[2];
console.log('\nUse this ip: ' + result);
console.log("Successfully started\n");
server.listen('80', result);
server.on('request', onRequest);
function onRequest(req, res) {
var parsed = url.parse(req.url, true);
switch (parsed.pathname) {
case '/':
case '/index.html':
fs.readFile('index.html', function(err, file) {
if (err) res.end();
res.end(file);
});
break;
case '/file':
uploadFile(req)
.then(function(a) {
return changeImage({
'uploadfile': fs.createReadStream('./files/temp.jpg'),
'ef-set': 10,
'ef-set-2': 10,
'jpeg-quality': 80
}, 'https://www.imgonline.com.ua/grid-square-result.php',
'http://www.imgonline.com.ua/',
new RegExp(/download\.php\?file=.+?\.jpg/))
})
.then(function(link) {
//it falls before here because console.log('H') here won't show 'H' :-|
loadFile(link);
})
.then(function() {
return changeImage({
'uploadfile': fs.createReadStream('./files/temp.jpg'),
'efset1': 2,
'outformat': 2,
'jpegtype': 1,
'jpegqual': 85,
'jpegmeta': 1
},
'https://www.imgonline.com.ua/add-effect-black-white-result.php', '',
new RegExp(/https:\/\/.+?\.jpg/)
);
})
.then(function(link) {
loadFile(link);
})
.then(function() {
res.end('files/temp.jpg');
})
.catch(function(err) {
console.log('ERR ', err);
});
break;
default:
fs.readFile('./' + req.url, function(err, file) {
if (err) res.end();
res.end(file);
});
}
}
function uploadFile(req) {
if (fs.existsSync('./files/temp.jpg')) {
fs.unlink('./files/temp.jpg', function(err) {
if (err) reject(err);
});
}
return new Promise(function(resolve, reject) {
var form = new multiparty.Form();
form.parse(req, function(err, fields, files) {
var path = files.uploadfile[0].path;
fs.copyFile(path, './files/temp.jpg', function(err) {
if (err) reject('ERRinCOPYING');
fs.unlink(path, function(err) {
if (err) reject(err);
var a = 0;
var timer = setInterval(function() {
if (fs.existsSync('./files/temp.jpg')) {
clearInterval(timer);
resolve();
}
}, 10);
});
});
});
});
}
function changeImage(formData, url, link, regExp) {
return new Promise(function(resolve, reject) {
request.post({
url: url,
formData: formData
}, function(err, resp, body) {
if (err) reject('ERRinREQUEST: ' + err);
link += body.match(regExp);
if (link.length > 32) {
resolve(link);
} else {
reject('ERROR! LINK WAS NOT FOUND');
}
});
});
}
function loadFile(link) {
request
.get(link)
.on('response', function(response) {
response.pipe(fs.createWriteStream('./files/temp.jpg'));
});
}
</pre>
The thing is, if I comment out uploadFile() and run the rest of the script, everything works fine; and conversely, if I comment out the rest of the script and leave only uploadFile() uncommented, everything works too.
The console shows nothing. On the client side I just see the connection being refused, but there is nothing in the console. Even if I put a console.log() right inside 'return new Promise()' in changeImage, it shows nothing.
UPD: I ran the script with "node server" instead of the usual "supervisor server", and it started working without failures. But why? :\

You are likely running into a file system permission issue: when you run the script via supervisor server, the process lacks a permission that it does have when executing via node server, which is why the plain node run works.
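Separately, one thing worth noting about the promise chain in the question: loadFile does not return a promise, and the .then() callbacks that call it do not return anything either, so the chain moves on before the download has been written to ./files/temp.jpg. A minimal sketch of a promise-returning loadFile, using the same request and fs modules as above (the error handling shown is an assumption):

function loadFile(link) {
  return new Promise(function(resolve, reject) {
    request
      .get(link)
      .on('error', reject)                              // network error
      .pipe(fs.createWriteStream('./files/temp.jpg'))   // write the response to disk
      .on('error', reject)                              // write error
      .on('finish', resolve);                           // file fully flushed
  });
}

Each .then() step would then need to return it, e.g. .then(function(link) { return loadFile(link); }), so the next changeImage call only runs once the file exists.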

Related

Is there any way of calling a function to erase all files before uploading new ones with multer?

I'm trying to call a function, ereaseFiles(), before the upload.array() method runs, but I can't figure out how to do it.
The main goal is, on a PUT request, to delete all files on disk related to that object before uploading the new ones.
I've tried putting the function in the diskStorage section as well as in the callback of the PUT route. I even tried handling it in the function itself before upload.array() was called. I've tried working with promises, but that is way too hard for me.
//function to be called (this works)
function ereaseFiles(req) {
glob("uploads/" + req.body.numeroScheda + "*", function (err, files) {
for (const file of files) {
fs.unlink(file, err => {
if (err) throw err;
console.log('successfully deleted files');
});
}
});
}
//My multer setup:
const storage = multer.diskStorage({
destination: function (req, file, cb) {
cb(null, './uploads/');
},
filename: function (req, file, cb) {
cb(null, req.body.numeroScheda + "-" + file.originalname);
}
});
const upload = multer({
storage: storage, limits: {
fileSize: 1024 * 1024 * 2,
},
});
//MY EDIT PUT ROUTE
app.put("/immobili/:_id", upload.array('immaginePrincipale', 30), function (req, res) {
const requestedId = req.params._id;
const proprietaImmagini = req.files;
const immagini = proprietaImmagini.map(function (immagine) {
//console.log(immagine.path);
return immagine.path;
});
Immobile.findOneAndUpdate(requestedId, {
numeroScheda: req.body.numeroScheda,
categoria: req.body.categoria,
titolo: req.body.titolo,
sottotitolo: req.body.sottotitolo,
descrizione: req.body.descrizione,
localita: req.body.localita,
locali: req.body.locali,
superficie: req.body.superficie,
camere: req.body.camere,
bagni: req.body.bagni,
immagini: immagini,
}, function (err, updatedImmobile) {
if (err) return console.error(err);
res.redirect("/immobili/" + requestedId);
});
});
What should happen is that all files on disk associated with the object (numeroScheda) get deleted before the new ones are uploaded, to keep the file storage automated and clean.
EDIT 1:
I've created a delete route that works:
app.delete("/immobili/:_id", (req, res) => {
const requestedId = req.params._id;
Immobile.findOne({ _id: requestedId }, function (err, immobile) {
if (err) return console.error(err);
ereaseFiles(immobile);
});
Immobile.findOneAndRemove(requestedId, err => {
if (err) console.error(err);
else res.redirect('/immobili');
});
});
the function ereaseFiles looks now like this:
ereaseFiles = immobile => {
glob("uploads/" + immobile.numeroScheda + "*", function (err, files) {
for (const file of files) {
fs.unlink(file, err => {
if (err) throw err;
});
}
});
cancellato = true;
}
I've tried to implement this in the edit route with the middleware that was kindly suggested:
app.use("/immobili/:_id", function (req, res, next) {
const requestedId = req.params._id;
let timer = setInterval(() => {
Immobile.findOne({ _id: requestedId }, (err, immobile) => {
if (err) return console.error(err);
ereaseFiles(immobile);
console.log(this.cancellato);
if (this.cancellato) {
clearInterval(timer);
next();
}
});
}, 1000);
});
This works if the number of uploaded pictures is greater than or equal to the number already there, but if it is lower the behaviour is strange (sometimes it uploads some pictures, sometimes none).
You can use a middleware for that. Just make sure that the middleware is positioned before your put request handler.
app.use("/immobili/:_id", function(req,res,next){
ereaseFiles(req);
let timer = setInterval(() => {
if(erased){
clearInterval(timer);
next();
}
},100);
})
app.put("/immobili/:_id", upload.array('immaginePrincipale', 30), function (req, res) { ...
EDIT 1:
Please change your ereaseFiles function to:
function ereaseFiles(req) {
glob("uploads/" + req.body.numeroScheda + "*", function (err, files) {
for (let i = 0; i < files.length; i++) { // iterate over every file so the last unlink sets erased
fs.unlink(files[i], err => {
if (err) throw err;
console.log('successfully deleted files');
if(i===files.length-1)
erased = true;
});
}
});
}
Edit 2: I changed a lot of things. Now the approach is: your route goes through the middleware first, and the middleware calls your ereaseFiles function. While the erased variable is not true, your PUT route will not be hit. When ereaseFiles completes, it sets erased to true. For this to work, you'll have to set erased = false in the file before all of this.
I won! The solution was to put a little timer on the next() call, since it was firing too soon and mixing the deletion up with the uploads. Thanks for your help everyone!
I've also added an erased variable (cancellato) that is switched off and on as the function completes. Thanks to Mr. Web for that.
Here's the answer in case someone runs across this; there's some Italian in the code, but hopefully it's readable enough anyway!
cancellaFoto = immobile => {
cancellato = false;
glob("uploads/" + immobile.numeroScheda + "*", function (err, files) {
for (const file of files) {
fs.unlink(file, err => {
if (err) throw err;
});
}
});
cancellato = true;
}
app.use("/immobili/:_id", function (req, res, next) {
const requestedId = req.params._id;
Immobile.findOne({ _id: requestedId }, (err, immobile) => {
if (err) return console.error(err);
immobile.immagini = [];
cancellaFoto(immobile);
console.log(immobile.immagini);
if (this.cancellato) {
console.log(this.cancellato);
return setTimeout(next, 500);
} else {
return console.log("Aborted");
}
});
});
//EDIT PUT ROUTE
app.put("/immobili/:_id", upload.array('immaginePrincipale', 30), function (req, res) {
const requestedId = req.params._id;
const proprietaImmagini = req.files;
const immagini = proprietaImmagini.map(function (immagine) {
//console.log(immagine.path);
return immagine.path;
});
console.log(immagini);
Immobile.findOneAndUpdate(requestedId, {
numeroScheda: req.body.numeroScheda,
categoria: req.body.categoria,
titolo: req.body.titolo,
sottotitolo: req.body.sottotitolo,
descrizione: req.body.descrizione,
localita: req.body.localita,
locali: req.body.locali,
superficie: req.body.superficie,
camere: req.body.camere,
bagni: req.body.bagni,
immagini: immagini,
}, function (err, updatedImmobile) {
if (err) return console.error(err);
res.redirect("/immobili/" + requestedId);
});
});
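For what it's worth, the timer/flag can be avoided entirely if the erase step returns a promise that resolves only after every unlink has finished; the middleware can then simply wait for it before calling next(). A minimal sketch along those lines, reusing app, Immobile and numeroScheda from the code above (util.promisify and fs.promises are standard Node APIs):

const fs = require("fs");
const util = require("util");
const glob = require("glob");

const globP = util.promisify(glob);

// Resolves only once every matching file has been removed.
function cancellaFoto(immobile) {
  return globP("uploads/" + immobile.numeroScheda + "*")
    .then(files => Promise.all(files.map(file => fs.promises.unlink(file))));
}

app.use("/immobili/:_id", function (req, res, next) {
  Immobile.findOne({ _id: req.params._id }, function (err, immobile) {
    if (err) return next(err);
    cancellaFoto(immobile)
      .then(() => next())   // reached only after all deletions have completed
      .catch(next);
  });
});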

How to avoid a callback in promise

I am new to NodeJS and JavaScript, and I am badly stuck on a problem:
I want to generate a QR image of 'some text' and, after generating it, query my MySQL database and insert the image into the database.
The problem is that QRCode.toDataURL of the soldair module is still running when the query is called, i.e. the query runs before the QR image is returned from .toDataURL.
Hence it results in an error.
I tried everything: promises, nested promises, counters, if statements, etc., but I am unable to find a solution.
My code:
router.post('/generateTicket', (req,res) => {
const query1 = `SELECT * FROM booking ORDER BY bookingID DESC LIMIT 1`;
const query2 = `INSERT INTO ticket (ticket_image,BookingID) SET ?`;
let bookingID;
let count;
let ticket_data = {};
Promise.using(mysql.getSqlConn(), conn => {
conn.query(query1).then(resultOfQuery1 => {
bookingID = resultOfQuery1[0].BookingID;
count = resultOfQuery1[0].PeopleCount;
console.log("ID = " + bookingID + " people count = "+count);
promiseToCreateQRcode().then(function (URLfromResolve) {
console.log("the url is " + URLfromResolve );
}).catch(function (fromReject) {
console.log("Rejected "+ fromReject);
}); // catch of promise to create QR
}).catch(err => {
res.json({ status: 500, message: 'Error Occured in query 1 ' + err });
}); // catch of query 1
});
});
var opts = {
errorCorrectionLevel: 'H',
type: 'image/png',
rendererOpts: {
quality: 0.3
}
};
let promiseToCreateQRcode = function () {
let QRImage;
return new Promise(function (resolve,reject) {
QRCode.toDataURL('text', function (err, url) {
if (err) throw err
console.log("\n"+url+"\n");
QRImage = url;
});
if (QRImage)
resolve(QRImage);
else
reject("error occured in url");
});
};
As you can see, the program jumps to the if statement before the QR image has been generated, hence it goes into "reject":
Try this,
let promiseToCreateQRcode = function () {
return new Promise(function (resolve,reject) {
QRCode.toDataURL('text', function (err, url) {
if (err){
reject(err); // or some message
} else {
resolve(url);
}
});
});
};
This way the promise will be resolved only when toDataURL returns the QR image.
Have a look at How do I convert an existing callback API to promises?. You need to call resolve or reject in the asynchronous callback!
function promiseToCreateQRcode() {
return new Promise(function(resolve,reject) {
QRCode.toDataURL('text', function (err, url) {
if (err) {
reject(err);
} else {
console.log("\n"+url+"\n");
resolve(url);
}
});
});
}
Using this extra QRImage variable like you did cannot work.
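To complete the picture, here is a minimal sketch of how the promisified function could be wired into the route from the question, so that the INSERT only runs once the data URL exists. It reuses Promise.using and mysql.getSqlConn() from the question; the INSERT ... SET ? form and the response shapes are assumptions:

router.post('/generateTicket', (req, res) => {
  const query1 = `SELECT * FROM booking ORDER BY bookingID DESC LIMIT 1`;
  const query2 = `INSERT INTO ticket SET ?`;

  Promise.using(mysql.getSqlConn(), conn => {
    return conn.query(query1)
      .then(resultOfQuery1 => {
        const bookingID = resultOfQuery1[0].BookingID;
        // Wait for the QR code before touching the ticket table.
        return promiseToCreateQRcode()
          .then(url => conn.query(query2, { ticket_image: url, BookingID: bookingID }));
      })
      .then(() => res.json({ status: 200, message: 'Ticket created' }))
      .catch(err => res.json({ status: 500, message: 'Error occurred: ' + err }));
  });
});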

Expression statement is not assignment or call warning in Javascript code

I switched from the Atom code editor to PhpStorm, and a lot of my code gets highlighted when I use promises, with the following message: Expression statement is not assignment or call
Here is an example of some highlighted code:
getTickers.bitfinex = function() {
var counter = 0,
promises = []
//highlighted code begins here
new Promise(function(resolve, reject) {
request.get({
url: 'https://api.bitfinex.com/v1/symbols'
},
function(err, res, body) {
if (err) {
console.log(err, 'bitfinex api error')
reject(err, 'bitfinex api error')
}
if (!err) {
body = JSON.parse(body)
var symbols = []
body.forEach(function(symbol) {
symbol = 't' + symbol.toUpperCase()
symbols.push(symbol)
})
resolve(symbols)
}
})
})
.then((symbols) => {
var symbolsStr = symbols.join()
request.get({
url: 'https://api.bitfinex.com/v2/tickers?symbols=' + symbolsStr
},
function(err, res, body) {
body = JSON.parse(body)
if (err) {
console.log(err, 'bitfinex api error')
}
if (body[0] == 'error') {
console.log(body, 'bitfinex api error')
}
if (body[0] !== 'error') {
body.forEach(function(ticker) {
var promise = new Promise(function(resolve, reject) {
var currencyPair = ticker[0].replace("t", ""),
splitCurrencies = currencyPair.match(/[A-Z]{3}/g),
baseCurrency = splitCurrencies[0],
quoteCurrency = splitCurrencies[1]
Ticker.create({
currency_pair: baseCurrency + '-' + quoteCurrency,
base_currency: baseCurrency,
quote_currency: quoteCurrency,
last: ticker[7],
volume: ticker[8],
native_currency_pair: ticker[0],
exchange: 'bitfinex',
time: new Date().getTime()
}, function(err, document) {
if (err) {
reject(err)
}
if (document) {
counter++
resolve()
}
})
})
promises.push(promise)
})
Promise.all(promises)
.then(() => console.log(counter + ' bitfinex tickers updated'))
.catch((err) => console.log(err, 'bitfinex update error'))
}
})
})
.catch((err) => console.log(err))
//highlight ends here
}
What can I add or change in the code to make this correct so the warning goes away?
In order to disable this WebStorm-specific code inspection go to
WebStorm -> Preferences -> Editor -> Inspections
and uncheck the box under JavaScript -> JavaScript validity issues
that has the label, "expression statement which is not assignment or call".
If you would like to actually change your code to fix these errors, see this answer.
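If you prefer to change the code rather than the inspection settings, one common approach is to assign or return the promise chain, so the statement becomes an assignment (or a return value that callers can await) instead of a bare expression built purely for its side effects. A rough sketch against the snippet above (the surrounding ticker logic is unchanged and only outlined here):

getTickers.bitfinex = function() {
  var counter = 0,
      promises = [];

  // Keeping a reference to the chain (or returning it) turns the bare
  // expression statement into an assignment/return, and also lets callers
  // know when the update has finished or failed.
  var bitfinexUpdate = new Promise(function(resolve, reject) {
    request.get({ url: 'https://api.bitfinex.com/v1/symbols' },
      function(err, res, body) {
        if (err) return reject(err);
        resolve(JSON.parse(body).map(function(s) { return 't' + s.toUpperCase(); }));
      });
  })
  .then(function(symbols) {
    // ...same ticker-fetching and Promise.all logic as in the original snippet...
  })
  .catch(function(err) { console.log(err); });

  return bitfinexUpdate;
};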

Different response on NodeJS and Postman [duplicate]

I want to open a page up in node and process the contents in my application. Something like this seems to work well:
var opts = {host: host, path:pathname, port: 80};
http.get(opts, function(res) {
var page = '';
res.on('data', function (chunk) {
page += chunk;
});
res.on('end', function() {
// process page
});
This doesn't work, however, if the page returns an 301/302 redirect. How would I do that in a reusable way in case there are multiple redirects? Is there a wrapper module on top of the http to more easily handle processing http responses from a node application?
If all you want to do is follow redirects but still want to use the built-in HTTP and HTTPS modules, I suggest you use https://github.com/follow-redirects/follow-redirects.
yarn add follow-redirects
npm install follow-redirects
All you need to do is replace:
var http = require('http');
with
var http = require('follow-redirects').http;
... and all your requests will automatically follow redirects.
With TypeScript you can also install the types
npm install @types/follow-redirects
and then use
import { http, https } from 'follow-redirects';
Disclosure: I wrote this module.
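For illustration, a minimal usage sketch; the module mirrors the core http/https API, and the URL below is just a placeholder:

const { https } = require('follow-redirects');

https.get('https://example.com/some-redirecting-path', (res) => {
  let body = '';
  res.on('data', (chunk) => { body += chunk; });
  res.on('end', () => {
    // res.responseUrl is the final URL reached after following redirects
    console.log(res.statusCode, res.responseUrl, body.length);
  });
}).on('error', (err) => console.error(err));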
Is there a wrapper module on top of the http to more easily handle processing http responses from a node application?
request
Redirection logic in request
Make another request based on response.headers.location:
const request = function(url) {
lib.get(url, (response) => {
var body = [];
if (response.statusCode == 302) {
body = [];
request(response.headers.location);
} else {
response.on("data", /*...*/);
response.on("end", /*...*/);
};
} ).on("error", /*...*/);
};
request(url);
Update:
Now you can follow all redirects with var request = require('request'); using the followAllRedirects param.
request({
followAllRedirects: true,
url: url
}, function (error, response, body) {
if (!error) {
console.log(response);
}
});
Here is my (recursive) approach to download JSON with plain node, no packages required.
import https from "https";
function get(url, resolve, reject) {
https.get(url, (res) => {
// if any other status codes are returned, those needed to be added here
if(res.statusCode === 301 || res.statusCode === 302) {
return get(res.headers.location, resolve, reject)
}
let body = [];
res.on("data", (chunk) => {
body.push(chunk);
});
res.on("end", () => {
try {
// remove JSON.parse(...) for plain data
resolve(JSON.parse(Buffer.concat(body).toString()));
} catch (err) {
reject(err);
}
});
}).on("error", reject); // reject on network errors so the promise doesn't hang
}
async function getData(url) {
return new Promise((resolve, reject) => get(url, resolve, reject));
}
// call
getData("some-url-with-redirect").then((r) => console.log(r));
Here is a function I use to fetch URLs that redirect:
const http = require('http');
const url = require('url');
function get({path, host}, callback) {
http.get({
path,
host
}, function(response) {
if (response.headers.location) {
var loc = response.headers.location;
if (loc.match(/^http/)) {
loc = url.parse(loc); // parse the absolute redirect target with the url module
host = loc.host;
path = loc.path;
} else {
path = loc;
}
get({host, path}, callback);
} else {
callback(response);
}
});
}
It works the same as http.get but follows redirects.
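A quick usage example of that helper (the host and path are placeholders):

get({ host: 'example.com', path: '/some/redirecting/path' }, function (response) {
  var body = '';
  response.on('data', function (chunk) { body += chunk; });
  response.on('end', function () { console.log(body); });
});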
In the case of a PUT or POST request, if you receive statusCode 405 (Method Not Allowed), try this implementation with the "request" library and add the properties mentioned below:
followAllRedirects: true,
followOriginalHttpMethod: true
const options = {
headers: {
Authorization: TOKEN,
'Content-Type': 'application/json',
'Accept': 'application/json'
},
url: `https://${url}`,
json: true,
body: payload,
followAllRedirects: true,
followOriginalHttpMethod: true
}
console.log('DEBUG: API call', JSON.stringify(options));
request(options, function (error, response, body) {
if (!error) {
console.log(response);
}
});
If you have an HTTPS server, change your URL to use the https:// protocol.
I ran into a similar issue. My URL used the http:// protocol and I wanted to make a POST request, but the server wanted to redirect it to https. What happens is that node's http redirect handling sends the follow-up request as a GET, which is not what you want.
What I did was change my URL to the https:// protocol, and it works.
Possibly a little bit of a necromancing post here, but...
Here's a function that follows up to 10 redirects and detects infinite redirect loops. It also parses the result as JSON.
Note - it uses a callback helper (shown at the end of this post).
(TL;DR: full working demo in context here, or a remixed version here.)
function getJSON(url,cb){
var callback=errBack(cb);
//var callback=errBack(cb,undefined,false);//replace previous line with this to turn off logging
if (typeof url!=='string') {
return callback.error("getJSON:expecting url as string");
}
if (typeof cb!=='function') {
return callback.error("getJSON:expecting cb as function");
}
var redirs = [url],
fetch = function(u){
callback.info("hitting:"+u);
https.get(u, function(res){
var body = [];
callback.info({statusCode:res.statusCode});
if ([301,302].indexOf(res.statusCode)>=0) {
if (redirs.length>10) {
return callback.error("excessive 301/302 redirects detected");
} else {
if (redirs.indexOf(res.headers.location)<0) {
redirs.push(res.headers.location);
return fetch(res.headers.location);
} else {
return callback.error("301/302 redirect loop detected");
}
}
} else {
res.on('data', function(chunk){
body.push(chunk);
callback.info({onData:{chunkSize:chunk.length,chunks:body.length}});
});
res.on('end', function(){
try {
// convert to a single buffer
var json = Buffer.concat(body);
console.info({onEnd:{chunks:body.length,bodyLength:body.length}});
// parse the buffer as json
return callback.result(JSON.parse(json),json);
} catch (err) {
console.error("exception in getJSON.fetch:",err.message||err);
if (json.length>32) {
console.error("json==>|"+json.toString('utf-8').substr(0,32)+"|<=== ... (+"+(json.length-32)+" more bytes of json)");
} else {
console.error("json==>|"+json.toString('utf-8')+"|<=== json");
}
return callback.error(err,undefined,json);
}
});
}
});
};
fetch(url);
}
Note - it uses the callback helper shown below.
You can paste this into the node console and it should run as is.
(Or see the full working demo in context here.)
var
fs = require('fs'),
https = require('https');
function errBack (cb,THIS,logger) {
var
self,
EB=function(fn,r,e){
if (logger===false) {
fn.log=fn.info=fn.warn=fn.errlog=function(){};
} else {
fn.log = logger?logger.log : console.log.bind(console);
fn.info = logger?logger.info : console.info.bind(console);
fn.warn = logger?logger.warn : console.warn.bind(console);
fn.errlog = logger?logger.error : console.error.bind(console);
}
fn.result=r;
fn.error=e;
return (self=fn);
};
if (typeof cb==='function') {
return EB(
logger===false // optimization when not logging - don't log errors
? function(err){
if (err) {
cb (err);
return true;
}
return false;
}
: function(err){
if (err) {
self.errlog(err);
cb (err);
return true;
}
return false;
},
function () {
return cb.apply (THIS,Array.prototype.concat.apply([undefined],arguments));
},
function (err) {
return cb.apply (THIS,Array.prototype.concat.apply([typeof err==='string'?new Error(err):err],arguments));
}
);
} else {
return EB(
function(err){
if (err) {
if (typeof err ==='object' && err instanceof Error) {
throw err;
} else {
throw new Error(err);
}
return true;//redundant due to throw, but anyway.
}
return false;
},
logger===false
? self.log //optimization :resolves to noop when logger==false
: function () {
self.info("ignoring returned arguments:",Array.prototype.concat.apply([],arguments));
},
function (err) {
throw typeof err==='string'?new Error(err):err;
}
);
}
}
function getJSON(url,cb){
var callback=errBack(cb);
if (typeof url!=='string') {
return callback.error("getJSON:expecting url as string");
}
if (typeof cb!=='function') {
return callback.error("getJSON:expecting cb as function");
}
var redirs = [url],
fetch = function(u){
callback.info("hitting:"+u);
https.get(u, function(res){
var body = [];
callback.info({statusCode:res.statusCode});
if ([301,302].indexOf(res.statusCode)>=0) {
if (redirs.length>10) {
return callback.error("excessive 302 redirects detected");
} else {
if (redirs.indexOf(res.headers.location)<0) {
redirs.push(res.headers.location);
return fetch(res.headers.location);
} else {
return callback.error("302 redirect loop detected");
}
}
} else {
res.on('data', function(chunk){
body.push(chunk);
console.info({onData:{chunkSize:chunk.length,chunks:body.length}});
});
res.on('end', function(){
try {
// convert to a single buffer
var json = Buffer.concat(body);
callback.info({onEnd:{chunks:body.length,bodyLength:body.length}});
// parse the buffer as json
return callback.result(JSON.parse(json),json);
} catch (err) {
// read with "bypass refetch" option
console.error("exception in getJSON.fetch:",err.message||err);
if (json.length>32) {
console.error("json==>|"+json.toString('utf-8').substr(0,32)+"|<=== ... (+"+(json.length-32)+" more bytes of json)");
} else {
console.error("json==>|"+json.toString('utf-8')+"|<=== json");
}
return callback.error(err,undefined,json);
}
});
}
});
};
fetch(url);
}
var TLDs,TLDs_fallback = "com.org.tech.net.biz.info.code.ac.ad.ae.af.ag.ai.al.am.ao.aq.ar.as.at.au.aw.ax.az.ba.bb.bd.be.bf.bg.bh.bi.bj.bm.bn.bo.br.bs.bt.bv.bw.by.bz.ca.cc.cd.cf.cg.ch.ci.ck.cl.cm.cn.co.cr.cu.cv.cw.cx.cy.cz.de.dj.dk.dm.do.dz.ec.ee.eg.er.es.et.eu.fi.fj.fk.fm.fo.fr.ga.gb.gd.ge.gf.gg.gh.gi.gl.gm.gn.gp.gq.gr.gs.gt.gu.gw.gy.hk.hm.hn.hr.ht.hu.id.ie.il.im.in.io.iq.ir.is.it.je.jm.jo.jp.ke.kg.kh.ki.km.kn.kp.kr.kw.ky.kz.la.lb.lc.li.lk.lr.ls.lt.lu.lv.ly.ma.mc.md.me.mg.mh.mk.ml.mm.mn.mo.mp.mq.mr.ms.mt.mu.mv.mw.mx.my.mz.na.nc.ne.nf.ng.ni.nl.no.np.nr.nu.nz.om.pa.pe.pf.pg.ph.pk.pl.pm.pn.pr.ps.pt.pw.py.qa.re.ro.rs.ru.rw.sa.sb.sc.sd.se.sg.sh.si.sj.sk.sl.sm.sn.so.sr.st.su.sv.sx.sy.sz.tc.td.tf.tg.th.tj.tk.tl.tm.tn.to.tr.tt.tv.tw.tz.ua.ug.uk.us.uy.uz.va.vc.ve.vg.vi.vn.vu.wf.ws.ye.yt.za.zm.zw".split(".");
var TLD_url = "https://gitcdn.xyz/repo/umpirsky/tld-list/master/data/en/tld.json";
var TLD_cache = "./tld.json";
var TLD_refresh_msec = 15 * 24 * 60 * 60 * 1000;
var TLD_last_msec;
var TLD_default_filter=function(dom){return dom.substr(0,3)!="xn-"};
function getTLDs(cb,filter_func){
if (typeof cb!=='function') return TLDs;
var
read,fetch,
CB_WRAP=function(tlds){
return cb(
filter_func===false
? tlds
: tlds.filter(
typeof filter_func==='function'
? filter_func
: TLD_default_filter)
);
},
check_mtime = function(mtime) {
if (Date.now()-mtime > TLD_refresh_msec) {
return fetch();
}
if (TLDs) return CB_WRAP (TLDs);
return read();
};
fetch = function(){
getJSON(TLD_url,function(err,data){
if (err) {
console.log("exception in getTLDs.fetch:",err.message||err);
return read(true);
} else {
TLDs=Object.keys(data);
fs.writeFile(TLD_cache,JSON.stringify(TLDs),function(err){
if (err) {
// ignore save error, we have the data
CB_WRAP(TLDs);
} else {
// get mmtime for the file we just made
fs.stat(TLD_cache,function(err,stats){
if (!err && stats) {
TLD_last_msec = stats.mtimeMs;
}
CB_WRAP(TLDs);
});
}
});
}
});
};
read=function(bypassFetch) {
fs.readFile(TLD_cache,'utf-8',function(err,json){
try {
if (err) {
if (bypassFetch) {
// after an http error, we fall back to the hardcoded basic list of tlds
// if the disk file is not readable
console.log("exception in getTLDs.read.bypassFetch:",err.message||err);
throw err;
}
// if the disk read failed, get the data from the CDN server instead
return fetch();
}
TLDs=JSON.parse(json);
if (bypassFetch) {
// we need to update stats here as fetch called us directly
// instead of being called by check_mtime
return fs.stat(TLD_cache,function(err,stats){
if (err) return fetch();
TLD_last_msec =stats.mtimeMs;
return CB_WRAP(TLDs);
});
}
} catch (e){
// after JSON error, if we aren't in an http fail situation, refetch from cdn server
if (!bypassFetch) {
return fetch();
}
// after a http,disk,or json parse error, we fallback to hardcoded basic list of tlds
console.log("exception in getTLDs.read:",err.messsage||err);
TLDs=TLDs_fallback;
}
return CB_WRAP(TLDs);
});
};
if (TLD_last_msec) {
return check_mtime(TLD_last_msec);
} else {
fs.stat(TLD_cache,function(err,stats){
if (err) return fetch();
TLD_last_msec =stats.mtimeMs;
return check_mtime(TLD_last_msec);
});
}
}
getTLDs(console.log.bind(console));

Node mysql based module not working

I have a node based app that will look into a database for data. Because the database is fairly large, with several tables, I am writing a module to help modularize the task. The problem is that I cannot get the main code to return the data from the database lookup, because I believe the program exits before the lookup is executed. How do I get my node module working? My intention is to have the DB helper functions reside in the SomethingHelpers.js module. The main code, silly.js, looks like this:
// silly.js
var sh = require('./SomethingHelpers.js');
helper = new sh();
helper.then(function(res) {
var promise = helper.getAllForUsername('sonny');
promise.then(function(res) {
console.log('worked', res);
});
promise.catch(function(err) {
console.log('err: ', err);
});
});
helper.catch(function(err) {
console('Could not create object: ', err);
});
SomethingHelpers.js looks like this:
var mysql = require("mysql");
function SomethingHelpers() {
return new Promise(function(resolve, reject) {
this.connectionPool = mysql.createPool({
connectionLimit: 100,
host: 'server.somewhere.com',
user: "username",
password: "somepass",
database: "sillyDB",
debug: false
});
});
}
SomethingHelpers.prototype.getAllSomethingForUsername = function(username) {
var result = [];
return new Promise(function(resolve, reject) {
this.connectionPool.getConnection(function(err, connection) {
if (err) {
console.log('Error connecting to the silly database.');
return;
} else {
console.log('Connection established to the silly database. Super-Duper!');
return connection.query('SELECT something FROM somethingTable where username=\"' + username + '\"',
function(err, rows, field) {
connection.release();
if (!err) {
//console.log (rows.something);
rows.forEach(function(item) {
var allSomething = JSON.parse(item.something);
console.log(allSomething.length);
result.push(allSomething);
for (var i = 0; i < allSomething.length; i++) {
console.log(allSomething[i].handle);
}
console.log('\n\n');
});
console.log('Done');
return result;
} else {
console.log('Eeeeeeeek!');
//console.log (result);
return result;
}
});
}
});
});
} // End of getAllSomething ()
module.exports = SomethingHelpers;
I figured out the answer to my own question. Here's how I solved it. First, SomethingHelpers.js:
//SomethingHelpers.js
var mysql = require('promise-mysql');
function SomethingHelpers () {
this.pool = mysql.createPool({
connectionLimit: 100,
host: 'server.somewhere.com',
user: "username",
password: "somepass",
database: "sillyDB",
debug: false
});
}
SomethingHelpers.prototype.getAllSomethingsForThisUsername = function (username) {
let pool = this.pool;
return new Promise(function (resolve, reject) {
pool.getConnection ().then(function(connection) {
connection.query('SELECT something FROM somethingsTable where username=\"'+
username+'\"').
then (function (rows) {
resolve (getAllSomethings (rows));
}).catch (function (error) {
console.log ('Error: ', error);
});
});
});
}
function getAllSomethings (rows)
{
var result = [];
rows.forEach (function (item) {
var allSomethings = JSON.parse(item.something);
result.push (allSomethings);
});
return result;
}
module.exports = SomethingHelpers;
With the glory of Promise, the bounty from the helper module can be enjoyed thusly:
//silly.js
var hh = require ('./SomethingHelpers');
helper = new hh ();
thePromiseOfSomething = helper.getAllSomethingsForThisUsername ('sonny');
thePromiseOfSomething.then(function (rows) {
console.log (rows);
});
Thus releasing me from the tyranny of asynchronous thinking (J/K).
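As a side note, the same helper reads a little more plainly with async/await instead of an explicit then chain; this is just a sketch assuming Node 8+ and the module above:

// silly.js (async/await variant)
const SomethingHelpers = require('./SomethingHelpers');
const helper = new SomethingHelpers();

async function main() {
  try {
    const rows = await helper.getAllSomethingsForThisUsername('sonny');
    console.log(rows);
  } catch (err) {
    console.error('err: ', err);
  }
}

main();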
