So I made a user login; the database is MongoDB and it works perfectly. However, I want to call MongoDB again after the person logs in, so I decided to split things up into separate functions. But when I tried taking the login script and turning it into a function, the database won't open. That makes no sense, since I'm using the exact same code that worked inside the route. Can this be done in a function? Does anyone know what's going on?
Below is the code, plus images from the debugger showing the DB won't open.
Route
// Listen for Upload file
router.post('/uploadFile', function (req, res) {
    upload(req, res, function (err) {
        if (err) {
            console.log("Error uploading file");
        } else {
            //var databaseName = "E-learn", collection = "Accounts";
            var username = req.body.username;
            var fileName = req.file.filename;
            var filePath = req.file.path;
            console.log(username);
            console.log("GET " + req.file.fieldname);
            console.log("GET " + req.file.filename);
            console.log("GET " + req.file.originalname);
            console.log("GET " + req.file.path);
            var result = findOne(username);
            res.json(result);
        }
    });
});
Function call
function findOne(username) {
    var databaseName = "E-learn", collection = "Accounts";
    var db = new Db(databaseName, new Server('localhost', 27017));
    db.open(function (err, db) {
        console.log(databaseName + ": opened");
        console.log(db);
        db.collection(collection).findOne({username: username}, function (err, doc) {
            assert.equal(null, err);
            if (doc != null) {
                console.log("Found");
                // db.close();
                return "Found";
            } else {
                console.log("Not Found");
                // db.close();
                return "Not found";
            }
            db.close();
        });
    });
    console.log("Did not open")
    db.close();
    return 0; // Should not be called
}
Not sure what ES version you're running, but if you want to take the synchronous-looking approach, try this version of findOne. Async/await lets asynchronous code read like synchronous code.
async function findOne(username) {
    var databaseName = "E-learn", collection = "Accounts";
    var db = new Db(databaseName, new Server('localhost', 27017));
    db = await db.open();
    // try the line above first
    //let {err, db} = await db.open();
    let doc = await db.collection(collection).findOne({username: username});
    // try the line above first
    //let {err, doc} = await db.collection(collection).findOne({username: username});
    //assert.equal(null, err);
    db.close();
    if (doc != null) {
        console.log("Found");
        return "Found";
    } else {
        console.log("Not Found");
        return "Not found";
    }
}
If you get an error for async/await, try installing this package: https://www.npmjs.com/package/asyncawait
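One more thing to keep in mind (a sketch, assuming the Express route from the question): an async function returns a Promise, so the route can no longer use its return value directly; it has to await it or attach a .then:

// inside the route's else branch, instead of res.json(findOne(username)):
findOne(username)
    .then(function (result) {
        res.json(result);
    })
    .catch(function (err) {
        console.log(err);
        res.status(500).json({error: "lookup failed"});
    });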
There are a couple of issues in your code, as observed at first glance.
Using return in asynchronous functions to hand back a result will not work.
You need to define a callback function and pass it as a reference to findOne. That callback then needs to be called to return the result.
Printing "Did not open" in this scenario does not mean the database did not open. This is asynchronous execution, so the database may open after that console log is printed.
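A minimal sketch of that idea (hypothetical names, just to show the shape):

function findOne(username, done) {   // done(err, result)
    // ...open the database and run the query here...
    done(null, "Found");             // hand the result back through the callback
}

findOne(req.body.username, function (err, result) {
    if (err) return console.log(err);
    res.json(result);
});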
You are facing the typical asynchronous-callback problem that most developers new to JavaScript run into.
First of all, JavaScript is asynchronous (a must-read for you). That means that when you pass a function as a parameter of something, you can't expect the code inside that function to run inline with the rest of your code. The function you pass may be called as the result of an event (connecting to something, a user clicking a button, a database being opened...), so it will happen sometime in the future. Even if it is 1 nanosecond later, it is still in the future.
So you expect it to run this way:
function findOne(username) {
    // Start with this (step 1)
    var databaseName = "E-learn", collection = "Accounts";
    // Then continue with this (step 2)
    var db = new Db(databaseName, new Server('localhost', 27017));
    // Open the database (step 3) and the code will wait for the database to open
    db.open(function (err, db) {
        // Run this inline, just after db.open (step 4)
        console.log("OPEN");
        [...]
    });
    // Continue with this after the database was opened (step 5)
    console.log("Did not open")
    db.close();
    return 0; // Should not be called
}
But actually what is happening is this:
function findOne(username) {
    // Start with this (step 1)
    var databaseName = "E-learn", collection = "Accounts";
    // Then continue with this (step 2)
    var db = new Db(databaseName, new Server('localhost', 27017));
    // Open the database (step 3), pass a function that will be called by the database when it is open, AND continue with the next step
    db.open(function (err, db) {
        // This function will be called when the database is open, so right now it is not called.
        console.log("OPEN");
        [...]
    });
    // This is the next step after step 3 (step 4).
    console.log("Did not open")
    db.close();
    return 0; // Should not be called
}
// And sometime in the future you suddenly get OPEN in the console, when the database decides to run the callback you passed to it.
Another way to look at this is that a return always returns from the function it belongs to, so consider this nested function:
function findOne(username) { // function keyword, so this is a function (number 1)
    [...]
    db.open(function (err, db) { // Note the function keyword here, so this is a function too (number 2)
        [...]
        return 1; // This return actually works, BUT it affects the function it belongs to, which is function number 2 in this case.
    });
    [...]
    return 0; // And this one is the return of function number 1.
}
So imagine that db.open ran the callback immediately, before running the very next line of code. Even though that would not be asynchronous, the return inside the callback still could not return from the findOne function.
Async problems require async solutions. (You could also reach for async/await, but right now that would only confuse you more, since you still wouldn't understand what is happening with your code or when to use it, as it is built on top of the same asynchronous callbacks; better to first understand async callbacks in plain JS.) Once you understand it, it's actually pretty easy. Just a few changes:
// Listen for Upload file
router.post('/uploadFile', function (req, res) {
    upload(req, res, function (err) {
        if (err) {
            console.log("Error uploading file");
        } else {
            //var databaseName = "E-learn", collection = "Accounts";
            var username = req.body.username;
            var fileName = req.file.filename;
            var filePath = req.file.path;
            console.log(username);
            console.log("GET " + req.file.fieldname);
            console.log("GET " + req.file.filename);
            console.log("GET " + req.file.originalname);
            console.log("GET " + req.file.path);
            // Converted the sync call to an async one, by passing a
            // callback function as a parameter with 2 arguments:
            // the first is the possible error and the second the data
            findOne(username, function (err, result) {
                if (err) {
                    console.log("Error uploading file");
                } else {
                    res.json(result);
                }
            });
        }
    });
});
// Here you can see now that we have the callback parameter,
// which references the callback function we passed before and
// which we can call whenever we want
function findOne(username, callback) {
    var databaseName = "E-learn", collection = "Accounts";
    var db = new Db(databaseName, new Server('localhost', 27017));
    db.open(function (err, db) {
        if (err) {
            callback(err); // If error, pass the error as first argument of the callback
        } else {
            console.log(databaseName + ": opened");
            console.log(db);
            db.collection(collection).findOne({username: username}, function (err, doc) {
                if (err) {
                    callback(err); // findOne is another async callback too, so same as above: check for err
                } else {
                    // And if everything is fine, then pass the result as the second parameter of the callback
                    if (doc != null) {
                        callback(null, "Found");
                    } else {
                        callback(null, "Not found");
                    }
                }
                db.close();
            });
        }
    });
}
As you may have noticed, you pass a lot of functions as callbacks, both in the database methods and inside the router methods.
And some developer protips:
Now simply read the code line by line and try to understand what is happening on each one. You may have wondered, "Why is the code I haven't written built around callbacks while mine isn't?" That question and a bit of research will help you a lot.
Always keep clean indentation. Your code has some problems with it. Indentation is a must in JavaScript, where callback hell is easy to fall into because of its asynchronous nature, and indentation helps a lot with that.
Related
This is possibly a duplicate question, but I can't seem to figure it out.
Essentially, I am making code that runs in a while loop, where I then need to read a file within that loop, and it seems the file read just stops the while loop from ever getting to the end. I'm still pretty new to JavaScript, so it's probably an easy fix.
What I've tried so far is changing my jsonReader function to be synchronous (readFileSync), and that just stopped the code before it did hardly anything (that is what the current code shows now). I've also tried making a second function specifically for reading the files I need synchronously, and that didn't seem to work either. I'm not even sure this has to do with synchronicity.
My Code:
module.exports = {
    name: 'xprun',
    description: "runs the xp handler",
    execute(message) {
        const name = message.author.username;
        const server = message.guild.id;
        const fs = require('fs');

        function jsonReader(filePath, cb) {
            fs.readFileSync(filePath, 'utf-8', (err, fileData) => {
                if (err) {
                    return cb && cb(err);
                }
                try {
                    const object = JSON.parse(fileData);
                    return cb && cb(null, object);
                } catch (err) {
                    return cb && cb(err);
                }
            });
        }

        console.log('Starting the loop...'); //
        var run = true;
        var i = 0;
        while (run == true) {
            i++
            console.log('Running the loop...'); // Loop stops and re-runs here
            // read #1
            jsonReader(`./userData/rank/${server}_server/1.json`, (err, data) => {
                if (err) {
                    console.log(err);
                } else {
                    console.log(data.id); //
                }
                // read #2
                jsonReader(`./userData/xp/${server}_server/${name}_xp.json`, (err, data) => {
                    if (err) {
                        console.log(err);
                    } else {
                        console.log(data.rank); //
                    }
                    console.log('The loop was completed'); //
                    if (i >= 5) {
                        run = false;
                    }
                }); // end read #2
            }); // end read #1
        } // end while
        console.log('The loop was ended'); //
    } // end execute
} // end
As @CherryDT mentioned in the comments, readFileSync does not accept a callback. Because readFileSync is synchronous, it does not need one; readFile accepts a callback only because it is asynchronous and needs to wait until it has read the file before calling the code in the callback. The synchronous method does not need to wait in this way, so you can move the callback code out of the callback like so:
function jsonReader(filePath, cb) {
    try {
        const fileData = fs.readFileSync(filePath, 'utf-8');
        const object = JSON.parse(fileData);
        return cb && cb(null, object);
    } catch (err) {
        return cb && cb(err);
    }
}
The reason your loop was running infinitely is that you set run to false only within your callback cb, but because readFileSync does not accept a callback, that callback was never being run. With the above code, your callback should now run, and the loop should no longer run infinitely (unless there are other issues within your callbacks).
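Alternatively, if you really do want the asynchronous behaviour your original callback signature suggests, fs.readFile (without Sync) is the variant that actually takes a callback, roughly like this. Note, though, that inside a tight while loop the callbacks would still never get a turn to run, which is why the synchronous version above is the better fit for this particular loop.

function jsonReader(filePath, cb) {
    fs.readFile(filePath, 'utf-8', (err, fileData) => {
        if (err) {
            return cb && cb(err);
        }
        try {
            const object = JSON.parse(fileData);
            return cb && cb(null, object);
        } catch (err) {
            return cb && cb(err);
        }
    });
}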
I have a requirement where I need to get the records from table 1 and store them in the Redis cache, and once the Redis cache has finished storing them, get the table 2 records and store those in the Redis cache as well. So there are 4 asynchronous functions.
Steps:
Get table 1 records
Store in redis cache
Get table 2 records
Store in redis cache
What is the correct procedure to handle this?
Below is the code I have written to handle it. Please confirm whether it's the right procedure, or whether there are other ways to handle it in Node.js.
var redis = require("redis");
var client = redis.createClient(6379, 'path', {
    auth_pass: 'key'
});
var mysqlConnection = // get the connection from MySQL database

get_Sections1();

function get_Sections1() {
    var sql = "select * from employee";
    mysqlConnection.query(sql, function (error, results) {
        if (error) {
            console.log("Error while Sections 1 : " + error);
        } else {
            client.set("settings1", JSON.stringify(results), function (err, reply) {
                if (err) {
                    console.log("Error during Update of Election : " + err);
                } else {
                    get_Sections2();
                }
            });
        }
    });
}

function get_Sections2() {
    var sql = "select * from student";
    mysqlConnection.query(sql, function (error, results) {
        if (error) {
            console.log("Error while Sections 2 : " + error);
        } else {
            client.set("settings2", JSON.stringify(results), function (err, reply) {
                if (err) {
                    console.log("Error during Update of Election : " + err);
                } else {
                    console.log("Finished the task...");
                }
            });
        }
    });
}
Create two parameterised functions. One for retrieval, one for storing.
Then promisify them both.
Then write:
return getTableRecords(1)
    .then(storeInRedisCache)
    .then(getTableRecords.bind(null, 2))
    .then(storeInRedisCache)
    .then(done);
To promisify a function, something like this might work:
function getEmployees() {
    return new Promise(function (resolve, reject) {
        var sql = "select * from employee";
        mysqlConnection.query(sql, function (error, results) {
            if (error) {
                return reject(error);
            } else {
                return resolve(results);
            }
        });
    });
}
If you are using an old version of NodeJS you will need a polyfill for Promise.
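If you happen to be on a newer Node (8+), util.promisify can do the wrapping for you. A sketch only: getTableRecords and storeInRedisCache are hypothetical names chosen to match the chain above, and the cache key handling is left to you:

var util = require('util');
var query = util.promisify(mysqlConnection.query).bind(mysqlConnection);
var setCache = util.promisify(client.set).bind(client);

function getTableRecords(n) {
    // hypothetical: pick the SQL for table 1 or table 2
    return query(n === 1 ? "select * from employee" : "select * from student");
}

function storeInRedisCache(results) {
    // hypothetical key name; use "settings1"/"settings2" per table as needed
    return setCache("settings", JSON.stringify(results));
}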
Here is an alternative to Ben Aston's solution using Promise.coroutine (from Bluebird), assuming promisification:
const doStuff = Promise.coroutine(function* () {
    const records = yield getTableRecords(1);
    yield storeRecordsInCache(records);
    const otherRecords = yield getTableRecords(2);
    yield storeRecordsInCache(otherRecords); // you can use loops here too, and try/catch
});

doStuff(); // do all the above, assumes promisification
Alternatively, if you want to use syntax not yet supported in Node (and use Babel to get support), you can do:
async function doStuff() {
    const records = await getTableRecords(1);
    await storeRecordsInCache(records);
    const otherRecords = await getTableRecords(2);
    await storeRecordsInCache(otherRecords); // you can use loops here too, and try/catch
}
I'm trying out Node.js on one of my projects.
I'm really seeing some good advantages in what they call the "event-driven, non-blocking I/O model"; however, in my project there are moments where I don't necessarily want asynchronous calls, and where I want to be able to do several operations before launching an asynchronous call.
Especially when I want to factor out some code and create reusable functions.
Typically I have the following case:
I know that in several parts of my program I have to check whether a media item exists in my database for a given string or id.
So, trying to stay organized, I want to create a function that I will call each time I need to check this.
However, I did not find a way to do that with Node.js and pg (the npm PostgreSQL library, https://github.com/brianc/node-postgres/). Indeed, there is always a callback in the function, and the return is null because of the callback. Here is an example below:
/*
Function which is supposed to check if a media exists
*/
function is_media_existing(url_or_id) {
    log.debug("is_media_existing : begin of the function", {"Parameter": url_or_id});
    pg.connect(connectionstring, function (err, client, done) {
        if (err) {
            log.warning("is_media_existing : Problem with Database connection", {
                "Parameter": url_or_id,
                "Error": err
            });
        }
        if (isNaN(url_or_id)) {
            // Case where the parameter is not a number (string)
            var query = client.query('SELECT COUNT(*) as count FROM media WHERE url = $1::string ', url_or_id);
            query.on('error', function (error) {
                log.warning("is_media_existing : Problem with Database query (connection to db passed but not query)",
                    {"Parameter": url_or_id, "Error": error});
            });
            return query;
        } else {
            // Case where the parameter is an int
            log.debug("is_media_existing : Type of Parameter is a string");
            var query = client.query('SELECT COUNT(*) as count FROM media WHERE id = $1::id ', url_or_id);
            query.on('error', function (error) {
                log.warning("is_media_existing : Problem with Database query (connection to db passed but not query)",
                    {"Parameter": url_or_id, "Error": error});
            });
            return query;
        }
    });
}

// Executing the function
var test = is_media_existing("http://random_url_existing_in_db");
// test is always null as the return is in a callback and the callback is asynchronous
I have the feeling my question touches on core concepts of Node.js, and perhaps my approach is wrong; I apologize in advance.
I know it's not good to wait for a response before doing something.
But what's the alternative? How can I factor my code into functions when I need the same functionality in several parts of my code?
So if anyone could explain how to do that following programming best practices, it would be great.
Thanks
Anselme
As Cody says, you probably don't want a synchronous function.
The way you should handle the situation in your example is to pass in your own callback, like this:
function is_media_existing(url_or_id, callback) {
and then, instead of return query;, use your callback like this:
callback(query);
or, probably better, follow the Node convention for callbacks of having two parameters (err, result), so your callback call would look like this:
callback(null, query);
Here is a rework of your sample
function is_media_existing(url_or_id, callback) { /* callback(err, result) */
    log.debug("is_media_existing : begin of the function", {"Parameter": url_or_id});
    pg.connect(connectionstring, function (err, client, done) {
        if (err) {
            done(err);
            log.warning("is_media_existing : Problem with Database connection", {
                "Parameter": url_or_id,
                "Error": err
            });
            return callback(err, null);
            /* note that this return is simply used to exit the connect's callback and the return value is typically
             * not used; it is the call to callback() that delivers the error value */
        }
        var qrystr;
        if (isNaN(url_or_id)) {
            log.debug("is_media_existing : Type of Parameter is a string");
            qrystr = 'SELECT COUNT(*) as count FROM media WHERE url = $1::string;';
        } else {
            qrystr = 'SELECT COUNT(*) as count FROM media WHERE id = $1::id;';
        }
        client.query(qrystr, [url_or_id], function (err, result) {
            done();
            if (err) {
                /* .. */
            }
            callback(err, result);
        });
    });
}

// Executing the function
var test = is_media_existing("http://random_url_existing_in_db", function (err, result) {
    if (err) {
    } else {
    }
});
If you end up with a deep nest of callbacks, promises are really worth looking into.
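For instance, the helper from the question could be wrapped in a promise. A rough sketch, assuming the pg setup from the question rather than any particular promise library:

function is_media_existing(url_or_id) {
    return new Promise(function (resolve, reject) {
        pg.connect(connectionstring, function (err, client, done) {
            if (err) return reject(err);
            var qrystr = isNaN(url_or_id)
                ? 'SELECT COUNT(*) as count FROM media WHERE url = $1'
                : 'SELECT COUNT(*) as count FROM media WHERE id = $1';
            client.query(qrystr, [url_or_id], function (err, result) {
                done();
                if (err) return reject(err);
                resolve(result.rows[0].count > 0);
            });
        });
    });
}

is_media_existing("http://random_url_existing_in_db")
    .then(function (exists) { /* use the boolean */ })
    .catch(function (err) { /* handle the error */ });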
I don't think you really want a synchronous call. The problem with synchronous calls in Node is that they stop the entire process from doing anything while the synchronous function is running, because they block the event loop. As an example, let's say your sync function takes 2 seconds to complete. Your server will then do nothing for 2 full seconds, and that includes everything (accepting new connections and everything else). That is why blocking functions are avoided: they are (very) bad. Here is an example of how your function would work in an async manner.
is_media_existing("http://random_url_existing_in_db", function (exists) {
    if (exists) {
        //do stuff
    } else {
        //do this other stuff
    }
});
Then within is_media_existing you will need to call that callback function when your query completes.
//pseudo
function is_media_existing(url, callback) {
    query('select COUNT(*) as count FROM media WHERE id = $1::id', [url], function (err, result) {
        if (err)
            callback(false)
        else
            callback(result.count > 0)
    })
}
With the new ES6+ async features and Babel it's simpler. You can npm i -g babel, then npm i babel-runtime, then compile and run the following with babel test.js --optional runtime --stage 2 | node. Please read the following example carefully to see how to adapt it to your use case:
let testData = [
    { id: 0, childIds: [1, 2] },
    { id: 1, childIds: [] }
];

function dbGet(ids) {
    return new Promise(r => {
        // this is an example; you could do any db
        // query here and call r with the results
        r(ids.map((id) => { return testData[id]; }));
    });
}

async function dbExists(ids) {
    let found = await dbGet(ids);
    return (found && found.length > 0);
}

async function test() {
    var exists = await dbExists([0]);
    console.log(exists);
}

test().then(f => {}).catch(e => { console.log('e', e); });
I ran into an issue while attempting to create the logic to add rows to a new table I made in my MySQL database. When adding a row, I need to query the database 4 times to check other rows and then add the correct value to the new row. I am using Node.js and the mysql module to accomplish this. While coding I ran into a snag: the code does not wait for the 4 queries to finish before inserting the new row, which then gives the looked-up values a value of 0 every time. After some research I realized a callback function would be in order, looking something like this:
var n = 0;
connection.query("select...", function (err, rows) {
    if (err) throw err;
    else {
        if (rows.length === 1) ++n;
    }
    callback();
});

function callback() {
    connection.query("insert...", function (err) {
        if (err) throw err;
    });
}
Note: The select queries can only return one item, so the if condition should not affect this issue.
A callback function with only one query to wait on is clear to me, but I become a bit lost with multiple queries to wait on. The only idea I had was to create another variable that increments each time a query finishes, before the callback is called, and is then passed in the callback function's arguments. Inside the callback, the insert could then be wrapped in an if statement whose condition is that this variable equals the number of queries that need to complete, 4 for my purposes here. I can see this working, but I wasn't sure whether this sort of situation already has a built-in solution or whether there are other, better solutions already developed.
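For reference, the counter idea described above would look roughly like this, built from the snippet earlier (the four selects stand in for the real queries):

var n = 0;
var finished = 0;

function queryDone() {
    finished++;
    if (finished === 4) {   // all four selects have reported back
        connection.query("insert...", function (err) {
            if (err) throw err;
        });
    }
}

// repeat this pattern for each of the four selects
connection.query("select...", function (err, rows) {
    if (err) throw err;
    if (rows.length === 1) ++n;
    queryDone();
});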
You need async (https://github.com/caolan/async). You can implement very complex logic with this module.
var data = {}; // You can do this in many ways, but one way is defining a global object so you can add things to it and every function can see it

function firstQueryFunction(callback) {
    //do your stuff with mysql
    data.stuff = rows[0].stuff; // you can store stuff inside your data object
    callback(null);
}

function secondQueryFunction(callback) {
    //do your stuff with mysql
    callback(null);
}

function thirdQueryFunction(callback) {
    //do your stuff with mysql
    callback(null);
}

function fourthQueryFunction(callback) {
    //do your stuff with mysql
    callback(null);
}

// These functions will be executed at the same time
async.parallel([
    firstQueryFunction,
    secondQueryFunction,
    thirdQueryFunction,
    fourthQueryFunction
], function (err, result) {
    // This code will be executed after all previous queries are done (the order doesn't matter).
    // For example you can do another query that depends on the result of all the previous queries.
});
As per Gesper's answer, I'd recommend the async library; however, I would specifically recommend running the queries in parallel (unless the result of the 1st query is used as input to the 2nd query).
var async = require('async');

function runQueries(param1, param2, callback) {
    async.parallel([query1, query2, query3(param1, param2), query4],
        function (err, results) {
            if (err) {
                callback(err);
                return;
            }
            var combinedResult = {};
            for (var i = 0; i < results.length; i++) {
                combinedResult.query1 = combinedResult.query1 || results[i].query1;
                combinedResult.query2 = combinedResult.query2 || results[i].query2;
                combinedResult.query3 = combinedResult.query3 || results[i].query3;
                combinedResult.query4 = combinedResult.query4 || results[i].query4;
            }
            callback(null, combinedResult);
        });
}

function query1(callback) {
    dataResource.Query(function (err, result) {
        var interimResult = {};
        interimResult.query1 = result;
        callback(null, interimResult);
    });
}

function query2(callback) {
    dataResource.Query(function (err, result) {
        var interimResult = {};
        interimResult.query2 = result;
        callback(null, interimResult);
    });
}

function query3(param1, param2) {
    return function (callback) {
        dataResource.Query(param1, param2, function (err, result) {
            var interimResult = {};
            interimResult.query3 = result;
            callback(null, interimResult);
        });
    };
}

function query4(callback) {
    dataResource.Query(function (err, result) {
        var interimResult = {};
        interimResult.query4 = result;
        callback(null, interimResult);
    });
}
Query3 shows the use of parameters being 'passed through' to the query function.
I'm sure someone can show me a much better way of combining the results, but that is the best I have come up with so far. The reason for the interim object is that the results parameter passed to your callback is an array of results, and it can be difficult to determine which result belongs to which query.
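One possibly cleaner way to combine them, if you pass async.parallel an object of named tasks instead of an array (the library accepts both), is to skip the interim objects entirely. A sketch, reusing the hypothetical dataResource calls from above:

async.parallel({
    query1: function (cb) { dataResource.Query(cb); },
    query2: function (cb) { dataResource.Query(cb); },
    query3: function (cb) { dataResource.Query(param1, param2, cb); },
    query4: function (cb) { dataResource.Query(cb); }
}, function (err, results) {
    if (err) return callback(err);
    // results.query1 ... results.query4 are already keyed by task name
    callback(null, results);
});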
Good luck.
I'm building a Twitter clone using Node.js and MongoDB with Mongoose. My Tweet model has body, user and created fields, where user is the id of the user who created the tweet. Now I'm building the API. When I make a GET request to /api/tweets/ I want to receive a list of all the tweets, but instead of the user field (which holds only the id of the user) I want the whole user object, so that I can display information about the tweet owner in my front end. I ended up with the following code.
exports.all = function (req, res, next) {
    Tweet.find({}, function (err, tweets) {
        if (err) return res.json(400, err);
        var response = [];
        tweets.forEach(function (element, index, array) {
            var tweet = {};
            tweet._id = element._id;
            tweet.created = element.created;
            tweet.body = element.body;
            User.findById(element.user, function (err, user) { // <- This line
                if (err) return res.json(400, err);
                tweet.user = user;
            });
            response.push(tweet);
        });
        return res.json(response);
    });
};
It works perfectly except that it doesn't add the user info. The problem is in the line I have marked. When JavaScript reaches that line, it fires the query off "in parallel" and continues with the rest of the code without waiting for the callback function to run, so it pushes the tweet object before it has the user info. How can I fix this?
You're going to want to use the async library. It will make your life much easier.
// inside `Tweet.find`
async.each(tweets, function (element, done) {
    // do stuff to tweets
    User.findById(element.user, function (err, user) {
        if (err) return done(err);
        // do stuff with user
        done();
    });
}, function (err) {
    // called when done
    res.json(response);
});
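Put together with the route from the question, the full handler might look roughly like this (a sketch; error handling kept minimal and names taken from the question):

var async = require('async');

exports.all = function (req, res, next) {
    Tweet.find({}, function (err, tweets) {
        if (err) return res.json(400, err);
        var response = [];
        async.each(tweets, function (element, done) {
            // build the plain response object for this tweet
            var tweet = {
                _id: element._id,
                created: element.created,
                body: element.body
            };
            User.findById(element.user, function (err, user) {
                if (err) return done(err);
                tweet.user = user;
                response.push(tweet);
                done();
            });
        }, function (err) {
            if (err) return res.json(400, err);
            res.json(response); // only send once every lookup has finished
        });
    });
};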
The issue is that res.json sends the response so it doesn't matter that findById is being called. You need to call res.json once everything is done.
You can do this in several ways, but the easiest one with your existing code is to just keep a counter:
var tweetCount = 0;
tweets.forEach(function (element) { /* snip */
    User.findById(element.user, function (err, user) {
        tweet.user = user;
        tweetCount++;
        response.push(tweet);
        if (tweetCount == tweets.length) {
            res.json(response);
        }
    });
});
You can use the Q promise library to synchronize this. It is simple and easy to use.
The response will only be sent when the whole promise chain is completed:
var result = Q();
tweets.forEach(function (tweet, i) {
    result = result.then(function () {
        // do your stuff
    });
});
result.then(function () {
    res.json(response); // successfully completed
});
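The "do your stuff" placeholder could, for instance, wrap the Mongoose lookup with Q.ninvoke, which adapts a node-style callback into a promise. A rough sketch using the names from the question, meant to go inside the forEach above:

result = result.then(function () {
    // look up the user for this tweet and build the response entry
    return Q.ninvoke(User, 'findById', tweet.user).then(function (user) {
        response.push({
            _id: tweet._id,
            created: tweet.created,
            body: tweet.body,
            user: user
        });
    });
});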