I have created some almost unreadable code like this, with different MongoDB queries. There is a high chance you won't make it to the end.
Collection1.find({}, (err, doc) => {
  if (err)
    throw err;
  else if (doc) {
    Collection2.find({}, (err, doc) => {
      if (err)
        throw err;
      else if (doc) {
        let cursor = Collection3.find({})
        cursor.exec((err, doc) => {
          if (err)
            throw err;
          else if (doc) {
            Collection4.find();
          }
        });
      } else {
        var y = new Collection2({
          x: "S",
          y: "D"
        })
        y.save((err, doc) => {
          if (err)
            throw err;
          else if (doc) {
            Collection3.find({});
            let cursor = Collection3.find({})
            cursor.exec((err, doc) => {
              if (err)
                throw err;
              else if (doc) {
                Collection4.find();
              }
            })
          }
        })
      }
    })
  } else {
    taskComplete();
  }
})
Now, one solution to the callback hell problem is to break those anonymous functions out into named functions. That might solve part of the problem, but I think we can do better than that.
I also thought about async.series and async.waterfall. Somehow I feel they don't fit the bill, as it's not simply a matter of running one task after another; it's more of an "if this, then that" situation, which neither of them supports. I cannot choose which task to run based on conditions.
Which leaves me with promises and generators.
Well, those DB calls are promises in themselves, but how to integrate that with if and else is something I don't have experience with. I was hoping that more experienced JavaScript developers here on SO could help me out and give me some guidelines on how to handle this more gracefully.
Edit: I haven't really given generators much thought. But would they help if included?
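For reference, this is roughly the shape I am imagining with async/await (a sketch only; it assumes Mongoose-style models whose queries return promises when no callback is passed, and reuses the model names and taskComplete from the snippet above):
async function runQueries() {
  // Mirrors the truthiness checks from the original callbacks.
  const doc1 = await Collection1.find({});
  if (!doc1) {
    taskComplete();
    return;
  }

  let doc2 = await Collection2.find({});
  if (!doc2) {
    // Create the missing document before moving on.
    doc2 = await new Collection2({ x: "S", y: "D" }).save();
  }

  const doc3 = await Collection3.find({}).exec();
  if (doc3) {
    await Collection4.find();
  }
}

runQueries().catch(err => console.error(err));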
Related
In the code below, users.push used within db.each won't work. However, if I move users.push outside, then it seems to work.
How can I push the new objects from db.each into the users array?
let db = new sqlite3.Database('./db/main.db', (err) => {
  if (err) console.error(err.message);
  console.log('Connected to the main database.');
});

var users = [];

db.serialize(() => {
  db.each(`SELECT email, name FROM users`, (err, row) => {
    if (err) console.error(err.message);
    let user = {
      email: row.email,
      name: row.name
    }
    users.push(user);
  });
});

console.log(JSON.stringify(users));
db.close();
I am using express and sqlite3 node packages.
It's because db.serialize and db.each are asynchronous: the calls return immediately, so your console.log runs before the row callbacks are executed.
Here is a working example:
db.serialize(() => {
  db.each(`SELECT email, name FROM users`, (err, row) => {
    if (err) {
      console.error(err.message);
      return;
    }
    let user = {
      email: row.email,
      name: row.name
    };
    users.push(user);
  }, () => {
    // Completion callback from db.each: runs once all rows have been handled.
    console.log(JSON.stringify(users));
    db.close();
  });
});
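If the rows are only needed to fill the users array, another option worth noting is db.all, which hands back every row in a single callback; a minimal sketch, reusing the db and users variables from the question:
db.all(`SELECT email, name FROM users`, (err, rows) => {
  if (err) {
    console.error(err.message);
    return;
  }
  // rows is already an array of row objects.
  rows.forEach((row) => users.push({ email: row.email, name: row.name }));
  console.log(JSON.stringify(users));
  db.close();
});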
First error: asynchronicity not handled properly
As Antoine Chalifour pointed out, you call console.log(JSON.stringify(users)); before users gets modified in the asynchronous callback. Refer to his answer for the fix and explanation.
Second error: errors not handled
You wrote if (err) { console.error(err.message); } then go on with the rest of the function. That is bad, because an error might happen and you'd just continue with your program. You should instead write something like:
if (err) {
  console.error(err);
  return;
}
or:
if (err) throw err;
I use nodejs and mysql for my app. I do the DB query in the following manner:
try {
  myDB.query(SQL, object, (err, res) => {
    if (err) throw err
    ...
  })
} catch (err) {
  console.log(err.message)
}
But this does not work, because the query function is asynchronous. So how do I catch the errors that can occur in the callback? Please help.
You can't usefully throw inside asynchronous callback code: by the time the callback runs, the surrounding try/catch has already finished. You must use asynchronous error handling instead:
function makeQuery(callback) {
  myDB.query(SQL, object, (err, res) => {
    if (err) {
      callback(err)
      return
    }
    ...
  })
}
It's up to the caller to provide a suitable callback function that takes (err, response) or something similar. It's also the responsibility of the caller to intercept, handle, or forward any and all errors.
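For example, a caller might look like this (a sketch only; the success value depends on what the elided part of makeQuery eventually passes to the callback):
makeQuery((err, result) => {
  if (err) {
    console.error(err.message)
    return
  }
  // use result here
})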
If you use Promise-driven code, you can either use .catch() or an async function with await, which does work inside try. Sequelize is a good Promise-driven database library.
Then you have code that looks like this:
let result = await myDB.query(SQL, object)
Which is obviously a lot cleaner.
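If you are staying with the callback-based driver for now, you can bridge the gap yourself; a minimal sketch, reusing SQL and object from the question (queryAsync is just a hypothetical helper name):
function queryAsync(SQL, object) {
  return new Promise((resolve, reject) => {
    myDB.query(SQL, object, (err, res) => {
      if (err) return reject(err)
      resolve(res)
    })
  })
}

async function run() {
  try {
    const res = await queryAsync(SQL, object)
    // work with res here
  } catch (err) {
    console.log(err.message)
  }
}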
You could use this example:
try {
  connection.query('SELECT * FROM ??', [Table], function (err) {
    if (err) console.error('err from callback: ' + err.stack);
  });
} catch (e) {
  console.error('err thrown: ' + e.stack);
}
For example, if "Table" does not exist, you would get a response like:
'Table' doesn't exist
EDIT:
What @tadman says is correct: if you use if (err) throw err inside the callback, you only generate an uncaught exception and lose the error information you need.
I keep running into this pattern when coding in Meteor where I find myself making multiple method calls nested within each other - first method fires, then in the callback, a second one fires which is dependent on the first one's result, etc. Is there a better pattern for using multiple methods without nested method calls inside callbacks? The code quickly gets messy.
Meteor.call('unsetProduct', product._id, omitObj, function(err, result) {
  if (!err) {
    Meteor.call('editProduct', product._id, object, function(err, result) {
      if (!err) {
        //if no error, then continue to update the product template
        Meteor.call('editProductTemplate', self._id, obj, function(err, result) {
          if (!err) {
            //call some other method
          }
          else {
            FormMessages.throw(err.reason, 'danger');
          }
        });
      }
      else {
        FormMessages.throw(err.reason, 'danger');
      }
    }); //end edit product
  }
  else {
    AppMessages.throw(err.reason, 'danger');
  }
});
Take a look at the reactive-method package. I think it does exactly what you need: it wraps asynchronous Meteor.call invocations in synchronous-looking code. With it, your code would look cleaner, like this:
try {
  const result = ReactiveMethod.call('unsetProduct', product._id, omitObj);
} catch (error) {
  AppMessages.throw(error.reason, 'danger');
}

try {
  const nestedResult = ReactiveMethod.call('editProduct', product._id, object);
} catch (error) {
  FormMessages.throw(error.reason, 'danger');
}

try {
  const evenMoreNestedResult = ReactiveMethod.call('editProductTemplate', self._id, obj);
} catch (error) {
  FormMessages.throw(error.reason, 'danger');
}
This will look even nicer once you add some logic inside the try blocks.
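If you prefer to stick with plain Meteor.call, a common alternative is to wrap it in a Promise and use async/await; a rough sketch (callPromise is a hypothetical helper, and the single catch collapses the separate AppMessages/FormMessages handling from the original code):
function callPromise(name, ...args) {
  return new Promise((resolve, reject) => {
    Meteor.call(name, ...args, (err, result) => {
      if (err) return reject(err);
      resolve(result);
    });
  });
}

async function updateProduct() {
  try {
    await callPromise('unsetProduct', product._id, omitObj);
    await callPromise('editProduct', product._id, object);
    await callPromise('editProductTemplate', self._id, obj);
    //call some other method
  } catch (err) {
    FormMessages.throw(err.reason, 'danger');
  }
}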
What I want to do is read a file and then be able to perform other operations with that information as I write the file. For example:
read file
write file and at the same time perform MD5 hash, digital signing etc.
I could use fs.readFile and fs.writeFile as one operation and just copy the file from the web server to my computer, but then I don't think I could still perform those same operations in between. Anyway, skipping the in-between stuff: how do I use fs.readFile and fs.writeFile to create two separate functions that copy a file? Here is what I have been working on, and yes, I've read these forums extensively in search of an answer.
var fs = require('fs');

function getData(srcPath) {
  fs.readFile(srcPath, 'utf8', function (err, data) {
    if (err) throw err;
    return data;
  });
}

function writeData(savPath, srcPath) {
  fs.writeFile(savPath, (getData(srcPath)), function(err) {
    if (err) throw err;
    console.log('complete');
  });
}

//getData ('./test/test.txt');
writeData('./test/test1.txt', './test/test.txt');
I want to be able to download files of any type and just make raw copies, with md5 hash etc attached to a JSON file. That will probably be a question for later though.
As suggested by dandavis in his comment, the return inside readFile's callback does nothing for you, because readFile is an asynchronous call. Check out this answer for additional information on what that means.
In short, an async call will never wait for the result to return. In your example, getData does not wait for readFile() to return the result you want, but will finish right away. Async calls are usually handled by passing callbacks, which is the last parameter to readFile and writeFile.
In any case, there are two ways to do this:
1. Do it asynchronously (which is the proper way):
function copyData(savPath, srcPath) {
  fs.readFile(srcPath, 'utf8', function (err, data) {
    if (err) throw err;
    //Do your processing, MD5, send a satellite to the moon, etc.
    fs.writeFile(savPath, data, function(err) {
      if (err) throw err;
      console.log('complete');
    });
  });
}
2. Do it synchronously. Your code won't have to change much; you will just need to replace readFile and writeFile with readFileSync and writeFileSync respectively. Warning: using this method is not only against best practices, but also defeats the very purpose of using nodejs (unless of course you have a very legitimate reason).
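A minimal sketch of the synchronous variant, reusing the fs module and paths from the question:
function copyDataSync(savPath, srcPath) {
  // Blocks the event loop until each call finishes.
  var data = fs.readFileSync(srcPath, 'utf8');
  //Do your processing, MD5, send a satellite to the moon, etc.
  fs.writeFileSync(savPath, data);
  console.log('complete');
}

copyDataSync('./test/test1.txt', './test/test.txt');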
Edit: As per OP's request, here is one possible way to separate the two methods, e.g., using callbacks:
function getFileContent(srcPath, callback) {
  fs.readFile(srcPath, 'utf8', function (err, data) {
    if (err) throw err;
    callback(data);
  });
}

function copyFileContent(savPath, srcPath) {
  getFileContent(srcPath, function(data) {
    fs.writeFile(savPath, data, function(err) {
      if (err) throw err;
      console.log('complete');
    });
  });
}
This way, you are separating the read part (in getFileContent) from the copy part.
I had to use this recently, so I converted verybadalloc's answer to promises.
function readFile (srcPath) {
  return new Promise(function (resolve, reject) {
    fs.readFile(srcPath, 'utf8', function (err, data) {
      if (err) {
        reject(err)
      } else {
        resolve(data)
      }
    })
  })
}

function writeFile (savPath, data) {
  return new Promise(function (resolve, reject) {
    fs.writeFile(savPath, data, function (err) {
      if (err) {
        reject(err)
      } else {
        resolve()
      }
    })
  })
}
Then using them is simple.
readFile('path').then(function (results) {
  results += ' test manipulation'
  return writeFile('path', results)
}).then(function () {
  //done writing file, can do other things
})
Usage with async/await (note that this has to run inside an async function, and results must be declared with let so it can be reassigned):
let results = await readFile('path')
results += ' test manipulation'
await writeFile('path', results)
// done writing file, can do other things
To read and write a file in a non-blocking or asynchronous way, you can use the advanced features of ES6 and later, such as Promises or async/await, but you must keep an eye on polyfills (https://javascript.info/polyfills). Or, if there are only a couple of reads/writes, you can live with callback hell:
function readFiles() {
  fs.readFile('./txt/start.txt', 'utf-8', (err, data1) => {
    if (err) return console.log(err);
    fs.readFile(`./txt/${data1}.txt`, 'utf-8', (err, data2) => {
      if (err) return console.log(err);
      fs.readFile('./txt/append.txt', 'utf-8', (err, data3) => {
        if (err) return console.log(err);
        writeFile('./txt/final.txt', `${data2}\n${data3}`);
      });
    });
  });
}

function writeFile(path, data) {
  fs.writeFile(path, data, 'utf-8', err => {
    if (err) {
      console.log(err);
    }
  });
}

readFiles();
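For comparison, here is a rough equivalent using the built-in fs.promises API (Node 10+) with async/await, assuming the same file layout:
const fsp = require('fs').promises;

async function readAndAppend() {
  try {
    const data1 = await fsp.readFile('./txt/start.txt', 'utf-8');
    const data2 = await fsp.readFile(`./txt/${data1}.txt`, 'utf-8');
    const data3 = await fsp.readFile('./txt/append.txt', 'utf-8');
    await fsp.writeFile('./txt/final.txt', `${data2}\n${data3}`);
  } catch (err) {
    console.log(err);
  }
}

readAndAppend();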
Maybe this is a simple and stupid question, but I'm just learning my first asynchronous server language and Redis is my first key-value DB.
Example: I need to do this:
$x = users:count
get user:$x
But with asynchronous JavaScript I get this code:
// redisClient is the connected node_redis client
redisClient.get('users:count', function(err, repl) {
  if (err) {
    errorHandler(err);
  } else {
    redisClient.get('user:' + repl, function(err, repl) {
      if (err) {
        errorHandler(err);
      } else {
        console.log('We get user ' + repl + '!')
      }
    })
  }
})
This code is not so large and not nested too much, but it looks like on my first non-example/test project I will end up with crazy nested callback functions.
How do I solve this and make the code pretty and readable?
function getUserCount(callback, err, repl) {
  if (err) {
    return callback(err);
  }
  redisClient.get('user:' + repl, getUser.bind(null, callback));
}

function getUser(callback, err, repl) {
  if (err) {
    return callback(err);
  }
  console.log('We get user ' + repl + '!');
}

redisClient.get('users:count', getUserCount.bind(null, errorHandler));
bind works wonders. If you prefer to have the bind abstracted away, you can use this to store state that would normally be stored in closures, like:
// bindAll fixes `this` inside each method so they can be passed around as callbacks.
require("underscore").bindAll({
  run: function (errorHandler) {
    this.errorHandler = errorHandler;
    redisClient.get('users:count', this.getUserCount);
  },
  getUserCount: function (err, repl) {
    if (err) return this.errorHandler(err);
    redisClient.get('user:' + repl, this.getUser);
  },
  getUser: function (err, repl) {
    if (err) return this.errorHandler(err);
    console.log('got user ', repl);
  }
}, "run", "getUserCount", "getUser").run(errorHandler);
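For completeness, the same flow can also be flattened with util.promisify and async/await; a rough sketch, assuming the callback-style redisClient and errorHandler from the snippets above:
const { promisify } = require('util');
const getAsync = promisify(redisClient.get).bind(redisClient);

async function showUser() {
  try {
    const count = await getAsync('users:count');
    const user = await getAsync('user:' + count);
    console.log('We get user ' + user + '!');
  } catch (err) {
    errorHandler(err);
  }
}

showUser();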