Let a function take precedence/priority over another in Node.js - javascript

I have a MongoClient call, which is asynchronous, followed by a child process call that I am explicitly making synchronous. The child process takes a parameter nextPort, and the new value of nextPort is assigned inside the MongoClient callback. Since cp.execSync runs before the MongoClient callback fires, it doesn't see the new value of the variable, only the value it was declared with.
app.js
var nextPort = 0; // Declaration
MongoClient.connect(url1, { useUnifiedTopology: true }, function(err, db) {
    if (err) throw err;
    var dbo = db.db("mydb");
    dbo.collection("mycol").distinct('port', function(err, result) {
        if (err) throw err;
        portArr = result;
        console.log(portArr.length);
        nextPort = portArr[portArr.length - 1] + 1; // New value of nextPort
        console.log(nextPort);
        db.close();
    });
});
console.log('Container Created\n');
console.log(nextPort);
const result3 = cp.execSync('docker run -d -p ' + nextPort + ':27017 -v ' + volumeLoc + ' --name ' + containerName + ' mongo:' + version);
// Takes the declaration value instead of the new one
console.log(result3.toString());
How do I execute MongoClient before cp.execSync? (Also, I'd really appreciate it if the solution doesn't deal with promises.)

Try to learn the async/await syntax for cleaner code. I don't know if this is just a script or a function; either way, I wrapped it in an async IIFE:
(async () => {
    var nextPort = 0; // Declaration
    try {
        var db = await MongoClient.connect(url1, { useUnifiedTopology: true });
        var dbo = db.db("mydb");
        var result = await dbo.collection("mycol").distinct("port");
        portArr = result;
        console.log(portArr.length);
        nextPort = portArr[portArr.length - 1] + 1; // New value of nextPort
        console.log(nextPort);
        db.close();
    } catch (err) {
        throw err;
    }
    console.log("Container Created\n");
    console.log(nextPort);
    const result3 = cp.execSync(
        "docker run -d -p " +
            nextPort +
            ":27017 -v " +
            volumeLoc +
            " --name " +
            containerName +
            " mongo:" +
            version
    );
    // nextPort now holds the updated value
    console.log(result3.toString());
})();

You either need to call your cp.execSync inside the callback function you provided to MongoClient.connect, or use the async/await syntax, e.g.:
const client = await MongoClient.connect(url1, { useUnifiedTopology: true })
const result3 = cp.execSync(...)
Make sure your wrapper function is async (a factory method that instantiates the client in a separate module would be a better approach); take a look at the example above.
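For reference, here is a minimal sketch of the first, callback-only option (no promises, as the question requested); cp, url1, volumeLoc, containerName and version are assumed to be defined as in the question:
MongoClient.connect(url1, { useUnifiedTopology: true }, function (err, db) {
    if (err) throw err;
    var dbo = db.db("mydb");
    dbo.collection("mycol").distinct('port', function (err, result) {
        if (err) throw err;
        var nextPort = result[result.length - 1] + 1; // new value is known here
        db.close();
        // execSync only runs after nextPort has been assigned
        var result3 = cp.execSync('docker run -d -p ' + nextPort + ':27017 -v ' +
            volumeLoc + ' --name ' + containerName + ' mongo:' + version);
        console.log('Container Created\n');
        console.log(result3.toString());
    });
});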

Related

TypeError [ERR_INVALID_CALLBACK]: Callback must be a function

I wanted to make a script that adds a new rule to an Angular webpack app, as below. Sometimes the code executes partially; sometimes it gives an error.
const fs = require('fs');
const commonCliConfig = 'node_modules/@angular-devkit/build-angular/src/angular-cli-files/models/webpack-configs/common.js';
const pug_rule = "\n{ test: /\\.pug$/, loader: ['raw-loader' , 'pug-html-loader' ]},";
var configText = "";
fs.readFile(commonCliConfig, function(err, data) {
    if (err) throw err;
    configText = data.toString();
    if (configText.indexOf(pug_rule) > -1) { return; }
    const position = configText.indexOf('rules: [') + 8;
    const output = [configText.slice(0, position), pug_rule, configText.slice(position)].join('');
    const file = fs.openSync(commonCliConfig, 'r+');
    fs.writeFile(file, output);
    fs.close(file);
});
Terminal: node pug-rule.js
fs.js:148
    throw new ERR_INVALID_CALLBACK();
    ^
TypeError [ERR_INVALID_CALLBACK]: Callback must be a function
    at makeCallback (fs.js:148:11)
    at Object.fs.close (fs.js:520:20)
    at path/pug-rule.js:18:5
    at FSReqWrap.readFileAfterClose [as oncomplete] (fs.js:422:3)
fs.writeFile(...) requires a third (or fourth) parameter, a callback function to be invoked when the operation completes. The same applies to fs.close(...), which is where your stack trace actually points (fs.js:520:20). You should either provide callback functions or use the synchronous variants such as fs.writeFileSync(...).
See the Node fs docs for more info.
Try this:
fs.readFile('readMe.txt', 'utf8', function (err, data) {
    fs.writeFile('writeMe.txt', data, function (err, result) {
        if (err) console.log('error', err);
    });
});
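Applied to the script from the question, a minimal sketch might look like this (same file and variable names as in the question; writing by path with fs.writeFile truncates and rewrites the file, so the r+ descriptor and the problematic fs.close call are no longer needed):
fs.readFile(commonCliConfig, 'utf8', function (err, data) {
    if (err) throw err;
    if (data.indexOf(pug_rule) > -1) { return; } // rule already present
    const position = data.indexOf('rules: [') + 8;
    const output = data.slice(0, position) + pug_rule + data.slice(position);
    // writeFile is given the callback it requires
    fs.writeFile(commonCliConfig, output, function (err) {
        if (err) throw err;
    });
});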

Nodejs send ajax to client in callback method

I am writing a Cmus Remote that is browser-based and uses Node.js on the backend. Part of the app involves getting the currently playing song and displaying it to the user.
Currently it successfully runs the command and stores the output into a variable properly, but the client-side request completes before the callback of the server-side function, so it returns an empty string for the song.
Here is the code to better illustrate what I mean:
index.html
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <title>Cmus Remote</title>
    <script src="client.js"></script>
    <script src="https://ajax.googleapis.com/ajax/libs/jquery/3.2.1/jquery.min.js"></script>
</head>
<body id="body">
</body>
</html>
client.js
"use strict";
window.onload = function () {
    $.get("/songInfo", function (string) {
        alert(string);
    });
};
server.js
var express = require('express');
var app = express();
var path = require('path');
var exec = require('child_process').exec;
var fs = require('fs');
var child;

var getSongCommand = "cmus-remote -Q | sed -n -e 's/^.*tag title //p'";
var getAlbumCommand = "cmus-remote -Q | sed -n -e 's/^.*tag album //p'";
var getArtistCommand = "cmus-remote -Q | sed -n -e 's/^.*tag artist //p'";

var song = "";
var album = "";
var artist = "";

app.use(express.static(__dirname + '/public'));

app.get('/', function (req, res) {
    res.sendFile(path.join(__dirname + '/index.html'));
});

app.get('/songInfo', function (req, res) {
    updateSongInfo(getSongCommand);
    updateSongInfo(getAlbumCommand);
    updateSongInfo(getArtistCommand);
    var strings = [song, artist, album];
    res.send(strings);
});

var server = app.listen(8080, function () {
    console.log("Server online");
});

function updateSongInfo(command) {
    var exec = require('child_process').exec;
    exec(command, function (error, stdout, stderr) {
        callback(command, stdout);
    });
}

function callback(commandRan, output) {
    console.log("Commandran = " + commandRan);
    console.log("Command output = " + output);
    if (commandRan.includes("title")) {
        console.log("Updating song to " + output);
        song = output;
    }
    if (commandRan.includes("album")) {
        album = output;
    }
    if (commandRan.includes("artist")) {
        artist = output;
    }
    // console.log("In callback");
    // console.log(output);
    return output;
}
To summarize: the ajax response is working properly, the commands run properly, and the values are saved to the three global variables I have, but I am not sure how to arrange the timing so that the ajax request returns only once the variables have values.
The problem is that exec is asynchronous: exec is called, the program continues and returns the still-empty data to the caller, and only later does the exec callback run with the data now received.
To fix this you can use Promise and async/await.
app.get('/songInfo', async function (req, res) {
    await updateSongInfo(getSongCommand);
    await updateSongInfo(getAlbumCommand);
    await updateSongInfo(getArtistCommand);
    var strings = [song, artist, album];
    res.send(strings);
});
...
function updateSongInfo(command) {
    return new Promise((resolve, reject) => {
        var exec = require('child_process').exec;
        exec(command, function (error, stdout, stderr) {
            callback(command, stdout);
            return resolve();
        });
    });
}
Calling resolve() inside the Promise completes it, while calling reject() makes the awaiting code throw an error. You can also pass parameters to those functions.
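Since the three commands are independent of each other, one possible refinement (an assumption on my part, not something the original answer shows) is to start them concurrently and wait for all of them with Promise.all:
app.get('/songInfo', async function (req, res) {
    // all three exec calls start immediately and run in parallel
    await Promise.all([
        updateSongInfo(getSongCommand),
        updateSongInfo(getAlbumCommand),
        updateSongInfo(getArtistCommand)
    ]);
    res.send([song, artist, album]);
});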
Your updateSongInfo function runs asynchronously, so your server is sending back a response before the update has completed. What you'll need to do is implement either Promises or a callback that runs after those functions have completed. I would also suggest not using global variables here, but instead returning the result each time. Here's an example:
app.get('/songInfo', function (req, res) {
    var song, artist, album;
    updateSongInfo(getSongCommand, function (err, result) {
        if (err) return res.send(err);
        song = result;
        updateSongInfo(getAlbumCommand, function (err, result) {
            if (err) return res.send(err);
            album = result;
            updateSongInfo(getArtistCommand, function (err, result) {
                if (err) return res.send(err);
                artist = result;
                // Now all three values are filled in
                return res.send([song, artist, album]);
            });
        });
    });
});

function updateSongInfo(command, cb) {
    var exec = require('child_process').exec;
    exec(command, function (error, stdout, stderr) {
        callback(command, stdout, cb);
    });
}

function callback(commandRan, output, cbFunction) {
    console.log("Commandran = " + commandRan);
    console.log("Command output = " + output);
    if (commandRan.includes("title")) {
        console.log("Updating song to " + output);
        song = output;
    }
    if (commandRan.includes("album")) {
        album = output;
    }
    if (commandRan.includes("artist")) {
        artist = output;
    }
    // console.log("In callback");
    // console.log(output);
    return cbFunction(null, output);
}

Is it okay to use synchronous code to load data into the memory at startup?

In my routes.js file, I've this:
var pages = require('./pages')();
...
app.get('/:page', function (req, res, next) {
    var p = req.params.page;
    if (p in pages) {
        res.render('page', pages[p]);
    } else {
        next();
    }
});
pages.js:
module.exports = function () {
    var fs = require('fs'),
        ret = [],
        dir = './pages',
        files = fs.readdirSync(dir);
    files.forEach(function (file) {
        var text = fs.readFileSync(dir + '/' + file, 'utf-8'),
            fileName = file.substr(0, file.lastIndexOf('.'));
        ret[fileName] = {content: text};
    });
    return ret;
};
This code runs only once, when I start node. This is how I can make it async:
require('./pages')(function (pages) {
    app.get('/:page', function (req, res, next) {
        var p = req.params.page;
        if (p in pages) {
            res.render('page', pages[p]);
        } else {
            next();
        }
    });
});
pages.js:
module.exports = function (callback) {
    var fs = require('fs'),
        ret = [],
        dir = './pages';
    fs.readdir(dir, function (err, files) {
        if (err) throw err;
        files.forEach(function (file, i) {
            fs.readFile(dir + '/' + file, 'utf-8', function (err, text) {
                if (err) throw err;
                var fileName = file.substr(0, file.lastIndexOf('.'));
                ret[fileName] = {content: text};
                if (i === (files.length - 1)) callback(ret);
            });
        });
    });
};
Assuming the pages total no more than 1 MB in size, I can cache the text in memory indefinitely without node crashing from running out of memory.
Should I be using the async code?
According to what I've learnt, the async version will make node start listening on localhost faster, but /:page URLs will only work when the files have been loaded into memory.
Is the async code in the right pattern?
What if I need to reuse the pages object in another file? Right now it is only accessible in routes.js.
Can I rewrite pages.js to execute only once like this:
var ret = [];
module.exports = function (callback) {
    var fs = require('fs'),
        dir = './pages';
    if (ret.length < 1) {
        fs.readdir(dir, function (err, files) {
            if (err) throw err;
            files.forEach(function (file, i) {
                fs.readFile(dir + '/' + file, 'utf-8', function (err, text) {
                    if (err) throw err;
                    var fileName = file.substr(0, file.lastIndexOf('.'));
                    ret[fileName] = {content: text};
                    if (i === (files.length - 1)) callback(ret);
                });
            });
        });
    } else {
        callback(ret);
    }
};
What if require('./pages')(function(pages) {}) is called multiple times together? Is there a chance of the if condition failing? I can't wrap my mind around this.
Should I be using the async code?
If you want to, why not. But there's no real need for it; synchronous IO on startup is fine, require does it as well.
Is the async code in the right pattern?
No. The callback can fire before all files have been read: the parallel reads complete in any order, and the guard only checks the index of the last file in the listing, not whether every read has finished. And if the module is called more than once, calling app.get('/:page', …) multiple times is not what you want either.
What if I need to reuse the pages object in another file? Right now it is only accessible in routes.js.
You could pass it from routes.js to the other modules. Or just rewrite pages.js to store it statically and execute the async things only once, so that you can require it multiple times.
What if require('./pages')(function(pages) {}) is called multiple times together? Is there a chance of the if condition failing?
Yes, it will most certainly fail, because you populate ret only asynchronously; a second call can pass the ret.length < 1 check before the first call's reads have finished.
I can't wrap my mind around this.
Use promises. They act as asynchronous, immutable values, which is just what you need here. They guarantee that callbacks are only invoked once and that every callback is invoked with the same ret value, and they provide many more useful things (like managing the parallel file reads for you).
You'll want to export a promise from pages.js.
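A minimal sketch of that approach (assuming the same ./pages layout as in the question): the promise is created once at require time, so every module that requires pages.js shares the same result.
// pages.js
var fs = require('fs');

module.exports = new Promise(function (resolve, reject) {
    fs.readdir('./pages', function (err, files) {
        if (err) return reject(err);
        var ret = {};
        var remaining = files.length;
        if (remaining === 0) return resolve(ret);
        files.forEach(function (file) {
            fs.readFile('./pages/' + file, 'utf-8', function (err, text) {
                if (err) return reject(err);
                var fileName = file.substr(0, file.lastIndexOf('.'));
                ret[fileName] = {content: text};
                // resolve only once every file has been read
                if (--remaining === 0) resolve(ret);
            });
        });
    });
});
Any module can then use require('./pages').then(function (pages) { … }).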

Using Async queue to control DB connection request

I'm building an app with Node and MongoDB Native. I'm working on a db module which I can require and call in other modules, so that I end up using just one connection. The module db.js started out with this code:
var _db = null;
var getDb = module.exports.getDb = function (callback) {
    if (_db) {
        console.log('_db returned');
        return callback(null, _db);
    }
    MongoClient.connect('mongodb://localhost:' + config.db.port + '/' + config.db.name, {native_parser: true}, function (err, db) {
        if (err) return callback(err);
        console.log('_db created');
        _db = db;
        callback(err, _db);
    });
};
In my other modules that need a db connection I do this
db.getDb(function (err, connection) {
    // Do something with connection
});
It works fine. But an unpleasant problem is that if my code calls getDb multiple times in a very short time span, I end up with several copies of a connection; for example, if I require db.js and call getDb at the very beginning of every module that needs a db connection.
I'm now thinking about controlling the calls to getDb by queuing them, so that only the very first call creates a connection and saves it in _db, and all later calls get the existing _db in return. I believe Async's queue will help me with this...
The problem is that I don't understand how to write this with Async queue. The documentation is a little vague, and I haven't found any better examples online. Maybe you can give me some hints. This is what I've got so far...
var dbCalls = async.queue(function (task, callback) {
    if (_db) {
        console.log('_db returned');
        return callback(null, _db);
    }
    MongoClient.connect('mongodb://localhost:' + config.db.port + '/' + config.db.name, {native_parser: true}, function (err, db) {
        if (err) return callback(err);
        console.log('Connected to mongodb://localhost:' + config.db.port + '/' + config.db.name);
        _db = db;
        callback(null, _db);
    });
}, 1);

// I guess this .push() must be the exposed (exported) API for other modules
// to get a connection, but how do I return it to them?
dbCalls.push(null, function (err) {
    console.log('finished processing foo');
});
dbCalls.push(null, function (err) {
    console.log('finished processing bar');
});
I don't understand the object passed as the first argument to .push(). What should I use it for? Right now it's null. How do I pass the connection (and a possible error) all the way out to the module that made the call?
A quick and dirty solution without async.queue:
var _db = null;
var _err = null;
var _queue = [];
var _pending = false;

var getDb = module.exports.getDb = function (callback) {
    if (_err || _db) {
        console.log('_db returned');
        return callback(_err, _db);
    } else if (_pending) { // already a connect() request pending
        _queue.push(callback);
    } else {
        _pending = true;
        _queue.push(callback);
        MongoClient.connect(..., function (err, db) {
            _err = err;
            _db = db;
            _queue.forEach(function (queuedCallback) {
                queuedCallback(err, db);
            });
        });
    }
};
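Calling modules keep the same interface as before; any getDb calls that arrive while the first connect is still pending are queued, and all of them receive the same connection:
var db = require('./db');

db.getDb(function (err, connection) {
    if (err) throw err;
    // Do something with connection; only one MongoClient.connect() ever runs
});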

Node.js read and write file lines

I tried to read a file line by line, and output it to another file, using Node.js.
My problem is that the sequence of lines sometimes gets messed up due to the async nature of Node.js.
eg my input file is like:
line 1
line 2
line 3
but output file could be like:
line 1
line 3
line 2
Below is my code.
var fs = require("fs");
var index = 1;
fs.readFileSync('./input.txt').toString().split('\n').forEach(
    function (line) {
        console.log(line);
        fs.open("./output.txt", 'a', 0666, function (err, fd) {
            fs.writeSync(fd, line.toString() + "\n", null, undefined, function (err, written) {
            });
        });
    }
);
Any thoughts would be appreciated, thanks.
If you're writing synchronous code, use only the synchronous functions:
var fs = require("fs");
fs.readFileSync('./input.txt').toString().split('\n').forEach(function (line) {
    console.log(line);
    fs.appendFileSync("./output.txt", line.toString() + "\n");
});
For an asynchronous approach you could write something like this, using streams with the carrier line-reader module:
var fs = require('fs'),
    carrier = require('carrier');

var input = fs.createReadStream('./input.txt'),
    output = fs.createWriteStream('./output.txt', { flags: 'a' });

carrier.carry(input)
    .on('line', function (line) {
        output.write(line + '\n'); // carrier strips the newline, so add it back
    })
    .on('end', function () {
        output.end();
        console.log("Done");
    });
I suppose you want to perform some calculations and/or transformations on every line. If not, a simple copy is a one-liner (take a look at the createReadStream documentation):
fs.createReadStream('./input.txt').pipe(fs.createWriteStream('./output.txt'));
Now, you are trying to open the file each time you want to write a line, and yes, the order is unpredictable here.
A more correct version of your code:
var lines = fs.readFileSync('./input.txt').toString().split('\n');

function writeLineFromArray(lines) {
    if (lines.length === 0) return; // all lines written
    var line = lines.shift();
    fs.open("./output.txt", 'a', 0666, function (err, fd) {
        fs.write(fd, line + '\n', null, 'utf8', function (err, written) {
            fs.close(fd, function () {
                // append the next line only after this one has been written
                writeLineFromArray(lines);
            });
        });
    });
}

writeLineFromArray(lines);
I'd probably use one of the 'given an input stream, notify me on each line' modules, for example node-lazy or byline:
var fs = require('fs'),
    byline = require('byline');

var stream = byline(fs.createReadStream('sample.txt'));
stream.on('line', function (line) {
    // do stuff with line
});
stream.pipe(fs.createWriteStream('./output'));
Why don't you use a Node.js module made for this: https://github.com/pvorb/node-read-files
Installation: npm install read-files
Usage:
var fs = require("fs");
require("read-files");

fs.readFiles([ "file1", "file2" ], "utf8", function (err, data) {
    if (err) throw err;
    console.log(data);
});
