Node script doesn't ever end

I have the Node script below that copies the contents of some files and inserts them into Mongo.
The script never seems to end, and even though all the data gets inserted successfully, I always have to press Ctrl+C to kill it.
Is there something I'm supposed to call in Node.js to end a script?
var mongoose = require('mongoose');
mongoose.connect('mongodb://localhost/testdb');

var dir = './seeds';
var db = mongoose.connection;

// Show connection error if there is one
db.on('error', console.error.bind(console, 'Database Connection Error:'));

// If we successfully connected to mongo
db.once('open', function callback() {
    var fs = require('fs'); // Used to get all the files in a directory

    // Read all the files in the folder
    fs.readdir(dir, function(err, list) {
        // Log the error if something went wrong
        if (err) {
            console.log('Error: ' + err);
        }

        // For every file in the list
        list.forEach(function(file) {
            // Set the filename without the extension to the variable collection_name
            var collection_name = file.split(".")[0];
            var parsedJSON = require(dir + '/' + file);

            for (var i = 0; i < parsedJSON.length; i++) {
                // Counts the number of records in the collection
                db.collection('cohort').count(function(err, count) {
                    if (err) {
                        console.log(err);
                    }
                });

                db.collection(collection_name).insert(parsedJSON[i], function(err, records) {
                    if (err) {
                        console.log(err);
                    }
                    console.log(records[0]);
                    console.log("Record added as " + records[0]);
                });
            }
        });
    });
});

When everything is done, call mongoose.disconnect(). As @AaronDufour correctly points out, Node will not exit while event handler callbacks are registered, because it doesn't know that no more events are expected (the connection could still emit a 'close' or 'error' event, for example).
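For illustration, a minimal sketch of that approach (the pending counter and the reshuffled loop are assumptions for the example, not code from the question): count the outstanding inserts and disconnect once the last callback fires.

// Sketch: track outstanding inserts, then disconnect so Node can exit.
// `pending` is a hypothetical counter added for illustration.
var pending = 0;
list.forEach(function(file) {
    var collection_name = file.split(".")[0];
    var parsedJSON = require(dir + '/' + file);
    parsedJSON.forEach(function(doc) {
        pending++;
        db.collection(collection_name).insert(doc, function(err) {
            if (err) console.log(err);
            if (--pending === 0) {
                mongoose.disconnect(); // last insert done, nothing left to wait for
            }
        });
    });
});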

You can call process.exit() to exit.
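Note that process.exit() terminates the process immediately without waiting for pending asynchronous operations, so any inserts still in flight may be lost. A safer sketch (reusing the hypothetical pending counter from above) exits only after the last insert has reported back:

if (--pending === 0) {
    console.log('All inserts done');
    process.exit(0);
}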

How to create a new MongoDB database using the Node JS driver

I want to create a new database in MongoDB using the Node JS driver. I tried the following approaches, but neither of them created any database (I checked using the mongo shell and RoboMongo), and the worst part is that neither shows any errors; both programs run to completion without any error (I mean, the error is null).
First method, using the Mongo Server:
var Db = require('mongodb').Db,
    Server = require('mongodb').Server;

var db = new Db('myNewDatabase', new Server('localhost', 27017));

db.open(function (err, db) {
    if (err) {
        console.dir('ERROR we are in the callback of the open ');
        console.dir(err);
        throw err;
    }
    // Use the admin database for the operation
    var adminDb = db.admin();
    console.dir('we are in the callback of the open');
    db.close();
});
Second approach I followed is:
var server = "localhost";
var port = 27017;
var dbName = "myNewDatabase";

var mongodb = require('mongodb');
var mongoClient = mongodb.MongoClient;
var connString = "mongodb://" + server + ":" + port + "/" + dbName;

mongoClient.connect(connString, function(err, db) {
    console.dir(err);
    if (!err) {
        console.log("\nMongo DB connected\n");
        db.collection('test_correctly_access_collections', function(err, col2) {
            console.dir(err);
            if (err) {
                console.dir('There is an error in creating the collection');
                console.dir(err);
            }
            console.log("\nCollection created successfully\n");
            db.close();
        });
    } else {
        console.log("Mongo DB could not be connected");
        process.exit(0);
    }
});
According to this link, we can use the getDatabase API to create a new database. I looked for the same API in the Node JS driver, but I was unable to find it.
I searched Google and Stack Overflow for this question but found very little, so I am posting the answer myself.
First of all, thank you @somallg, you are right and I voted up your comment.
The answer is: you need to insert a document into a collection, and then MongoDB will create the new database and also the collection. So the approaches in my question can be rewritten as follows to create a new database using the Node JS driver.
First approach:
var Db = require('mongodb').Db,
    Server = require('mongodb').Server;

var db = new Db('myNewDatabase', new Server('localhost', 27017));

db.open(function (err, db) {
    if (err) {
        console.dir('ERROR we are in the callback of the open ');
        console.dir(err);
        throw err;
    }
    var collection = db.collection("simple_document_insert_collection_no_safe");
    // Close only after the insert has completed, or it may be cut off
    collection.insert({hello: 'world_no_safe'}, function (err) {
        if (err) console.dir(err);
        console.dir('we are in the callback of the open');
        db.close();
    });
});
Second approach:
var server = "localhost";
var port = 27017;
var dbName = "myNewDatabase";

var mongodb = require('mongodb');
var mongoClient = mongodb.MongoClient;
var connString = "mongodb://" + server + ":" + port + "/" + dbName;

mongoClient.connect(connString, function(err, db) {
    console.dir(err);
    if (!err) {
        var collection = db.collection("simple_document_insert_collection_no_safe");
        // Close only after the insert has completed
        collection.insert({hello: 'world_no_safe'}, function (err) {
            if (err) console.dir(err);
            db.close();
        });
    } else {
        console.log("Mongo DB could not be connected");
        process.exit(0);
    }
});
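For reference, a minimal sketch of the same idea against the newer MongoClient API; insertOne and this callback shape are assumptions about a more recent driver version, not part of the original answer:

var MongoClient = require('mongodb').MongoClient;
// Connecting and inserting one document is enough to materialize the database.
MongoClient.connect('mongodb://localhost:27017/myNewDatabase', function (err, db) {
    if (err) throw err;
    db.collection('test').insertOne({hello: 'world'}, function (err, result) {
        if (err) throw err;
        console.log('Database and collection created');
        db.close();
    });
});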

Documents "disappearing" from mongodb collection after node loading routine is finished

Being new to MongoDB and Node, this particular issue is just driving me crazy:
I have written a small module which reads a .csv file, turns it into a JSON array, and loads it into a MongoDB collection one record at a time in a loop.
When I run this in the debugger and set a breakpoint at the "var v = i;" line, I can query the Mongo collection and see fully populated records appear one after another.
However, as soon as the loop is finished, ALL of the records' data is gone!
The records themselves are still all there, but they are all empty; the data I just saw in each record is no longer there.
It may be some weird scoping issue, but, being new, I just can not tell what it is.
Here is my code:
exports.excelFileParser = function(fileName, tabName, metadataFields) {
    var assert = require('assert');
    console.log(metadataFields);
    if (typeof require !== 'undefined') XLSX = require('xlsx');

    var mongodb = require('mongodb');
    var _ = require('underscore');
    var fs = require('fs');
    var Converter = require("csvtojson").core.Converter;
    var distinctDataFields;

    var MongoClient = mongodb.MongoClient;
    var url = 'mongodb://localhost:27017/datamanager-03-test';

    var fileName = 'clean_file.csv';
    //fs.writeFileSync(fileName, newCsvLines);
    var csvFileName = fileName;
    var fileStream = fs.createReadStream(csvFileName);

    // new converter instance
    var csvConverter = new Converter({constructResult: true});

    // end_parsed will be emitted once parsing finished
    csvConverter.on("end_parsed", function(jsonObj) {
        //console.log(jsonObj); //here is your result json object
        makeRecords(jsonObj);
    });

    function makeRecords(result) {
        console.log(result.length);
        MongoClient.connect(url, function (err, db) {
            if (err) {
                console.log('Unable to connect to the mongoDB server. Error:', err);
            } else {
                console.log('Connection established to', url);
                var collectionName = 'DC_Facilities';
                db.open(function(err, client) {
                    client.createCollection(collectionName, function(err, col) {
                    });
                    var collection = db.collection(collectionName);
                    for (var i = 0; i < result.length; i++) {
                        var dataRecord = result[i];
                        collection.insert(dataRecord);
                        var v = i;
                    }
                    console.log("finished");
                    db.close();
                });
            }
        });
    }

    fileStream.pipe(csvConverter);
};
collection.insert is an asynchronous function, so you're calling db.close() before any of the inserts have had a chance to complete. You also don't need to call createCollection, as the collection will be created for you if it doesn't already exist.
So your code should look something like this instead, so that db.close() isn't called until all the insert operations have completed:
db.open(function(err, client) {
    var collection = db.collection(collectionName);
    var inserted = 0;
    for (var i = 0; i < result.length; i++) {
        var dataRecord = result[i];
        collection.insert(dataRecord, function(err) {
            if (++inserted == result.length) {
                console.log("finished");
                db.close();
            }
        });
    }
});
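As an aside, a minimal sketch of the same completion logic using promises, assuming a driver version where insertOne returns a promise when no callback is given (an assumption, not part of the original answer):

var inserts = result.map(function(dataRecord) {
    return collection.insertOne(dataRecord); // one promise per insert
});
Promise.all(inserts).then(function() {
    console.log("finished");
    db.close(); // safe: every insert has resolved
}).catch(function(err) {
    console.log(err);
    db.close();
});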

Using Async queue to control DB connection request

I'm building an app with Node and the MongoDB native driver. I'm working on a db module which I can require and call in other modules, so that I end up using just one connection. The module db.js started out with this code:
var _db = null;

var getDb = module.exports.getDb = function(callback) {
    if (_db) {
        console.log('_db returned');
        return callback(null, _db);
    }
    MongoClient.connect('mongodb://localhost:' + config.db.port + '/' + config.db.name, {native_parser: true}, function (err, db) {
        if (err) return callback(err);
        console.log('_db created');
        _db = db;
        callback(err, _db);
    });
};
In my other modules that need a db connection I do this:
db.getDb(function (err, connection) {
    // Do something with connection
});
It works fine, but an unpleasant problem is that if my code calls getDb multiple times in a very short time span, I end up with several copies of the connection, for example if I put my db.js require and getDb calls at the very beginning of every module that needs a db connection.
I'm now thinking about controlling the calls to getDb by queuing them, so that only the very first call creates a connection and saves it in _db, and all later calls get the already-created _db in return. I believe async.queue will help me with this...
The problem is that I don't understand how to write this with async.queue. The documentation is a little bit vague, and I can't find any better examples online. Maybe you can give me some hints. This is what I have so far:
var dbCalls = async.queue(function (task, callback) {
    if (_db) {
        console.log('_db returned');
        return callback(null, _db);
    }
    MongoClient.connect('mongodb://localhost:' + config.db.port + '/' + config.db.name, {native_parser: true}, function (err, db) {
        if (err) return callback(err);
        console.log('Connected to mongodb://localhost:' + config.db.port + '/' + config.db.name);
        _db = db;
        callback(null, _db);
    });
}, 1);

// I guess this .push() must be the exposed (exported) API for other modules to get a connection, but how do I return it to them?
dbCalls.push(null, function (err) {
    console.log('finished processing foo');
});
dbCalls.push(null, function (err) {
    console.log('finished processing bar');
});
I don't understand the object passed as the first argument to .push(). What should I use it for? Right now it's null. And how do I pass the connection (and any error) all the way out to the module that made the call?
A quick and dirty solution without async.queue:
var _db = null;
var _err = null;
var _queue = [];
var _pending = false;

var getDb = module.exports.getDb = function(callback) {
    if (_err || _db) {
        console.log('_db returned');
        return callback(_err, _db);
    } else if (_pending) { // already a connect() request pending
        _queue.push(callback);
    } else {
        _pending = true;
        _queue.push(callback);
        MongoClient.connect(..., function (err, db) {
            _err = err;
            _db = db;
            _queue.forEach(function(queuedCallback) {
                queuedCallback(err, db);
            });
        });
    }
};
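A design note: with newer driver versions the same deduplication falls out of caching the connect promise itself, so concurrent callers all share one connect(). A minimal sketch under that assumption (a promise-returning MongoClient.connect is not part of the original answer):

var connPromise = null;
module.exports.getDb = function() {
    if (!connPromise) {
        // Created exactly once; every later caller shares this same promise
        connPromise = MongoClient.connect('mongodb://localhost:' + config.db.port + '/' + config.db.name);
    }
    return connPromise;
};

Callers would then do getDb().then(function (db) { ... });.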

node.js never exits after insert to couchbase, opposite of most node questions

My problem seems to be the opposite of every Node.js question :-) I have a simple forEach loop to read a list of files and insert them into a Couchbase database. This works great, but it never exits after reading all the lines. So I added a counter to shut down the Couchbase connection after all inserts are complete. This works.
This process is intended to load hundreds of thousands of files, so I brought the async module into the mix to batch the inserts into groups of 100. async.eachLimit is used to iterate over the array and insert documents in batches. Now the original problem is back: whatever magic async.eachLimit uses to recognize that the work is complete is not happening.
I've been reading up on JavaScript scoping, callbacks, async, etc. Google searches hit the keywords but not this issue. I've reduced the code down to the following test case. To test, create three files and add their names to testlist.txt.
The async.eachLimit call as written works up until it hits the limit, then hangs. Comment it out and uncomment the array.forEach line and it works. Thanks in advance!
var fs = require('fs');
var couchbase = require('couchbase');
var async = require('async');

var filelist = 'testlist.txt';
var key_count = 0;
var cb_config = { host: 'localhost:8091', bucket: 'default' };

var db = new couchbase.Connection(cb_config, function(err) {
    if (err) {
        console.log('ERRR connect to couchbase at config[' + cb_config + ']');
        throw err;
    }
});

var insertFile = function(line) {
    console.log('LOAD [' + line + ']');
    fs.readFile(line, function(file_err, f_doc) {
        if (file_err) throw file_err;
        db.set(line, f_doc, function(db_err, db_res) {
            if (db_err) {
                console.log('FAIL [' + line + '] err[' + db_err + ']');
            } else {
                console.log('PASS [' + line + ']');
            }
            key_count--;
            if (key_count == 0) {
                console.log('DONE Shutting down client, no more keys');
                db.shutdown();
            }
        });
    });
}

// read list of files into data array from file filelist
fs.readFile(filelist, function(filelist_err, lines) {
    if (filelist_err) throw filelist_err;
    // HACK split adds empty line to array, use replace to fix
    var array = lines.toString().replace(/\n$/, '').split('\n');
    key_count = array.length;
    console.log('INIT lines[' + key_count + ']');
    async.eachLimit(array, 2, insertFile, function(err) { console.log('FAIL async err[' + err + ']'); });
    //array.forEach(function(data){ insertFile(data); return; });
});
Testcase output using array.forEach:
INIT lines[3]
LOAD [files.big.txt]
LOAD [files.little.txt]
LOAD [files.txt]
PASS [files.little.txt]
PASS [files.big.txt]
PASS [files.txt]
DONE Shutting down client, no more keys
Testcase output using async.eachLimit:
INIT lines[3]
LOAD [files.big.txt]
LOAD [files.little.txt]
PASS [files.little.txt]
PASS [files.big.txt]
... hang, never gets to 3...
After a review with a coworker, they spotted my mistake: I missed the async callback in my insertFile function. Adding that in works, and allows me to remove the key counter! Solution code below:
var fs = require('fs');
var couchbase = require('couchbase');
var async = require('async');

var filelist = 'testlist.txt';
var key_count = 0;
var cb_config = { host: 'localhost:8091', bucket: 'default' };

var db = new couchbase.Connection(cb_config, function(err) {
    if (err) {
        console.log('ERRR connect to couchbase at config[' + cb_config + ']');
        throw err;
    }
});

var insertFile = function(line, callback) {
    console.log('LOAD [' + line + ']');
    fs.readFile(line, function(file_err, f_doc) {
        if (file_err) throw file_err;
        db.set(line, f_doc, function(db_err, db_res) {
            if (db_err) {
                console.log('FAIL [' + line + '] err[' + db_err + ']');
                callback(db_err);
            } else {
                console.log('PASS [' + line + ']');
                callback();
            }
        });
    });
}

// read list of files into data array from file filelist
fs.readFile(filelist, function(filelist_err, data) {
    if (filelist_err) throw filelist_err;
    // HACK stoopid bug split adds empty line to array, use replace to fix
    var array = data.toString().replace(/\n$/, '').split('\n');
    key_count = array.length;
    console.log('READ files[' + key_count + ']');
    async.eachLimit(array, 2, insertFile, function(err) {
        if (err) console.log('LAST with async err[' + err + ']');
        console.log('DONE Shutting down client, no more keys');
        db.shutdown();
    });
});
And successful output:
$ node testcase.js
READ files[3]
LOAD [files.big.txt]
LOAD [files.little.txt]
PASS [files.little.txt]
LOAD [files.txt]
PASS [files.big.txt]
PASS [files.txt]
DONE Shutting down client, no more keys
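One small hardening worth noting (my suggestion, not part of the accepted solution): throwing from inside fs.readFile's callback crashes the process instead of flowing through async's error handling, so the read error could be forwarded to the callback instead:

var insertFile = function(line, callback) {
    fs.readFile(line, function(file_err, f_doc) {
        if (file_err) return callback(file_err); // let async.eachLimit see the error
        db.set(line, f_doc, function(db_err) {
            callback(db_err); // undefined on success, the error otherwise
        });
    });
}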

NodeJS, Multi-FileWatcher, Socket.IO and interaction with the client

I'm new to NodeJS and I wonder how to properly send the content of several files using fs and Socket.IO.
My problem is more about best practices in Node/JavaScript than the actual 'raw' logic of my scripts.
The purpose of my app is to watch a log file (File1.log) and a result file (File2.log).
Until File2.log contains a key string (such as "Done" or "Error"), I need to keep sending the contents of File1.log to the client.
When the key ("Error", "Done") has been read, I send the result to the client and have to launch the same process for another pair of log/result files, after having closed the FileWatcher on the first one.
In the end, I need to close the connection and stop all of the sleeping FileWatcher processes.
The 'streaming' of my files is working pretty well, but I am confused about the best way to switch between the different FileWatcher processes and how to notify the client.
Server.js
/*
 * [SomeCode]...
 */
io.sockets.on('connection', function (client) {
    // Starting the process for the first couple of files
    client.on('logA', function (res) {
        var PATH_to_A = "path/to/A/directory/";
        readFile(client, PATH_to_A);
    });
    // Starting the process for the second couple of files
    client.on('logB', function (res) {
        // I need to stop the first readFile watcher process
        var PATH_to_B = "path/to/B/directory/";
        readFile(client, PATH_to_B);
    });
});

function readFile(client, PATH) {
    var File1 = path.join(PATH, 'File1.log');
    var File2 = path.join(PATH, 'File2.log');

    // Get the file stats
    fs.stat(File1, function(err, stats) {
        if (err) throw err;
        // Send the data;
    });

    // Watch the first file
    var w1 = fs.watch(File1, function(status, file) {
        if (status == "change") {
            fs.stat(File1, function(err, stats) {
                if (err) throw err;
                // Send the data;
            });
        }
    });

    // Watch the second file
    var w2 = fs.watch(File2, function(status, file) {
        if (status == "change") {
            fs.readFile(File2, "utf8", function (err, body) {
                if (err) throw err;
                // Some Code....
                client.emit('done', body);
            });
        }
    });

    // Closing FileWatcher
    client.on('ack', function() {
        w1.close();
        w2.close();
    });
}
Client.js
var socket = io.connect('http://127.0.0.1:8000');

// On connect, waiting for the first couple of files
socket.on('connect', function(server) {
    socket.emit('init', data);
    socket.emit('logA');
});

// If the first process is done, I ask for the second couple of files
socket.on('done', function(message) {
    socket.emit('ack');
    socket.emit('logB');
});
Thanks for your help!
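One possible direction for the switching question (a sketch, not an accepted answer; it assumes readFile is modified to return its two watchers): keep the active watchers in a per-client variable and close them before starting the next pair.

io.sockets.on('connection', function (client) {
    var watchers = []; // watchers currently active for this client
    function stopWatchers() {
        watchers.forEach(function (w) { w.close(); });
        watchers = [];
    }
    client.on('logA', function () {
        stopWatchers();
        watchers = readFile(client, "path/to/A/directory/"); // assumed to return [w1, w2]
    });
    client.on('logB', function () {
        stopWatchers();
        watchers = readFile(client, "path/to/B/directory/");
    });
    client.on('disconnect', stopWatchers); // no sleeping watchers left behind
});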
