NodeJS, Multi-FileWatcher, Socket.IO and interaction with the client - javascript

I'm new to NodeJS and I wonder how to properly send the content of several files using FS and Socket.IO.
My problem is more about best practices on Node/javascript than the actual 'raw' logic of my scripts.
So, the purpose of my app is to watch a log file (File1.log) and a result file (File2.log).
Until File2.log contains a key string (such as "Done" or "Error"), I need to keep sending the contents of File1.log to the client.
When the key ("Error", "Done") has been read, I send the result to the client and have to launch the same process for another pair of log/result files, after closing the FileWatcher on the first one.
In the end, I need to close the connection and stop all of the remaining FileWatcher processes.
The 'streaming' of my files works pretty well, but I am confused about the best way to switch between the different FileWatch processes and how to notify the client.
Server.JS
/*
 * [SomeCode]...
 */
io.sockets.on('connection', function (client) {
    // Starting the process for the first pair of files
    client.on('logA', function (res) {
        var PATH_to_A = "path/to/A/directory/";
        readFile(client, PATH_to_A);
    });
    // Starting the process for the second pair of files
    client.on('logB', function (res) {
        // I need to stop the first readFile watcher process here
        var PATH_to_B = "path/to/B/directory/";
        readFile(client, PATH_to_B);
    });
});
function readFile(client, PATH){
    var File1 = path.join(PATH, 'File1.log');
    var File2 = path.join(PATH, 'File2.log');
    // Get the file stats
    fs.stat(File1, function(err, stats){
        if (err) throw err;
        // Send the data;
    });
    // Watch the first file
    var w1 = fs.watch(File1, function(status, file){
        if (status == "change"){
            fs.stat(File1, function(err, stats){
                if (err) throw err;
                // Send the data;
            });
        }
    });
    // Watch the second file
    var w2 = fs.watch(File2, function(status, file){
        if (status == "change"){
            fs.readFile(File2, "utf8", function (err, body){
                if (err) throw err;
                // Some code....
                client.emit('done', body);
            });
        }
    });
    // Closing the FileWatchers
    client.on('ack', function(){
        w1.close();
        w2.close();
    });
}
Client.JS
var socket = io.connect('http://127.0.0.1:8000');
// On connect, ask for the first pair of files
socket.on('connect', function(server) {
    socket.emit('init', data);
    socket.emit('logA');
});
// When the first process is done, ask for the second pair of files
socket.on('done', function(message) {
    socket.emit('ack');
    socket.emit('logB');
});
Thanks for your help!
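For reference (this is not part of the original question, just a minimal sketch of one possible approach): keep the active watchers for each client in an array, close them before starting the next pair of files, and let the client ask for the next pair once it receives 'done'. The helper names startWatching/stopWatching and the 'log' event are made up for illustration; the sketch reuses the io object and the File1.log/File2.log layout from the question.

var fs = require('fs');
var path = require('path');

io.sockets.on('connection', function (client) {
    var activeWatchers = [];

    // Close whatever pair of files is currently being watched
    function stopWatching() {
        activeWatchers.forEach(function (w) { w.close(); });
        activeWatchers = [];
    }

    // Start watching a new pair of files, closing the previous pair first
    function startWatching(dir) {
        stopWatching();
        var logFile = path.join(dir, 'File1.log');
        var resultFile = path.join(dir, 'File2.log');

        activeWatchers.push(fs.watch(logFile, function (event) {
            if (event === 'change') {
                fs.readFile(logFile, 'utf8', function (err, body) {
                    if (!err) client.emit('log', body);
                });
            }
        }));

        activeWatchers.push(fs.watch(resultFile, function (event) {
            if (event === 'change') {
                fs.readFile(resultFile, 'utf8', function (err, body) {
                    if (err) return;
                    if (/Done|Error/.test(body)) {
                        client.emit('done', body); // tell the client this pair is finished
                    }
                });
            }
        }));
    }

    client.on('logA', function () { startWatching('path/to/A/directory/'); });
    client.on('logB', function () { startWatching('path/to/B/directory/'); });

    // Make sure nothing keeps watching after the client disconnects
    client.on('disconnect', stopWatching);
});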

Related

Request ends before readstream events are handled

I'm attempting to make a nodejs function which reads back data from a file with the following code:
app.post('/DownloadData', function(req, res)
{
    req.on('data', function(data) {
        if (fs.existsSync('demoDataFile.dat')) {
            var rstream = fs.createReadStream('demoDataFile.dat');
            var bufs = [];
            rstream.on('data', function(chunk) {
                bufs.push(chunk);
                console.log("data");
            });
            rstream.on('end', function() {
                downbuf = Buffer.concat(bufs);
                console.log(downbuf.length);
            });
        }
    });
    req.on('end', function() {
        console.log("end length: " + downbuf.length);
        res.end(downbuf);
    });
    req.on('error', function(err)
    {
        console.error(err.stack);
    });
});
The problem is that the buffer comes back empty, because req.on('end', ...) is called before any of the rstream.on events fire ("data" and the length aren't printed in the console until after "end length: " has been printed). Am I handling the events wrong, or is there some other issue? Any guidance would be appreciated.
Not sure why you're reading from req, because you're not using the body data at all. Also, because the data event can trigger multiple times, the code you're using to read the file may also get called multiple times, which probably isn't what you want.
Here's what I think you want:
app.post("/DownloadData", function(req, res) {
let stream = fs.createReadStream("demoDataFile.dat");
// Handle error regarding to creating/opening the file stream.
stream.on('error', function(err) {
console.error(err.stack);
res.sendStatus(500);
});
// Read the file data into memory.
let bufs = [];
stream.on("data", function(chunk) {
bufs.push(chunk);
console.log("data");
}).on("end", function() {
let downbuf = Buffer.concat(bufs);
console.log(downbuf.length);
...process the buffer...
res.end(downbuf);
});
});
You have to be aware that this will read the file into memory entirely. If it's a big file, it may require a lot of memory.
Since you don't specify which operations you have to perform on the file data, I can't recommend an alternative, but there are various modules available that can help you process file data in a streaming fashion (i.e. without having to read the file into memory entirely).
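As an aside (not part of the original answer): if the file data does not need to be touched in memory at all, the read stream can simply be piped into the response, so only one chunk is held at a time:

app.post("/DownloadData", function(req, res) {
    var stream = fs.createReadStream("demoDataFile.dat");
    stream.on("error", function(err) {
        console.error(err.stack);
        res.sendStatus(500);
    });
    // Stream the file to the client chunk by chunk instead of buffering it all
    stream.pipe(res);
});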

Sails.js downloading file

I'm a beginner in sails.js and I need to add some logic to an existing server.
I need to retrieve an image from a shared network location, so I added ImageController.js with an action in it:
module.exports = {
    download: function(req, res) {
        var run = req.param('run');
        var test = req.param('test');
        var file = req.param('file');
        var path = util.format('\\\\tch-smain\\Share\\AutoTest\\Screenshots\\%s\\%s\\%s.jpeg', run, test, file);
        fileAdapter.read(path)
            .on('error', function (err){
                return res.serverError(err);
            })
            .pipe(res);
    }
};
Then I registered the route in config/routes.js:
'/img': 'ImageController.download'
But when I try to execute GET /img?run=a&test=b&file=c I get a 500 Internal Server Error. Something isn't right here:
the file \\tch-smain\Share\AutoTest\Screenshots\a\b\c.jpeg does exist.
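No answer is recorded for this question. As a first debugging step (a sketch only, using the same fileAdapter call as in the question), logging the underlying error before returning the 500 usually shows whether the UNC path or the adapter is the problem:

fileAdapter.read(path)
    .on('error', function (err) {
        // Log the underlying error so the real cause of the 500 shows up in the Sails console
        sails.log.error('Failed to read ' + path, err);
        return res.serverError(err);
    })
    .pipe(res);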

Node script doesn't ever end

I have the node script below that basically copies the contents of some files and inserts them into Mongo.
The script never seems to end; even though all the data gets inserted successfully, I always have to press Ctrl+C to kill it.
Is there something I'm supposed to use in node.js to end a script?
var mongoose = require('mongoose');
mongoose.connect('mongodb://localhost/testdb');
var dir = './seeds';
var db = mongoose.connection;
// Show connection error if there is one
db.on('error', console.error.bind(console, 'Database Connection Error:'));
// If we successfully connected to mongo
db.once('open', function callback() {
    var fs = require('fs'); // Used to get all the files in a directory
    // Read all the files in the folder
    fs.readdir(dir, function(err, list) {
        // Log the error if something went wrong
        if(err) {
            console.log('Error: ' + err);
        }
        // For every file in the list
        list.forEach(function(file) {
            // Set the filename without the extension to the variable collection_name
            var collection_name = file.split(".")[0];
            var parsedJSON = require(dir + '/' + file);
            for(var i = 0; i < parsedJSON.length; i++) {
                // Counts the number of records in the collection
                db.collection('cohort').count(function(err, count) {
                    if(err) {
                        console.log(err);
                    }
                });
                db.collection(collection_name).insert(parsedJSON[i], function(err, records) {
                    if(err) {
                        console.log(err);
                    }
                    console.log(records[0]);
                    console.log("Record added as " + records[0]);
                });
            }
        });
    });
});
When everything is done, call mongoose.disconnect(). As @AaronDufour correctly points out, node will not exit while event handler callbacks are still registered, because it doesn't know that no more events are expected (the connection could still emit a 'close' or 'error' event, for example).
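A minimal sketch of that idea (reusing the db, dir and list variables from the question's code and ignoring the cohort count call): keep a counter of pending inserts and disconnect once the last callback has fired.

var pending = 0;

function insertDone() {
    pending--;
    if (pending === 0) {
        // All insert callbacks have fired, so nothing is keeping the event loop alive
        mongoose.disconnect();
    }
}

list.forEach(function(file) {
    var collection_name = file.split(".")[0];
    var parsedJSON = require(dir + '/' + file);
    parsedJSON.forEach(function(doc) {
        pending++;
        db.collection(collection_name).insert(doc, function(err) {
            if (err) console.log(err);
            insertDone();
        });
    });
});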
You can call process.exit(); to exit.

node.js never exits after insert to couchbase, opposite of most node questions

My problem seems to be the opposite of every node.js question :-) I have a simple forEach loop to read a list of files and insert them into a Couchbase database. This works great, but it never exits after reading all the lines. So I added a counter to shut down the couchbase connection after all inserts are complete. This works.
This process is intended to load hundreds of thousands of files, so I brought the async module into the mix to batch the inserts into groups of 100. async.eachLimit is used to iterate over the array and insert documents in batches. Now the original problem is back: whatever magic async.eachLimit uses to recognize the process is complete is not happening.
I've been going through javascript scoping, callbacks, async, etc. Google searches hit the keywords but not this issue. I've reduced the code down to the following testcase. To test, create three files and add their names to testlist.txt.
The async.eachLimit as used here works up until it hits the limit, then hangs. Comment it out and uncomment the array.forEach line and it works. Thanks in advance!
var fs = require('fs');
var couchbase = require('couchbase');
var async = require('async');
var filelist = 'testlist.txt';
var key_count = 0;
var cb_config = { host: 'localhost:8091', bucket: 'default' };
var db = new couchbase.Connection(cb_config, function(err) {
    if (err) {
        console.log('ERRR connect to couchbase at config[' + cb_config + ']');
        throw err;
    }
});
var insertFile = function(line) {
    console.log('LOAD [' + line + ']');
    fs.readFile(line, function(file_err, f_doc) {
        if (file_err) throw file_err;
        db.set(line, f_doc, function(db_err, db_res) {
            if (db_err) {
                console.log('FAIL [' + line + '] err[' + db_err + ']');
            } else {
                console.log('PASS [' + line + ']');
            }
            key_count--;
            if (key_count == 0) {
                console.log('DONE Shutting down client, no more keys');
                db.shutdown();
            }
        });
    });
}
// read list of files into data array from file filelist
fs.readFile(filelist, function(filelist_err, lines) {
    if (filelist_err) throw filelist_err;
    // HACK split adds empty line to array, use replace to fix
    var array = lines.toString().replace(/\n$/, '').split('\n');
    key_count = array.length;
    console.log('INIT lines[' + key_count + ']');
    async.eachLimit(array, 2, insertFile, function(err) { console.log('FAIL async err[' + err + ']'); });
    //array.forEach(function(data){ insertFile(data); return; });
});
Testcase output using array.forEach:
INIT lines[3]
LOAD [files.big.txt]
LOAD [files.little.txt]
LOAD [files.txt]
PASS [files.little.txt]
PASS [files.big.txt]
PASS [files.txt]
DONE Shutting down client, no more keys
Testcase output using async.eachLimit:
INIT lines[3]
LOAD [files.big.txt]
LOAD [files.little.txt]
PASS [files.little.txt]
PASS [files.big.txt]
... hang, never gets to 3...
After review with a coworker, they spotted my mistake. I missed the async callback in my insertFile function. Adding that in works and allows me to remove the key counter! Solution code below:
var fs = require('fs');
var couchbase = require('couchbase');
var async = require('async');
var filelist = 'testlist.txt';
var key_count = 0;
var cb_config = { host: 'localhost:8091', bucket: 'default' };
var db = new couchbase.Connection(cb_config, function(err) {
    if (err) {
        console.log('ERRR connect to couchbase at config[' + cb_config + ']');
        throw err;
    }
});
var insertFile = function(line, callback) {
    console.log('LOAD [' + line + ']');
    fs.readFile(line, function(file_err, f_doc) {
        if (file_err) throw file_err;
        db.set(line, f_doc, function(db_err, db_res) {
            if (db_err) {
                console.log('FAIL [' + line + '] err[' + db_err + ']');
                callback(db_err);
            } else {
                console.log('PASS [' + line + ']');
                callback();
            }
        });
    });
}
// read list of files into data array from file filelist
fs.readFile(filelist, function(filelist_err, data) {
    if (filelist_err) throw filelist_err;
    // HACK stoopid bug split adds empty line to array, use replace to fix
    var array = data.toString().replace(/\n$/, '').split('\n');
    key_count = array.length;
    console.log('READ files[' + key_count + ']');
    async.eachLimit(array, 2, insertFile, function(err) {
        if (err) console.log('LAST with async err[' + err + ']');
        console.log('DONE Shutting down client, no more keys');
        db.shutdown();
    });
});
And successful output:
$ node testcase.js
READ files[3]
LOAD [files.big.txt]
LOAD [files.little.txt]
PASS [files.little.txt]
LOAD [files.txt]
PASS [files.big.txt]
PASS [files.txt]
DONE Shutting down client, no more keys

Node.js - Organising code and closures - SFTP/Inotify

I was hoping I could get some advice on why my nodejs program is behaving in the way it is.
I am using two modules, node-sftp and node-inotify. I have set up node-inotify to watch a directory and call a function when something is written there, the function being an sftp upload.
The problem is that processing one file at a time works fine, but when I drop 4 files there in one go, the function is called four times yet only one sftp upload goes through.
Do I need to order my code in a particular way to ensure that the sftp upload occurs x times? Is this something to do with closures, perhaps?
This is a basic version of my code...
"event_handler" is called when something happens on a "watched" directory
"check_event" figures out if this type of event is one we want, in this case a "write"
"ftp_to_server" prepares the connection details
"do_ftp" uses the node-sftp module to perform the sftp upload
event_handler = function(event){
    var supplier;
    check_event(event, supplier, type, ftp_to_server);
};
=================
function check_event(event, handler)
{
    if (event.type === 'xxxxxx') {
        var file_to_process_name = 'abc';
        var file_to_process_dir = 'abc';
        var remote_dir = 'abc';
        handler(file_to_process_name, file_to_process_dir, remote_dir);
    }
}
function ftp_to_server(file_to_process_name, file_to_process_dir, remote_dir) {
    var connection_details = conf.ftp.connections;
    do_ftp(connection_details, file_to_process_name, file_to_process_dir, remote_dir);
}
function do_ftp(connection_details, file_to_process_name, file_to_process_dir, remote_dir) {
    var credentials = {
        // FTP settings here
    };
    var local_file = file_to_process_dir + file_to_process_name;
    var remote_file = remote_dir + file_to_process_name;
    connection = new sftp(credentials, function(err) {
        if (err){
            throw err;
        }
        connection.writeFile(remote_file, fs.readFileSync(local_file, "utf8"), null, function(err) {
            if (err) {
                throw err;
            }
            console.info('FTP PUT DONE');
        });
    });
};
Your "connection = new sftp(credentials, function(err) {"
should be
var connection = new sftp(credentials, function(err) {
The way you currently have it coded, "connection" is a global and you are writing over it.
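In context, the fix looks like this; only the declaration of connection changes, the rest is the question's do_ftp:

function do_ftp(connection_details, file_to_process_name, file_to_process_dir, remote_dir) {
    var credentials = {
        // FTP settings here
    };
    var local_file = file_to_process_dir + file_to_process_name;
    var remote_file = remote_dir + file_to_process_name;
    // "var" keeps the connection local to this call, so concurrent uploads
    // no longer overwrite each other's handle
    var connection = new sftp(credentials, function(err) {
        if (err) throw err;
        connection.writeFile(remote_file, fs.readFileSync(local_file, "utf8"), null, function(err) {
            if (err) throw err;
            console.info('FTP PUT DONE');
        });
    });
}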