Trying a few Node.js filesystem checks (to confirm the environment is functioning correctly).
When I run fs.unlink or fs.unlinkSync outside of Mocha, it deletes the file as expected:
var fs = require('fs');
var newFile = new Date().getTime() + ".txt";

fs.writeFile(newFile, "hello!", function (err) {
  if (err) console.log(err);
  // console.log("Created file: " + newFile);
  fs.readdir(__dirname, function (err, list) {
    // console.log(list)
    console.log(list.indexOf(newFile) > -1);
    fs.unlinkSync(newFile);
    console.log('successfully deleted ' + newFile);
    // console.log("Deleted: " + newFile)
    fs.readdir(__dirname, function (err, list) {
      if (err) throw err;
      console.log(list.indexOf(newFile) === -1);
    });
  });
});
But when I run the exact same code inside a Mocha test, it does not delete the file...
var chai = require('chai');
var assert = chai.assert;
var fs = require('fs');

describe('Node.js Environment Checks', function () {
  describe('Basic IO', function () {
    it('CREATE (temporary) file tests create/write access to FS', function () {
      // setup
      var newFile = new Date().getTime() + ".txt";
      fs.writeFile(newFile, "hello!", function (err) {
        if (err) console.log(err);
        // console.log("Created file: " + newFile);
        fs.readdir(__dirname, function (err, list) {
          // console.log(list)
          assert.isTrue(list.indexOf(newFile) > -1);
          fs.unlinkSync(newFile);
          console.log('successfully deleted ' + newFile);
          // console.log("Deleted: " + newFile)
          fs.readdir(__dirname, function (err, list) {
            if (err) throw err;
            assert.isTrue(list.indexOf(newFile) === -1);
          });
        });
      });
    });
  });
}); // end node env checks
Am I missing something...?
note: I created an issue on GitHub:
https://github.com/visionmedia/mocha/issues/1058
(If I get a reply there first I will mirror it here)
Use the asynchronous form of testing. Change your it call so that the callback gets the done parameter:
it('CREATE (temporary) file tests create/write access to FS', function(done){
And call it in your innermost async callback:
fs.readdir(__dirname, function (err, list) {
  if (err) throw err;
  assert.isTrue(list.indexOf(newFile) === -1);
  done();
});
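Putting it together, here is a minimal sketch of the whole test with done wired through (still using chai's assert; the only liberty taken is passing errors to done(err) instead of logging them, which makes Mocha fail the test on an I/O error):
it('CREATE (temporary) file tests create/write access to FS', function (done) {
  var newFile = new Date().getTime() + ".txt";
  fs.writeFile(newFile, "hello!", function (err) {
    if (err) return done(err);
    fs.readdir(__dirname, function (err, list) {
      if (err) return done(err);
      assert.isTrue(list.indexOf(newFile) > -1);
      fs.unlinkSync(newFile);
      fs.readdir(__dirname, function (err, list) {
        if (err) return done(err);
        assert.isTrue(list.indexOf(newFile) === -1);
        done();
      });
    });
  });
});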
I am reading files from FTP using the code below.
var JSFtp = require("jsftp");
var config = require('./config.json');

var FtpService = function () {};

// Connect to FTP
var Ftp = new JSFtp({
  host: config.ftp.host,
  port: config.ftp.port,
  user: config.ftp.user,
  pass: config.ftp.pass
});

FtpService.prototype.getFTPDirectoryFiles = function (callback) {
  Ftp.list(config.ftp.FilePath, function (err, res) {
    if (err) {
      console.log('File Listing Failed', err);
      callback(null, err);
      return;
    } else {
      console.log(res);
      callback(null, res);
    }
  });
};

FtpService.prototype.closeFtp = function () {
  console.log('Disconnect to FTP');
};

module.exports = new FtpService();
Now I include this FTP service file in my index.js as:
var ftp = require('./ftpservice.js');
ftpfiles = ftp.getFTPDirectoryFiles();
console.log(ftpfiles);
getFTPDirectoryFiles returns the list of files, but if I call it via index.js I get undefined for ftpfiles. This is because of the asynchronous nature of Node.js.
So I thought of adding a callback, but
I am getting the error "Callback is not defined" in the function FtpService.prototype.getFTPDirectoryFiles.
In this line:
ftpfiles = ftp.getFTPDirectoryFiles()
you are not passing the callback that the function requires, and you are trying to use a return value that the function does not return.
You need to do something like this:
var ftp = require('./ftpservice.js');

ftp.getFTPDirectoryFiles(function (err, ftpfiles) {
  if (err) {
    console.log(err);
  } else {
    console.log(ftpfiles);
  }
});
You need to pass a callback function to your function getFTPDirectoryFiles():
var ftp = require('./ftpservice.js');
var ftpFiles;

function setFtpFiles(err, res) {
  if (err) throw err;
  ftpFiles = res; // to use the "ftpFiles" variable later
  console.log(res);
}

ftp.getFTPDirectoryFiles(setFtpFiles);
1. Don't change the argument order when calling the callback: replace callback(null, err); and callback(null, res); with callback(err, res);.
2. You need to define a specific function (your callback) and pass it to ftp.getFTPDirectoryFiles().
var JSFtp = require("jsftp");
var config = require('./config.json');

var FtpService = function () {};

// Connect to FTP
var Ftp = new JSFtp({
  host: config.ftp.host,
  port: config.ftp.port,
  user: config.ftp.user,
  pass: config.ftp.pass
});

FtpService.prototype.getFTPDirectoryFiles = function (callback) {
  Ftp.list(config.ftp.FilePath, function (err, res) {
    if (err) {
      console.log('File Listing Failed', err);
      callback(err, res);
      return;
    } else {
      console.log(res);
      callback(err, res);
    }
  });
};

FtpService.prototype.getFTPDirectoryFilesSimplify = function (callback) {
  // no console.log, but much simpler!
  Ftp.list(config.ftp.FilePath, callback);
};

FtpService.prototype.closeFtp = function () {
  console.log('Disconnect to FTP');
};
and then:
var ftp = require('./ftpservice.js');

ftp.getFTPDirectoryFiles(function (err, res) {
  // do your specific job here using err and res
  console.log(res);
});
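If you prefer consuming the result as a promise rather than a callback, one possible sketch is to wrap the same method yourself. This assumes the corrected error-first callback(err, res) order from above; getFilesAsPromise is just a hypothetical helper name:
var ftp = require('./ftpservice.js');

function getFilesAsPromise() {
  return new Promise(function (resolve, reject) {
    ftp.getFTPDirectoryFiles(function (err, res) {
      if (err) return reject(err);
      resolve(res);
    });
  });
}

getFilesAsPromise()
  .then(function (ftpfiles) { console.log(ftpfiles); })
  .catch(function (err) { console.log(err); });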
I have a function that downloads the user input (currently named app.json) from the browser (client) to the server:
function downloadUpdate(callback) {
  // Using formidable node package for downloading user input to server
  var form = new formidable.IncomingForm();

  form.on('fileBegin', function (name, file) {
    file.path = "app.json";
  });

  form.parse(req, function (err, fields, files) {
    res.writeHead(200, {
      'content-type': 'text/plain'
    });
    res.write('received upload:\n\n');
    res.end(util.inspect({
      fields: fields,
      files: files
    }));
  });

  form.on('end', function () {
    callback(null);
  });
}
I have another function that takes the file downloaded above and converts it into the required format (final.json), something like this:
function UpdateCode(callback) {
  var obj = fs.readFileSync('app.json', 'utf8');
  console.log(abc); // I'm getting undefined here
  var object = JSON.parse(obj);
  var data2 = [];
  for (var j = 0; j < object.length; j++) {
    if (object[j].value == `${abc}`) {
      data2.push(object[j]);
    }
  }
  console.log(data2);
  fs.appendFile('final.json', JSON.stringify(data2), function (err) {
    if (err) throw err;
    console.log('Saved!');
    callback(null);
  });
}
I used async.series to make them run in order, like this:
async.series([
  downloadUpload,
  UpdateCode
], function (err, result) {
  if (err) throw err;
  else {
    console.log(result);
  }
});
All of this code is inside a POST request handler. I'm getting abc on the server:
app.post('/', function (req, res) {
  var abc = req.body.abc;
  console.log(abc); // I'm getting abc here

  function downloadfile(callback) {
    // here goes the downloadfile definition
  }

  function UpdateCode() {
    // upload code function
  }

  // now I call the async.series method
  async.series([
    downloadUpload,
    UpdateCode
  ], function (err, result) {
    if (err) throw err;
    else {
      console.log(result);
    }
  });
});
The thing is, the value of abc is not reaching the UpdateCode function, and when I console.log abc there, I get undefined. Where am I going wrong?
Basically, when I run the csvtojson module in Node.js on its own, it works perfectly. But once I put it inside a function, it just comes out as undefined even though my file path is still there.
JS code:
var Converter = require("csvtojson").Converter;
// create a new converter object
var converter = new Converter({});

var MongoClient = require('mongodb').MongoClient;
var url = 'mongodb://localhost:27017/myproject';

// call the fromFile function which takes in the path to your
// csv file as well as a callback function
var woops;

var createNewEntries = function (db, woops, callback) {
  converter.fromFile("./NTA-SAM-Inventory-List-Security-Management-New_2017.csv", function (err, result) {
    // if an error has occurred then handle it
    if (err) {
      console.log("An Error Has Occured");
      console.log(err);
    }
    // the result of the conversion
    console.log(result);
    console.log('ohhhhh');
    woops = result;
  });
  console.log(woops);
};
MongoClient.connect(url, function (err, db) {
  if (err) {
    console.log(err);
  }
  setTimeout(function () {
    createNewEntries(db, woops, function () {
      if (err)
        throw err;
      else {
        console.log(woops);
      }
      db.close();
    });
  }, 2000);
});
This is just testing whether it converts inside a function, and it just shows:
undefined
[]
ohhhhh
without converting at all when inside a function. So what exactly did I do wrong? By rights it should have converted after calling the function. Does it have to do with my code executing before the function? I already put in a setTimeout to give it some time, so I assume it shouldn't have to do with the order of my code. Thanks in advance!
You should try the code below for your file name:
__dirname + "/NTA-SAM-Inventory-List-Security-Management-
New_2017.csv"
Replace your converter.fromFile() call; your code would then look like this:
converter.fromFile(__dirname + "/NTA-SAM-Inventory-List-Security-Management-New_2017.csv", function (err, result) {
  // if an error has occurred then handle it
  if (err) {
    console.log("An Error Has Occured");
    console.log(err);
  }
  // the result of the conversion
  console.log(result);
  console.log('ohhhhh');
  woops = result;

  MongoClient.connect(url, function (err, db) {
    if (err) {
      console.log(err);
    }
    setTimeout(function () {
      createNewEntries(db, woops, function () {
        if (err)
          throw err;
        else {
          console.log(woops);
        }
        db.close();
      });
    }, 2000);
  });
});
Hope it will work for you.
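As a small aside, path.join can be used to build the same absolute path; this is just a sketch, assuming the CSV file sits next to the script and that converter is the same Converter instance from the question:
var path = require('path');

var csvFile = path.join(__dirname, "NTA-SAM-Inventory-List-Security-Management-New_2017.csv");
converter.fromFile(csvFile, function (err, result) {
  if (err) return console.log(err);
  console.log(result);
});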
If the above code is not working, then try the code below with the fast-csv module:
var fcsv = require('fast-csv');
var fs = require('fs');

/**
 * Get the records from csv
 */
var writeZipCodes = function () {
  var stream = fs.createReadStream(__dirname + "/NTA-SAM-Inventory-List-Security-Management-New_2017.csv");
  fcsv
    .fromStream(stream, { headers: true }) // headers for columns
    .on("data", function (data) {
      console.log(data);
      var woops = data;
      MongoClient.connect(url, function (err, db) {
        if (err) {
          console.log(err);
        }
        setTimeout(function () {
          createNewEntries(db, woops, function () {
            if (err)
              throw err;
            else {
              console.log(woops);
            }
            db.close();
          });
        }, 2000);
      });
    })
    .on("end", function () {
      console.log("done");
    });
};

writeZipCodes();
According to your output,
undefined
[]
ohhhhh
var woops;

var createNewEntries = function (db, woops, callback) {
  converter.fromFile("./NTA-SAM-Inventory-List-Security-Management-New_2017.csv", function (err, result) {
    // if an error has occurred then handle it
    if (err) {
      console.log("An Error Has Occured");
      console.log(err);
    }
    // the result of the conversion
    console.log(result);   // This is getting printed second
    console.log('ohhhhh'); // This is getting printed third
    woops = result;
  });
  console.log(woops);      // This is getting printed first
};
MongoClient.connect(url, function (err, db) {
  if (err) {
    console.log(err);
  }
  setTimeout(function () {
    createNewEntries(db, woops, function () {
      if (err)
        throw err;
      else {
        console.log(woops); // This is useless!
      }
      db.close();
    });
  }, 2000);
});
You can clearly see this: woops has only just been declared, so it still holds undefined. And the [] printed before ohhhhh must be the result variable.
Now, this definitely means that woops is not being printed after ohhhhh; rather, createNewEntries is executing (or returning its result) only after console.log(woops) has already run, which means your setTimeout()'s delay is not sufficient.
And why are you even accepting a callback parameter and passing in a function when you never call it? Use this instead:
var woops;

var createNewEntries = function (db, woops) {
  converter.fromFile("./NTA-SAM-Inventory-List-Security-Management-New_2017.csv", function (err, result) {
    // if an error has occurred then handle it
    if (err) {
      console.log("An Error Has Occured");
      console.log(err);
    }
    // the result of the conversion
    console.log(result);
    console.log('ohhhhh');
    woops = result;
    console.log(woops); // log woops here, once the conversion has actually finished
  });
};

MongoClient.connect(url, function (err, db) {
  if (err) {
    console.log(err);
  }
  createNewEntries(db, woops);
});
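If you do want to keep the callback parameter, a minimal sketch that hands the converted rows back through it could look like the following. It reuses converter, url and MongoClient from the question; the error-first argument order is an assumption on my part:
var createNewEntries = function (db, callback) {
  converter.fromFile("./NTA-SAM-Inventory-List-Security-Management-New_2017.csv", function (err, result) {
    if (err) return callback(err);
    callback(null, result);
  });
};

MongoClient.connect(url, function (err, db) {
  if (err) return console.log(err);
  createNewEntries(db, function (err, woops) {
    if (err) throw err;
    console.log(woops); // rows are available here, no setTimeout needed
    db.close();
  });
});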
I am trying to get the names and creation dates of the files. The code below throws an error when I call the API: it reads the directory and prints all the file names, but nothing is sent back to the callback. Any idea what is implemented wrong?
service.js
var fs = require('fs');
var path = require('path');
var async = require('async');

var currentDate = new Date();
var objToReturn = [];
var logsDirectory = './logs';

function readDirectory(env, callback) {
  fs.readdir(logsDirectory + '/' + env, function (err, files) {
    // loop through each file
    async.eachSeries(files, function (file, done) {
      var dirPath = logsDirectory + '/' + env;
      var filePath = path.join(dirPath, file);
      var fileInfo = {};
      fs.stat(filePath, function (err, stats) {
        if (err) {
          console.info("File doesn't exist");
        } else {
          fileInfo.fileDate = stats.birthtime;
          fileInfo.filename = file;
          objToReturn.push(fileInfo);
          done();
        }
      });
    });
  },
  function (err) {
    if (err) {
      console.info('error', err);
      return;
    }
    // when you're done reading all the files, do something...
    console.log('before Callback', objToReturn);
    callback(objToReturn);
  });
}

exports.readDirectory = readDirectory;
app.js
var stDirectory = require('./app/serverfiles/stDir');

app.get('/getAllFiles', function (req, res) {
  var env = req.query.env;
  console.log('printing', env);
  stDirectory.readDirectory(env, function (files) {
    res.json(files);
    console.log('Api files', files);
  });
});
There are a few issues:
instead of passing the "final" handler to async.eachSeries(), you're passing it to fs.readdir(), so callback will never get called;
you're declaring objToReturn outside of the function, which isn't a good idea because multiple requests could be handled in parallel;
you're not handling any errors properly;
you should really use the Node.js callback idiom of calling callbacks with two arguments, the first being errors (if there are any) and the second being the result of the asynchronous operation.
The code below should fix these issues:
function readDirectory(env, callback) {
  let objToReturn = [];
  fs.readdir(
    logsDirectory + "/" + env,
    function (err, files) {
      if (err) return callback(err);
      // loop through each file
      async.eachSeries(files, function (file, done) {
        var dirPath = logsDirectory + "/" + env;
        var filePath = path.join(dirPath, file);
        var fileInfo = {};
        fs.stat(filePath, function (err, stats) {
          if (err) {
            console.info("File doesn't exist");
            return done(err);
          } else {
            fileInfo.fileDate = stats.birthtime;
            fileInfo.filename = file;
            objToReturn.push(fileInfo);
            done();
          }
        });
      }, function (err) {
        if (err) {
          console.info("error", err);
          return callback(err);
        }
        // when you're done reading all the files, do something...
        console.log("before Callback", objToReturn);
        callback(null, objToReturn);
      });
    }
  );
}
// To call it:
stDirectory.readDirectory(env, function (err, files) {
  if (err) {
    res.sendStatus(500);
  } else {
    res.json(files);
    console.log('Api files', files);
  }
});
You should also consider using async.mapSeries() instead of async.eachSeries() plus a separate accumulator array (objToReturn), as sketched below.
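For reference, here is a rough sketch of the async.mapSeries() variant. It reuses fs, path, async and logsDirectory from service.js, and keeps the (err, results) callback convention from the answer above; mapSeries collects each iteratee's result for you, so the shared array disappears:
function readDirectory(env, callback) {
  var dirPath = logsDirectory + '/' + env;
  fs.readdir(dirPath, function (err, files) {
    if (err) return callback(err);
    async.mapSeries(files, function (file, done) {
      fs.stat(path.join(dirPath, file), function (err, stats) {
        if (err) return done(err);
        done(null, { filename: file, fileDate: stats.birthtime });
      });
    }, callback); // callback receives (err, arrayOfFileInfo)
  });
}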
I am writing this code as a project for a customer,
and when I go to a show route I get a 500 Internal Server Error.
http.get('/files/:id', function (req, res) {
  var vid;
  var pap;

  Videos.find({}, function (err, videos) {
    if (err) {
      console.log(err);
    } else {
      vid = videos;
    }
  });

  Papers.find({}, function (err, file) {
    if (err) {
      console.log(err);
    } else {
      pap = file;
    }
  });

  Material.findById(req.params.id, function (err, found) {
    if (err) {
      console.log(err);
    } else {
      res.render('files', {
        file: pap,
        video: vid,
        current: found
      });
    }
  });
});
This is my show route code.
Note: if I reload the page, the error is gone and the page opens.
The reason is you need to wait for all the database queries to finish before rendering. In your code, it is possible for the page to render before the other two queries have completed and returned their data. The good news is that Mongoose supports Promises for asynchronous functions.
http.get('/files/:id', function (req, res) {
  Promise.all([
    Videos.find({}).exec(),
    Papers.find({}).exec(),
    Material.findById(req.params.id).exec()
  ]).then(([video, paper, material]) => {
    res.render('files', {
      file: paper,
      video: video,
      current: material
    });
  }).catch(error => console.log(error));
});
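If your Node and Mongoose versions support it, the same shape reads a little flatter with async/await. This is just a sketch assuming the same models and that http is your Express app, as in the question:
http.get('/files/:id', async function (req, res) {
  try {
    const [video, paper, material] = await Promise.all([
      Videos.find({}).exec(),
      Papers.find({}).exec(),
      Material.findById(req.params.id).exec()
    ]);
    res.render('files', { file: paper, video: video, current: material });
  } catch (error) {
    console.log(error);
    res.sendStatus(500);
  }
});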
The functions you're using with Mongoose are asynchronous in nature; the variables vid and pap are not yet initialized when res.render runs. When you attempt to use those variables in your frontend (in a template like Jade, Handlebars, or EJS; I don't know what you're using), they are undefined and subsequently cause the 500 error. You'll need to run the functions such that the results of all the Mongoose queries are available to res.render when it runs, either by using an async Node.js library or by nesting the calls within one another and calling res.render at the end.
Solution 1: Using the async Node module
var async = require('async');

async.parallel([
  // Each function in this array will execute in parallel
  // The callback function is executed once all functions in the array complete
  function (cb) {
    Videos.find({}, function (err, videos) {
      if (err) {
        return cb(err);
      } else {
        return cb(null, videos);
      }
    });
  },
  function (cb) {
    Papers.find({}, function (err, papers) {
      if (err) {
        return cb(err);
      } else {
        return cb(null, papers);
      }
    });
  },
  function (cb) {
    Material.findById(req.params.id, function (err, found) {
      if (err) {
        return cb(err);
      } else {
        return cb(null, found);
      }
    });
  }
], function (err, results) {
  if (err) {
    // If any function returns an error
    // (first argument), it will be here
    console.log(err);
  } else {
    // Even though the functions complete asynchronously,
    // the order in which they are declared in the array
    // will correspond to the position in the array
    // if it returns anything as a second argument.
    var videos = results[0];
    var files = results[1];
    var found = results[2];
    res.render('files', {
      file: files,
      video: videos,
      current: found
    });
  }
});
Solution 2: Nested Callbacks
Videos.find({}, function (err, videos) {
  var vid = videos;
  if (err) {
    console.log(err);
  } else {
    Papers.find({}, function (err, file) {
      var pap = file;
      if (err) {
        console.log(err);
      } else {
        Material.findById(req.params.id, function (err, found) {
          if (err) {
            console.log(err);
          } else {
            res.render('files', {
              file: pap,
              video: vid,
              current: found
            });
          }
        });
      }
    });
  }
});