I have the following piece of code in my "getpics.js" file:
var path = require('path');
var fs = require('fs');
const directoryPath = './public/img/slider';

exports.fileOnDisk = function () {
    fs.readdir(directoryPath, function (err, files) {
        // handle error
        if (err) {
            return console.log('Unable to scan directory: ' + err);
        }
        console.log(files);
        return files;
    });
};
return module.exports;
Here is how my mail.js calls the module:
var getpics = require('./public/js/slider/getpics.js');
getpics.fileOnDisk();
and this is the printout on the console:
[ 'next.png', 'next_hover.png', 'prev.png', 'prev_hover.png',
'slide1.jpg', 'slide2.jpg', 'slide3.jpg', 'slide4.jpg',
'slide5.jpg' ]
All good so far. The question is: why can't I export "files" out of this module, for example into a variable, to use them in my application?
The reason you're unable to export those files directly is the asynchronous nature of Node.js, specifically the file system call fs.readdir. Because that call is processed asynchronously, execution continues past it, so you can't get hold of its result in time to export it. You can read more about this in the About section of the Node.js site.
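To make the timing concrete, here is a sketch (using the names from the question) of what actually happens:

exports.fileOnDisk = function () {
    fs.readdir(directoryPath, function (err, files) {
        return files; // returns only from the inner callback, not from fileOnDisk
    });
    // execution reaches this point before readdir has finished,
    // so fileOnDisk itself returns undefined
};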
However, the Node.js file system API does provide synchronous methods, in your case fs.readdirSync. Using that, your code would end up looking like this:
var path = require('path');
var fs = require('fs');
const directoryPath = './public/img/slider';

exports.fileOnDisk = fs.readdirSync(directoryPath, { encoding: 'utf8' });
You could then import this module and access the array of file names straight from fileOnDisk.
Be careful, however, as this call is blocking.
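If blocking is a concern, a non-blocking alternative is to export a function that accepts a callback and hands the result to the caller once readdir finishes. A minimal sketch, reusing the names from the question:

var fs = require('fs');
const directoryPath = './public/img/slider';

exports.fileOnDisk = function (callback) {
    fs.readdir(directoryPath, function (err, files) {
        if (err) return callback(err);
        callback(null, files);
    });
};

Then, in the calling file:

getpics.fileOnDisk(function (err, files) {
    if (err) return console.log('Unable to scan directory: ' + err);
    // files is usable here
});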
I am writing a unit test case; my question relates to the linked post How to stub/mock submodules of a require of nodejs using sinon.
When I include a require:
const index=require('./index.js');
It has a library require inside it:
const library= require('./library.js');
The library.js file has a require that reads the config.json file (this config file is also required inside index.js above), as below:
const readConfig = require('read-config');
const config = readConfig('./config.json');
I have tried many of the approaches suggested in the linked post, but I am still failing:
const stubs = {
    './library': function (response) {
        assert.equal(some, null);
        return 'Some ' + argument;
    },
    '../library1.js': function (paths, opts) {
        var config = './config.json';
        return config;
    },
};
const index = proxyquire('./index.js', stubs);
When I run my unit test case, I still get the error below:
throw configNotFound(configPath);
^
ReadConfigError: Config file not found: ./config.json
I would like to know which part of the code I am missing that causes this error.
I am trying to edit index.js and all the related files where the config is read, using the code below:
var fs = require('fs');
var path = require('path');
var pathToJson = path.resolve(__dirname, '../config.json');

// Load config
var config = fs.readFile(pathToJson, 'utf8', function (err, data) {
    if (err) throw err;
    config = JSON.parse(data);
});
The challenge here is that I cannot change the Node code.
Your problem is likely to be path resolution. If ./config.json is relative to where you are running Node from (process.cwd()), then it'll work. If it's relative to your library module, then you can do something like:
// Works for JS and JSON
const configPath = require.resolve('./config.json');
// Works in general
const configPath = require('path').join(__dirname, 'config.json');
// Then
const readConfig = require('read-config');
const config = readConfig(configPath);
It's difficult to say if this is the case without knowing more about your project layout and how you're starting your app.
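If the real goal is to keep the unit test away from config.json entirely, another option is to stub read-config itself rather than library.js. Note that proxyquire only replaces the direct requires of the module it loads; since read-config is required inside library.js, the stub has to be flagged with proxyquire's @global option. A sketch under those assumptions (the canned config object is hypothetical):

const proxyquire = require('proxyquire');

const readConfigStub = function (configPath) {
    return { env: 'test' }; // hypothetical canned config
};
readConfigStub['@global'] = true; // also intercept requires inside nested modules

const index = proxyquire('./index.js', { 'read-config': readConfigStub });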
I have written some small code to get files filtered by extension. From my point of view the logic is fine, but I cannot figure out why I am not getting the expected output. Please have a look.
CODE
var fs = require('fs');
var path = require('path');

path_name = process.argv[2];
ext_name = "." + process.argv[3];

var filter_function = function (path_name, exthide_name, callback) {
    fs.readdir(dirpath, function (err, list) {
        if (err) return console.error(err);
        for (var i in list) {
            if (path.extname(list[i]) == ext_name)
                console.log(list[i]);
        }
    });
};

module.exports = filter_function;
Output:
linuxmen#linuxmen-fresh:~/test/test1$ node ownModuleNode.js /home/linuxmen/test/test1/ js
linuxmen#linuxmen-fresh:~/test/test1$
But there are many files with the js extension in that directory.
Proof:
linuxmen#linuxmen-fresh:~/test/test1$ ls *js
check_mod1.js ex1.js ex2.js ex3.js ex4.js ex5.js ex6.js ex7.js ex8.js filter.js filter_use.js modse.js ownModuleNode.js se.js use_mod1.js using_module.js
Could you please help me find what I am missing?
Update 1: I am using the above code as a module file and calling it here.
File using the above code:
var mymodule = require('./ownModuleNode')
mymodule.filter_function(process.argv[2],process.argv[3])
Update 2:
var fs = require('fs');
var path = require('path');

path_name = process.argv[2];
ext_name = "." + process.argv[3];
console.log("path_name :", path_name, "extname:", ext_name);

var filter_function = function (path_name, ext_name, callback) {
    fs.readdir(path_name, function (err, list) {
        if (err) console.error(err);
        console.log(list);
        for (var i in list) {
            if (path.extname(list[i]) == ext_name)
                console.log(list[i]);
        }
    });
};

module.exports = filter_function;
Output:
linuxmen#linuxmen-fresh:~/test/test1$ node ownModuleNode.js /home/linuxmen/test/test1/ js
path_name : /home/linuxmen/test/test1/ extname: .js
Thank you.
It looks like you are exporting the function directly. When you require() it, you just get the function back. You'll need to use your module in your application; put this in app.js in the same directory as ownModuleNode.js:
var filterFunction = require('./ownModuleNode');
filterFunction(process.argv[2], process.argv[3]);
Then call it with:
node app ~/Documents/dev/project .js
Outputs:
app.js
ownModuleNode.js
Note that when you pass the extension, you need the preceding dot, because path.extname() returns the extension with the leading dot.
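As a side note, filter_function declares a callback parameter that it never invokes. If you would rather have the module hand the matching names back to the caller instead of logging them, a minimal sketch (same file layout as above) could look like this:

// ownModuleNode.js
var fs = require('fs');
var path = require('path');

module.exports = function (dir, ext, callback) {
    fs.readdir(dir, function (err, list) {
        if (err) return callback(err);
        var matches = list.filter(function (name) {
            return path.extname(name) === ext; // caller passes the extension with the dot, e.g. '.js'
        });
        callback(null, matches);
    });
};

// app.js
var filterFunction = require('./ownModuleNode');
filterFunction(process.argv[2], process.argv[3], function (err, files) {
    if (err) throw err;
    files.forEach(function (f) { console.log(f); });
});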
I'm following the nodeschool learnyounode tutorial and I'm having some trouble with the module lesson. In short I'm supposed to make a module which I pass a directory to and a file extension and then use a callback to print the list of files with that extension in the folder.
I have two files, my module (mymodule.js):
var module = require('module');
var fs = require('fs');
var path = require('path');
module.exports = function(testDirectory, testExtension, callback)
{
fs.readdir(testDirectory, function(error, folderContents)
{
if (error) return callback(error);
for (i = 0; i < folderContents.length; i++)
{
var fileExtension = path.extname(folderContents[i]);
if(fileExtension === "." + testExtension)
{
callback(null, folderContents[i]);
}
}
});
};
and my app file (program.js):
var mymodule = require('./mymodule.js');
mymodule(process.argv[2], process.argv[3], function(error, data)
{
console.log(data);
});
Whenever I try to test my app file, I get TypeError: mymodule is not a function.
I've done lots of reading, but the more I read the more confused I become. Can someone please help?
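One detail worth checking: the line var module = require('module') in mymodule.js shadows the module object that Node injects into every file, so the module.exports = ... assignment no longer sets this file's exports, and require() hands back an empty object instead of your function. Dropping that line, a working sketch of the module is:

var fs = require('fs');
var path = require('path');

module.exports = function (testDirectory, testExtension, callback) {
    fs.readdir(testDirectory, function (error, folderContents) {
        if (error) return callback(error);
        for (var i = 0; i < folderContents.length; i++) {
            if (path.extname(folderContents[i]) === '.' + testExtension) {
                callback(null, folderContents[i]);
            }
        }
    });
};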
The argument of require(...) in node.js is a filename. If I had a module source code in a string code, could I somehow call require(code) and load functions from that string?
I put this into a function for reuse. It creates a file in the OS temp directory based on a random hash, requires it, and then deletes it.
var fs = require('fs'),
os = require('os'),
crypto = require('crypto');
function requireString(moduleString) {
var token = crypto.randomBytes(20).toString('hex'),
filename = os.tmpdir() + '/' + token + '.js',
requiredModule = false;
// write, require, delete
fs.writeFileSync(filename, moduleString);
requiredModule = require(filename);
fs.unlinkSync(filename);
return requiredModule;
}
Then you can do:
var carString = "exports.start = function(){ console.log('start'); };",
car = requireString(carString);
console.log("Car:", car);
This is still more of a workaround, but more convenient to use, I think.
A workaround could be to write the module source code to a temporary file ./tmp-file.js, then require('./tmp-file'), and then remove the file.
This is probably not optimal, because you would either have to block and write the file synchronously, or put everything that requires the module in the callback of the async write.
A working example for async file write (gist - also includes sync file write):
var http = require('http');
var fs = require('fs');
var helloModuleString = "exports.world = function() { return 'Hello World\\n'; }";
fs.writeFile('./hello.js', helloModuleString, function (err) {
if (err) return console.log(err);
var hello = require('./hello');
http.createServer(function (req, res) {
res.writeHead(200, {'Content-Type': 'text/plain'});
res.end(hello.world());
}).listen(1337, '127.0.0.1');
console.log('Server running at http://127.0.0.1:1337/');
});
Results in:
$ curl 127.0.0.1:1337
> Hello World
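Another option that avoids touching the filesystem entirely is to compile the string with Node's built-in vm module, reproducing the wrapper that require uses internally. A minimal sketch (error handling omitted; the string gets no real __filename or __dirname):

var vm = require('vm');

function requireFromString(src) {
    var module = { exports: {} };
    // wrap the source the same way Node wraps real module files
    var wrapper = vm.runInThisContext(
        '(function (exports, require, module) {' + src + '\n})'
    );
    wrapper(module.exports, require, module);
    return module.exports;
}

var car = requireFromString("exports.start = function () { console.log('start'); };");
car.start(); // prints "start"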
I need to pass in a text file in the terminal and then read the data from it, how can I do this?
node server.js file.txt
How do I pass in the path from the terminal, and how do I read it on the other side?
You'll want to use the process.argv array to access the command-line arguments to get the filename and the FileSystem module (fs) to read the file. For example:
// Make sure we got a filename on the command line.
if (process.argv.length < 3) {
console.log('Usage: node ' + process.argv[1] + ' FILENAME');
process.exit(1);
}
// Read the file and print its contents.
var fs = require('fs')
, filename = process.argv[2];
fs.readFile(filename, 'utf8', function(err, data) {
if (err) throw err;
console.log('OK: ' + filename);
console.log(data)
});
To break that down a little: process.argv will usually have length two, the zeroth item being the "node" interpreter and the first being the script that node is currently running; items after that were passed on the command line. Once you've pulled a filename from argv, you can use the filesystem functions to read the file and do whatever you want with its contents. Sample usage would look like this:
$ node ./cat.js file.txt
OK: file.txt
This is file.txt!
[Edit] As #wtfcoder mentions, using the "fs.readFile()" method might not be the best idea because it will buffer the entire contents of the file before yielding it to the callback function. This buffering could potentially use lots of memory but, more importantly, it does not take advantage of one of the core features of node.js - asynchronous, evented I/O.
The "node" way to process a large file (or any file, really) would be to use fs.read() and process each available chunk as it is available from the operating system. However, reading the file as such requires you to do your own (possibly) incremental parsing/processing of the file and some amount of buffering might be inevitable.
Using fs with node.
var fs = require('fs');
try {
    var data = fs.readFileSync('file.txt', 'utf8');
    console.log(data); // already a string thanks to the 'utf8' flag
} catch(e) {
console.log('Error:', e.stack);
}
IMHO, fs.readFile() should be avoided because it loads the whole file into memory and won't call the callback until the entire file has been read.
The easiest way to read a text file is to read it line by line. I recommend a BufferedReader:
var BufferedReader = require('buffered-reader'); // assumption: the third-party buffered-reader package exports the constructor directly

new BufferedReader ("file", { encoding: "utf8" })
.on ("error", function (error){
console.log ("error: " + error);
})
.on ("line", function (line){
console.log ("line: " + line);
})
.on ("end", function (){
console.log ("EOF");
})
.read ();
For complex data structures like .properties or JSON files you need to use a parser (internally it should also use a buffered reader).
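If you would rather not pull in a third-party package, Node's core readline module can also emit lines from a stream. A minimal sketch:

var fs = require('fs');
var readline = require('readline');

var rl = readline.createInterface({
    input: fs.createReadStream('file', { encoding: 'utf8' })
});

rl.on('line', function (line) {
    console.log('line: ' + line);
});
rl.on('close', function () {
    console.log('EOF');
});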
You can use a read stream and pipe to read the file line by line, without reading the whole file into memory at once.
var fs = require('fs'),
    es = require('event-stream');

var filename = process.argv[2]; // path to the file to read

var s = fs.createReadStream(filename)
    .pipe(es.split())
    .pipe(es.mapSync(function (line) {
        // pause the read stream while the line is handled
        s.pause();
        console.log('line:', line);
        s.resume();
    })
    .on('error', function (err) {
        console.log('Error:', err);
    })
    .on('end', function () {
        console.log('Finish reading.');
    })
    );
I am posting a complete example which I finally got working. Here I am reading in a file rooms/rooms.txt from a script rooms/rooms.js
var fs = require('fs');
var path = require('path');
var readStream = fs.createReadStream(path.join(__dirname, '../rooms', 'rooms.txt'), 'utf8');
let data = '';
readStream.on('data', function(chunk) {
data += chunk;
}).on('end', function() {
console.log(data);
});
The async way of life:
#! /usr/bin/node
const fs = require('fs');
function readall (stream)
{
return new Promise ((resolve, reject) => {
const chunks = [];
stream.on ('error', (error) => reject (error));
stream.on ('data', (chunk) => chunk && chunks.push (chunk));
stream.on ('end', () => resolve (Buffer.concat (chunks)));
});
}
function readfile (filename)
{
return readall (fs.createReadStream (filename));
}
(async () => {
let content = await readfile ('/etc/ssh/moduli').catch ((e) => {})
if (content)
console.log ("size:", content.length,
"head:", content.slice (0, 46).toString ());
})();