Read contents of a file and print it to the console using JavaScript

I am new to JavaScript and I would like to write a program that reads from a file and prints the contents of the file to the console. This is the code I have written below, and I am getting an error. What's wrong with it?
var express = require('express');
var app = express.createServer(express.logger());

app.get('/', function (request, response) {
    var fs = require('fs');
    var buffer = new Buffer(fs.readFileSync('index.html', 'utf8'));
    response.send(Buffer.toString());
});

var port = process.env.PORT || 5000;
app.listen(port, function () {
    fs.readFileSync();
    console.log("Listening on" + port);
});

Use the readFile method of the fs object to read the file, then use console.log to print it:
/* File System Object */
var fs = require('fs');

/* Read File (pass 'utf8' so the callback receives a string, not a raw Buffer) */
fs.readFile('foo.json', 'utf8', bar);

function bar(err, data) {
    /* If an error exists, throw it, otherwise print the file contents */
    if (err) {
        throw err;
    }
    console.log(data);
}
For instance, if it is named loadfiles.js, run it as such:
node loadfiles.js
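Applied to the original question, a minimal sketch of a working version (assuming a current Express release, with index.html sitting next to the script) might look like this:
```
var express = require('express');
var fs = require('fs');

var app = express(); // express() replaces the old express.createServer()

app.get('/', function (request, response) {
    // read the file asynchronously and hand its contents back
    fs.readFile('index.html', 'utf8', function (err, data) {
        if (err) {
            return response.status(500).send('Could not read index.html');
        }
        console.log(data);   // print the contents to the console
        response.send(data); // and return them to the client
    });
});

var port = process.env.PORT || 5000;
app.listen(port, function () {
    console.log('Listening on ' + port);
});
```
The main fixes are calling toString() on the buffer variable rather than on the Buffer constructor, and not calling fs.readFileSync() with no arguments inside the listen callback.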

Related

Getting the error: Cannot GET / despite linking HTML file

I am trying to host the following JS file using Node.js for the first time. The following is the beginning of the code. I had to use JSDOM since I was trying to use the buttons from the blog.html file here. The file 'index.html' is present in the folder public, and when I run it in the terminal I see the message 'hosting', but the browser says 'Cannot GET /'. Here is my code:
var request = require('request');
const express = require('express');
const app =express();
app.listen(3000, ()=>console.log('hosting'));
app.use(express.static('public' ));
app.get('./blog.html', function(request, response) {
response.sendFile('blog.html');
});
var jsdom = require('jsdom');
const { JSDOM } = jsdom;
const { document } = (new JSDOM('blog.html')).window;
global.document = document;
const postBtn1 = document.getElementById("post-btn1");
const postBtn2 = document.getElementById("post-btn2");
const postBtn3 = document.getElementById("post-btn3");
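One thing that stands out is the route path './blog.html' and the relative sendFile path; a rough sketch of the serving part, assuming blog.html (and an index.html for the / route) really live in the public folder, could be:
```
const path = require('path');
const express = require('express');
const app = express();

// serve everything in public/ (index.html, blog.html, ...) as static files
app.use(express.static('public'));

// route paths start with '/', not './'
app.get('/blog.html', function (request, response) {
    // sendFile wants an absolute path (or a root option)
    response.sendFile(path.join(__dirname, 'public', 'blog.html'));
});

app.listen(3000, () => console.log('hosting'));
```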

export module in node.js

I have the following piece of code in my "getpics.js" file:
var path = require('path');
var fs = require('fs');
const directoryPath = './public/img/slider'
exports.fileOnDisk = function () {
    fs.readdir(directoryPath, function (err, files) {
        // handle errors
        if (err) {
            return console.log('Unable to scan directory: ' + err);
        }
        console.log(files);
        return files;
    });
};
return module.exports;
here is how my mail.js calls the module:
var getpics = require('./public/js/slider/getpics.js');
getpics.fileOnDisk();
and this is the printout on the console:
[ 'next.png', 'next_hover.png', 'prev.png', 'prev_hover.png',
'slide1.jpg', 'slide2.jpg', 'slide3.jpg', 'slide4.jpg',
'slide5.jpg' ]
all good until now.
The question is: why can't I export the "files" outside this module, for example into a variable, to use them in my application?
The reason why you're unable to export those files directly is due to the async nature of NodeJS, specifically to the file system call fs.readdir. As that function call is processed in an asynchronous fashion, the code execution will proceed, and you won't be able to access whatever the result of that function is in order to export it. You can read more about it in the about section of NodeJS.
However, the NodeJS file system API does provide synchronous methods. Specifically to your case fs.readdirSync. Using that in your code you would end up with something like:
var path = require('path');
var fs = require('fs');
const directoryPath = './public/img/slider'
exports.fileOnDisk = fs.readdirSync(directoryPath, {encoding: 'utf8'})
You could then import this module and access the array of file names straight from fileOnDisk.
Be careful however as this code will be blocking.
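If the blocking call is a concern, the promise-based file system API gives a non-blocking variant; a rough sketch, assuming Node 10+ and the same directory layout:
```
var fs = require('fs');
const directoryPath = './public/img/slider';

// async variant: callers receive a Promise that resolves to the file list
exports.fileOnDisk = function () {
    return fs.promises.readdir(directoryPath, { encoding: 'utf8' });
};
```
The caller then waits for the promise instead of expecting a plain array:
```
var getpics = require('./public/js/slider/getpics.js');
getpics.fileOnDisk().then(function (files) {
    console.log(files); // the array is only available once the read has completed
});
```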

Trouble using GET for a json file in node js

I'm new to REST APIs and JavaScript in general and I'm having trouble using GET to retrieve a JSON file and display it in a browser. I'm only running my API on localhost for now. I can get my server running but just can't get my JSON file to display. Below is my code; I have tried different things with the response but have had no luck getting it to work. Everything I've tried has produced errors. Both this file and the JSON file are in the same folder. If someone knows what I need to put in place of the //responce() line, it would be greatly appreciated. Thanks!
var http = require('http');
var express = require('express');
var app = express();
var port = process.env.port || 3000;
app.listen(port, function () {
    var datetime = new Date();
    var message = "Server running on Port:- " + port + " Started at :- " + datetime;
    console.log(message);
});

app.get("/userget", function (request, responce) {
    var fs = require('fs');
    var obj = JSON.parse(fs.readFileSync('database.json', 'utf8'));
    //responce()
});
If you already have your JavaScript object (obj in the code above), try:
responce.status(200).json(obj);
Or put this in the body of your /userget route instead:
```
responce.setHeader('Content-Type', 'application/json');
var fs = require('fs');
responce.send(JSON.parse(fs.readFileSync('database.json', 'utf8')));
```
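Putting the pieces together, one possible shape for the /userget route (a sketch that still reads database.json from the working directory, but asynchronously) is:
```
app.get("/userget", function (request, responce) {
    var fs = require('fs');
    // read asynchronously so the server is not blocked while the file loads
    fs.readFile('database.json', 'utf8', function (err, data) {
        if (err) {
            return responce.status(500).send('Could not read database.json');
        }
        responce.json(JSON.parse(data)); // also sets Content-Type: application/json
    });
});
```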

Pass Buffer to ChildProcess Node.js

Here I have a Node.js script where I want to do image processing in a subprocess.
As you will see, I take the file image.jpg and want to write it back as hello.jpg from a subprocess:
var node = require('child_process').spawn('node', ['-i']);
var fs = require('fs');

node.stdout.on('data', function (data) {
    var fs = require('fs');
    var gm = require('gm').subClass({ imageMagick: true });
    gm(data)
        .resize(500, 500)
        .toBuffer("jpg", function (err, buffer) {
            if (err) {
                console.log(err);
            } else {
                fs.writeFile("hello.jpg", buffer);
            }
        });
});

var buffer = fs.readFileSync(__dirname + "/image.jpg");
node.stdin.write(buffer);
However when I run this file I get this error:
[Error: Stream yields empty buffer]
To me it seems like the buffer is not passed correctly to the subprocess?
What am I doing wrong? What can I do to run image processing in a subtask? It's important that the image is not read from a file in the subprocess, because I want to read the file once and then send the buffer to several subprocesses that do image transformations. Thanks!
You are not doing any work in a subprocess. It is just node -i and nothing else. All your image processing happens in the main process.
To fix it, you can actually run another Node process and give it some script to execute, say worker.js:
process.stdin.on('data', function (data) {
    var fs = require('fs');
    var gm = require('gm').subClass({ imageMagick: true });
    gm(data)
        .resize(500, 500)
        .toBuffer("jpg", function (err, buffer) {
            if (err) {
                console.log(err);
            } else {
                fs.writeFile("hello.jpg", buffer);
            }
        });
});
Then you would create a subprocess from your main script:
var node = require('child_process').spawn('node', ['worker.js']);
var fs = require('fs');
var buffer = fs.readFileSync(__dirname + "/image.jpg");
node.stdin.end(buffer);
Note that I used node.stdin.end in the last line to terminate the worker.
Take a look at cluster module for the alternative approach.
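To fan the same image out to several subprocesses, as the question describes, the wiring could look roughly like the sketch below. Note that every copy of worker.js above writes the same hello.jpg, so in practice you would parameterize the output name, and for large images the worker should collect stdin chunks until the 'end' event before handing them to gm.
```
var spawn = require('child_process').spawn;
var fs = require('fs');

// read the image once in the main process...
var buffer = fs.readFileSync(__dirname + "/image.jpg");

// ...and hand the same buffer to several workers
for (var i = 0; i < 3; i++) {
    var worker = spawn('node', ['worker.js']);
    worker.stdin.end(buffer); // write the buffer and close this worker's stdin
}
```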

Require in nodejs

The argument of require(...) in node.js is a filename. If I had a module source code in a string code, could I somehow call require(code) and load functions from that string?
I put this into a function for reuse. It creates a file in the os temp directory based on a random hash, requires it and then deletes it.
var fs = require('fs'),
    os = require('os'),
    crypto = require('crypto');

function requireString(moduleString) {
    var token = crypto.randomBytes(20).toString('hex'),
        filename = os.tmpdir() + '/' + token + '.js',
        requiredModule = false;

    // write, require, delete
    fs.writeFileSync(filename, moduleString);
    requiredModule = require(filename);
    fs.unlinkSync(filename);

    return requiredModule;
}
Then you can do:
var carString = "exports.start = function(){ console.log('start'); };",
car = requireString(carString);
console.log("Car:", car);
This is still more of a workaround, but more convenient to use, I think.
A workaround could be to write the module source code to a temporary file, e.g. ./tmp-file.js, then require('./tmp-file'), and then remove the file.
This is probably not optimal because you would either have to block and write the file synchronously, or put everything requiring that module in the callback to the async write.
A working example for async file write (gist - also includes sync file write):
var http = require('http');
var fs = require('fs');

var helloModuleString = "exports.world = function() { return 'Hello World\\n'; }";

fs.writeFile('./hello.js', helloModuleString, function (err) {
    if (err) return console.log(err);

    var hello = require('./hello');

    http.createServer(function (req, res) {
        res.writeHead(200, {'Content-Type': 'text/plain'});
        res.end(hello.world());
    }).listen(1337, '127.0.0.1');

    console.log('Server running at http://127.0.0.1:1337/');
});
Results in:
$ curl 127.0.0.1:1337
> Hello World
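If touching the disk at all is undesirable, the vm module can evaluate the module source in memory instead; a minimal sketch (requireFromString is just a hypothetical helper here, not a Node API):
```
var vm = require('vm');

function requireFromString(code) {
    // give the evaluated code its own module/exports objects, plus the host require
    var sandbox = { module: { exports: {} }, require: require, console: console };
    sandbox.exports = sandbox.module.exports;
    vm.runInNewContext(code, sandbox);
    return sandbox.module.exports;
}

var car = requireFromString("exports.start = function(){ console.log('start'); };");
car.start(); // prints "start"
```
Unlike the temp-file approach it never hits the file system, but the evaluated code gets no __dirname/__filename, and relative requires resolve against the module that defined requireFromString.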
