Parsing JSON after reading file with fs - javascript

I'm trying to establish communication between two node.js scripts.
The first one does a GET request and writes the response to a file.
The second one watches the file for changes, then reads it and prints the result.
The first (get then write)
var request = require('request');
var parseString = require('xml2js').parseString;
var fs = require('fs');
//Some needed variables

streamInterval = setInterval(function() {
    request.get(addr, function (error, response, body) {
        if (!error && response.statusCode == 200) {
            parseString(body, { explicitArray: false, ignoreAttrs: true }, function (err, result) {
                var jsonResult = JSON.stringify(result);
                var result = JSON.parse(jsonResult);
                fs.writeFile(outputDeparts, JSON.stringify(result, null, 4), function(err) {
                    if (err) {
                        console.log(err);
                    }
                });
            });
        } else {
            console.log("An error occurred: " + response.statusCode);
        }
    }).auth(LOGIN, PASS, true);
}, 30000);
The second (watch for changes, read and print)
var fs = require('fs');
//Some needed variables

fs.watch(outputDeparts, (eventType, filename) => {
    console.log(`event type is: ${eventType}`);
    if (filename) {
        console.log(`filename provided: ${filename}`);
        fs.readFile(outputDeparts, 'utf8', function (err, data) {
            if (err) {
                throw err;
            } else {
                console.log('start parsing');
                console.log(data);
                var result = JSON.parse(data);
                var departs = result["passages"]["train"];
                console.log(`next train [${departs[0]["num"]}] at : ${departs[0]["date"]}`);
            }
        });
    } else {
        console.log('filename not provided');
    }
});
The first time the file is changed everything is OK! But after 30 seconds, at the second change, I get the following error:
undefined:1
SyntaxError: Unexpected end of input
at Object.parse (native)
at /Users/adobe/Documents/workspace/playWithNode/watchFile.js:17:23
at tryToString (fs.js:414:3)
at FSReqWrap.readFileAfterClose [as oncomplete] (fs.js:401:12)
I thought it was a problem with reading/writing the file asynchronously but didn't manage to find a fix...
Is anyone able to help me or does anyone have clues? Thanks

…it isn't encouraged to access the file system asynchronously here because, while you are reading, the target file can be modified by something else in the app life cycle.
I suggest you use fs.readFileSync and wrap the JSON.parse in a try/catch:
var fs = require('fs');

function onNextTrain(data) {
    console.log("onNextTrain", data);
    return data;
}

fs.watch(outputDeparts, (eventType, filename) => {
    return new Promise((resolve, reject) => {
        if (!filename) {
            return reject("FILE NAME NOT PROVIDED");
        }
        let
            data = fs.readFileSync(filename, 'utf8'),
            result
        ;
        try {
            result = JSON.parse(data);
        } catch (error) {
            result = data;
        }
        return resolve(result);
    })
    .then(onNextTrain)
    .catch(err => console.log("error", err))
    ;
});

Try changing the code in the second file to
var result;
try {
    result = JSON.parse(data);
} catch (err) {
    console.error(err, data);
}
because SyntaxError: Unexpected end of input is a JSON.parse error.
In the first file I see a potential problem in
fs.writeFile(outputDeparts, data, function(err) { ...
The callback function should be used to order the writes, but your code only logs the error.
Node.js docs
Note that it is unsafe to use fs.writeFile multiple times on the same file without waiting for the callback. For this scenario, fs.createWriteStream is strongly recommended.
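As a minimal sketch (not from either answer, and assuming the outputDeparts path and result object from the question), one way to make sure the watcher never sees a half-written file is to write to a temporary path and then rename it over the real one; rename is atomic on the same filesystem:
// Sketch only: `outputDeparts` and `result` are the variables from the
// question; the '.tmp' sibling path is a hypothetical temporary file.
var fs = require('fs');

function saveDeparts(result) {
    var tmpPath = outputDeparts + '.tmp';
    fs.writeFile(tmpPath, JSON.stringify(result, null, 4), function (err) {
        if (err) return console.log(err);
        // rename is atomic on the same filesystem, so the watcher only ever
        // sees either the old complete file or the new complete one.
        fs.rename(tmpPath, outputDeparts, function (err) {
            if (err) console.log(err);
        });
    });
}
The reader can still keep a try/catch around JSON.parse as an extra guard.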

Related

Async http-request, node.js modules and variables

I'm currently struggling to get variable values from one node.js module into another. This is my current problem:
I am fetching data from a REST API via https-request:
// customrequest.js
sendRequest(url, function(data, err) {
    if (err) {
        console.log('--- Error ---');
        console.log(err);
    }
    else {
        console.log('--- Response ---');
        console.log(data);
        // output: data
        return data;
    }
});

module.exports = { sendRequest }
And my index.js file:
// index.js
let sendRequest = require('./customrequest');
let req;
req = sendRequest('google.com');
console.log(req);
// output: undefined
// how can I get the variable set, when request is getting data in response?
I totally understand that the request to an API takes some time to respond. One solution would be to just put everything into one js file, but as my project will get bigger over time, the modular approach is my go-to solution. Any suggestions on how to solve this?
Node uses callbacks for this situation. Try something like this:
// customrequest.js
sendRequest(url, callback)
module.exports = { sendRequest }
// index.js
let sendRequest = require('./customrequest');
let req = sendRequest('google.com', function (data, err) {
    if (err) {
        // handle error here
    }
    console.log(data);
});
Thanks. The problem I encountered is somewhat different. I solved it with these code snippets, using async and await.
// request.js
const fetch = require('node-fetch')

async function myRequest (somestring) {
    try {
        let res = await fetch('https://api.domain.com/?endpoint=' + somestring)
        if (res.ok) return res.json()
        return new Error(res.status)
    } catch (err) {
        console.error('An error occurred', err)
    }
}

module.exports = { myRequest }
// index.js
const request = require('./requests')
const myRequest = request.myRequest

let myVar;

myRequest('somestring')
    .then(res => myVar = res.result)

setInterval(() => {
    myRequest('somestring')
        .then(res => myVar = res.result)
    console.log(myVar)
}, 1000)
An async function returns a promise; when the promise resolves, its result is assigned to the variable.
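If the goal is just to log the value once it is available, another sketch (not part of the answer above; main is a hypothetical wrapper) is to await the call inside an async function instead of reading myVar on a timer:
// index.js -- sketch assuming the myRequest export shown above
const { myRequest } = require('./requests')

async function main () {
    const res = await myRequest('somestring') // wait for the fetch to finish
    console.log(res)                          // res is defined at this point
}

main().catch(err => console.error(err))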

Nodejs respond after data has finished processing in request

I would like the response to differ depending on the finished request I receive. I am sending a POST request and receive an XML file. The result is either a success or an error. I use xml2json to convert the XML into a JSON object, then, depending on the response, I want to output JSON.
The problem is that I can't have a response inside a response. I also can't save the value of the callback for later usage (since it's asynchronous).
I have thought about using Promises but I'm not sure. What should I do?
The order of operations should be
1) Send request
2) Get buffer response
3) Join Buffers. Process xml into JSON
4) Depending on the type of JSON entry, output either res.json('success') or res.json('error') if the xml responds with an error.
app.post('/api/submit', (req, res) => {
    ...
    const request = https.request(options, (res) => {
        let chunks = [];
        res.on("data", function(chunk) {
            chunks.push(chunk);
        });
        res.on("end", function(err) {
            if (err) throw err;
            let body = Buffer.concat(chunks);
            xmlConverter(body, function(err, result) {
                console.dir(result);
                if (result.entry) {
                    console.log('✅ Success')
                    //Respond with json here --> res.json('success')
                } else if (result.error) {
                    console.log('There was an error processing your request');
                    //or here if there was an error --> res.json('error')
                }
            });
        });
    });
    request.end()
You can respond inside the callback. The problem is that you have two variables, both named res, so one shadows the other. You just need to change one of the res variable names so you're not shadowing it. For example, you can change:
const request = https.request(options, (http_res) => { // <-- change argument name
Then later:
if (result.entry) {
    console.log('✅ Success')
    http_res.json('success') // <-- use the response object from the request
The problem of not being able to save the result for later is a different problem, but it is easy to solve. The solution really depends on what you are trying to do. If, for example, you want to further process the data, you can set up a function to call and pass the response data in. Something like:
function process_data(response) {
    // use the response here
}
Then you can simply call it when you get the data:
if (result.entry) {
    console.log('✅ Success')
    http_res.json('success') // <-- use the response object from request
    process_data(result)
Of course maybe your use case is more complicated, but without more details it's hard to give a specific answer.
Don't use the same name for both res variables, because they are different. Simply use the outer res variable to respond to the request with the value you want.
I think it would be something like this:
app.post('/api/submit', (req, res) => {
    ...
    const request = https.request(options, (resValue) => {
        let chunks = [];
        resValue.on("data", function(chunk) {
            chunks.push(chunk);
        });
        resValue.on("end", function(err) {
            if (err) throw err;
            let body = Buffer.concat(chunks);
            xmlConverter(body, function(err, result) {
                console.dir(result);
                if (result.entry) {
                    console.log('✅ Success')
                    res.json('success')
                } else if (result.error) {
                    console.log('There was an error processing your request');
                    res.json('error')
                }
            });
        });
    });
    request.end()
What exactly is the issue? You are perfectly able to rename the argument of the callback function supplied to https.request(options, callbackFunction) -- it is not important what this variable is named.
app.post('/api/submit', (req, res) => {
    const request = https.request(options, (potato) => {
        let chunks = [];
        potato.on("data", function(chunk) {
            chunks.push(chunk);
        });
        potato.on("end", function(err) {
            if (err) throw err; // TODO res.status(500).json({}); ??
            let body = Buffer.concat(chunks);
            xmlConverter(body, function(err, result) {
                console.dir(result);
                if (result.entry) {
                    console.log('✅ Success')
                    res.status(200).json({});
                } else if (result.error) {
                    console.log('There was an error processing your request');
                    res.status(500).json({});
                }
            });
        });
    });
    request.end() // end the request after wiring up the handlers
});
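Since the question also mentions promises, a further sketch (not part of either answer; fetchAndParse is a hypothetical helper, and options, xmlConverter, https and app are the ones from the question) is to wrap the whole request in a Promise so the route handler can simply await the parsed result:
function fetchAndParse(options) {
    return new Promise((resolve, reject) => {
        const request = https.request(options, (response) => {
            const chunks = [];
            response.on('data', (chunk) => chunks.push(chunk));
            response.on('end', () => {
                xmlConverter(Buffer.concat(chunks), (err, result) => {
                    if (err) return reject(err);
                    resolve(result);
                });
            });
        });
        request.on('error', reject);
        request.end();
    });
}

app.post('/api/submit', async (req, res) => {
    try {
        const result = await fetchAndParse(options);
        res.json(result.entry ? 'success' : 'error');
    } catch (err) {
        res.status(500).json('error');
    }
});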

Async .eachLimit callback Error not called

I have the following code in my node.js project.
async.eachLimit(dbresult, 1, function (record, callback) {
    var json = JSON.stringify(record)
    var form = new FormData()
    form.append('data', json)
    form.submit(cfg.server + '/external/api', function (err, res) {
        if (err) {
            callback(err)
        }
        if (res.statusCode === 200) {
            connection.query('UPDATE selected_photos set synced = 1 WHERE selected_id = "' + record.selected_id + '"', function (err, result) {
                if (err) {
                    console.log(err)
                    callback(err)
                } else {
                    callback()
                }
            })
        } else {
            console.log(res.statusCode)
            return callback(err)
        }
    })
}, function (err) {
    // if any of the file processing produced an error, err would equal that error
    if (err) {
        // One of the iterations produced an error.
        // All processing will now stop.
        console.log('A share failed to process. Try rerunning the Offline Sync')
        process.exit(0)
    } else {
        console.log('All files have been processed successfully')
        process.exit(0)
    }
})
}
res.statusCode is 302, so this should error out. But the error callback is never triggered. How do I get it to trigger the error so that it stops eachLimit and shows the
console.log('A share failed to process. Try rerunning the Offline Sync')
You have:
if (err) {
in the first line of the form submit handler. Past that check you know there was no error, so when you inspect the response statusCode and try to call back with err, you are calling back with an empty value.
That is why you do not get an error when checking for it in your final callback function.
Try creating new Error('Status not OK: ' + res.statusCode) when calling back from your form submit handler.
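A sketch of that change, reusing the variables from the question (cfg, record, connection, callback):
form.submit(cfg.server + '/external/api', function (err, res) {
    if (err) {
        return callback(err) // return so the rest of the handler is skipped
    }
    if (res.statusCode === 200) {
        connection.query('UPDATE selected_photos set synced = 1 WHERE selected_id = "' + record.selected_id + '"', function (err, result) {
            if (err) {
                console.log(err)
                return callback(err)
            }
            callback()
        })
    } else {
        console.log(res.statusCode)
        // err is undefined here, so build a real Error for the final callback
        return callback(new Error('Status not OK: ' + res.statusCode))
    }
})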

Javascript scoping issue, object has no data after assigning it

I am using NodeJS to create an express endpoint that will retrieve the metadata from my images stored on my server. I have the following code for the logic of the endpoint:
/*
* Gallery Controller
*/
var fs = require('fs'),
    _ = require('underscore'),
    im = require('imagemagick');
/**
* List paths to images stored in selected gallery
*/
exports.list = function(req, res) {
    var dir = 'public/images/' + req.params.id;
    fs.readdir(dir, function(err, files) {
        if (err) return res.send({error: 'No gallery found with provided id'}, 404);
        if (files.length > 0) {
            var collection = [],
                myData = {};
            files.forEach(function(file) {
                if (file === '.DS_Store') return;
                im.readMetadata(dir + '/' + file, function(err, metadata) {
                    if (err) throw err;
                    myData = metadata;
                    console.log(myData); // logs as object with expected data
                });
                console.log(myData); // logs as empty {}
                collection.push(myData);
            });
            console.log(collection); // logs as [ {}, {} ]
            res.json(collection, 200);
        } else {
            res.json({error: 'Selected gallery is empty'}, 404);
        }
    });
};
I've listed what the logs appear as in the terminal, why am I getting this scoping issue? I can't seem to wrap my head around it. If I try to return the metadata obj and assign it to the var, I get the following error: TypeError: Converting circular structure to JSON
Use the async module, it'll improve your life in many ways.
The problem you are having is a common one: your loop is asynchronous, but you are treating it as if it were synchronous.
Instead of doing files.forEach, you want to loop over the files asynchronously and then do some more stuff when the looping is done. You can use async.each for that.
async.each(files, function (file, next) {
    if (file === '.DS_Store') return next();
    im.readMetadata(path.join(dir, file), function (e, data) {
        collection.push(data);
        next(e);
    });
}, function (err) {
    if (err) throw err;
    console.log(collection);
});
As an alternative, an even more appropriate solution might be to use async.map.
async.map(files, function (file, next) {
    if (file === '.DS_Store') return next();
    im.readMetadata(path.join(dir, file), next);
}, function (err, collection) {
    if (err) throw err;
    console.log(collection);
});
You need to restructure your code:
files.forEach(function(file, i) {
    if (file === '.DS_Store') return; // see text
    im.readMetadata(dir + '/' + file, function(err, metadata) {
        if (err) throw err;
        collection.push(metadata);
        if (i === files.length - 1) {
            res.json(collection); // see text
        }
    });
});
The reason is that the metadata is only available when the callback function to readMetadata is called; that's how asynchronous I/O works in Node.
In that callback, you add the metadata to the collection. If the iteration of the forEach has reached the final element (i is the index of the current element; when its value is one less than the size of the array, it's the last element), the response is sent.
Two issues:
if .DS_Store is the last/only file in the directory, this code will fail because it will never send back a response; I'll leave it to you to deal with that case ;) (one possible approach is sketched after this list)
res.json will, by default, return a 200 status so you don't have to specify it; if you do want to specify a status, it needs to be res.json(200, collection) (arguments swapped)
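One possible way to handle both points, as a sketch only (not part of the original answer; it reuses dir, files, im and res from the question):
var images = files.filter(function (file) { return file !== '.DS_Store'; });
if (images.length === 0) return res.json([]); // nothing to read, but still respond

var collection = [];
images.forEach(function (file) {
    im.readMetadata(dir + '/' + file, function (err, metadata) {
        if (err) throw err; // a real handler should respond once with an error instead
        collection.push(metadata);
        if (collection.length === images.length) {
            res.json(collection); // respond once every file has been read
        }
    });
});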

Get data from fs.readFile [duplicate]

This question already has answers here:
Why is my variable unaltered after I modify it inside of a function? - Asynchronous code reference
(7 answers)
Closed 7 days ago.
var content;
fs.readFile('./Index.html', function read(err, data) {
    if (err) {
        throw err;
    }
    content = data;
});
console.log(content);
Logs undefined, why?
To elaborate on what #Raynos said, the function you have defined is an asynchronous callback. It doesn't execute right away; rather, it executes when the file loading has completed. When you call readFile, control is returned immediately and the next line of code is executed. So when you call console.log, your callback has not yet been invoked, and thus content has not yet been set. Welcome to asynchronous programming.
Example approaches
const fs = require('fs');

// First I want to read the file
fs.readFile('./Index.html', function read(err, data) {
    if (err) {
        throw err;
    }
    const content = data;

    // Invoke the next step here however you like
    console.log(content);   // Put all of the code here (not the best solution)
    processFile(content);   // Or put the next step in a function and invoke it
});

function processFile(content) {
    console.log(content);
}
Or better yet, as Raynos' example shows, wrap your call in a function and pass in your own callbacks. (Apparently this is better practice.) I think getting into the habit of wrapping your async calls in a function that takes a callback will save you a lot of trouble and messy code.
function doSomething (callback) {
    // any async callback invokes callback with response
}

doSomething (function doSomethingAfter(err, result) {
    // process the async result
});
There is actually a Synchronous function for this:
http://nodejs.org/api/fs.html#fs_fs_readfilesync_filename_encoding
Asynchronous
fs.readFile(filename, [encoding], [callback])
Asynchronously reads the entire contents of a file. Example:
fs.readFile('/etc/passwd', function (err, data) {
    if (err) throw err;
    console.log(data);
});
The callback is passed two arguments (err, data), where data is the contents of the file.
If no encoding is specified, then the raw buffer is returned.
SYNCHRONOUS
fs.readFileSync(filename, [encoding])
Synchronous version of fs.readFile. Returns the contents of the file named filename.
If encoding is specified then this function returns a string. Otherwise it returns a buffer.
var text = fs.readFileSync('test.md','utf8')
console.log (text)
function readContent(callback) {
    fs.readFile("./Index.html", function (err, content) {
        if (err) return callback(err)
        callback(null, content)
    })
}

readContent(function (err, content) {
    console.log(content)
})
Using Promises with ES7
Asynchronous use with mz/fs
The mz module provides promisified versions of the core node library. Using them is simple. First install the library...
npm install mz
Then...
const fs = require('mz/fs');

fs.readFile('./Index.html').then(contents => console.log(contents))
    .catch(err => console.error(err));
Alternatively you can write them in asynchronous functions:
async function myReadfile () {
    try {
        const file = await fs.readFile('./Index.html');
    }
    catch (err) { console.error(err) }
}
This line will work,
const content = fs.readFileSync('./Index.html', 'utf8');
console.log(content);
var data = fs.readFileSync('tmp/reltioconfig.json', 'utf8');
Use this to read a file synchronously; without an encoding it returns a Buffer instead of a string.
As said, fs.readFile is an asynchronous action. It means that when you tell node to read a file, you need to consider that it will take some time, and in the meantime node continues to run the following code. In your case that is console.log(content);.
It's like sending some part of your code for a long trip (like reading a big file).
Take a look at the comments that I've written:
var content;

// node, go fetch this file. when you come back, please run this "read" callback function
fs.readFile('./Index.html', function read(err, data) {
    if (err) {
        throw err;
    }
    content = data;
});

// in the meantime, please continue and run this console.log
console.log(content);
That's why content is still empty when you log it. node has not yet retrieved the file's content.
This could be resolved by moving console.log(content) inside the callback function, right after content = data;. This way you will see the log when node is done reading the file and after content gets a value.
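A minimal sketch of that change:
var content;

fs.readFile('./Index.html', function read(err, data) {
    if (err) {
        throw err;
    }
    content = data;
    console.log(content); // runs only once node has finished reading the file
});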
From Node v8
Use the built in promisify library to make these old callback functions more elegant.
const fs = require('fs');
const util = require('util');

const readFile = util.promisify(fs.readFile);

async function doStuff() {
    try {
        const content = await readFile(filePath, 'utf8');
        console.log(content);
    } catch (e) {
        console.error(e);
    }
}
From Node v10
You can use the promises version of fs API:
import { promises as fs } from 'fs';

async function doStuff() {
    try {
        const content = await fs.readFile(filePath, 'utf8');
        console.log(content);
    } catch (e) {
        console.error(e);
    }
}
const fs = require('fs')

function readDemo1(file1) {
    return new Promise(function (resolve, reject) {
        fs.readFile(file1, 'utf8', function (err, dataDemo1) {
            if (err)
                reject(err);
            else
                resolve(dataDemo1);
        });
    });
}

async function copyFile() {
    try {
        let dataDemo1 = await readDemo1('url')
        dataDemo1 += '\n' + await readDemo1('url')

        await writeDemo2(dataDemo1)
        console.log(dataDemo1)
    } catch (error) {
        console.error(error);
    }
}

copyFile();

function writeDemo2(dataDemo1) {
    return new Promise(function(resolve, reject) {
        fs.writeFile('text.txt', dataDemo1, 'utf8', function(err) {
            if (err)
                reject(err);
            else
                resolve("Promise Success!");
        });
    });
}
Sync and async ways of reading a file:
//fs module to read file in sync and async way
var fs = require('fs'),
    filePath = './sample_files/sample_css.css';

// this for async way
/*fs.readFile(filePath, 'utf8', function (err, data) {
    if (err) throw err;
    console.log(data);
});*/

//this is sync way
var css = fs.readFileSync(filePath, 'utf8');
console.log(css);
Node Cheat Available at read_file.
var path = "index.html"
const readFileAsync = fs.readFileSync(path, 'utf8');
// console.log(readFileAsync)
using simple readFileSync works for me.
var fs = require('fs');
var path = (process.cwd() + "\\text.txt");

fs.readFile(path, function(err, data) {
    if (err)
        console.log(err)
    else
        console.log(data.toString());
});
var content;
fs.readFile('./Index.html', function read(err, data) {
    if (err) {
        throw err;
    }
    content = data;
});
console.log(content);
This is just because node is asynchronous: it will not wait for the read function, so as soon as the program starts it logs the value as undefined, which is actually true because no value has been assigned to the content variable yet.
To handle this we can use promises, generators, etc.
We can use a promise in this way:
new Promise((resolve, reject) => {
    fs.readFile('./index.html', 'utf-8', (err, data) => {
        if (err) {
            reject(err); // in the case of an error, control flow goes to the catch block with the error that occurred.
        }
        else {
            resolve(data); // in the case of success, control flow goes to the then block with the content of the file.
        }
    });
})
.then((data) => {
    console.log(data); // use the content of the file here (in this then).
})
.catch((err) => {
    throw err; // handle the error here.
})
The following function would work with an async wrapper or promise then-chains:
const readFileAsync = async (path) => fs.readFileSync(path, 'utf8');
You can read a file with:
var readMyFile = function(path, cb) {
fs.readFile(path, 'utf8', function(err, content) {
if (err) return cb(err, null);
cb(null, content);
});
};
Adding on, you can write to a file:
var createMyFile = (path, data, cb) => {
fs.writeFile(path, data, function(err) {
if (err) return console.error(err);
cb();
});
};
and even chain them together:
var readFileAndConvertToSentence = function(path, callback) {
readMyFile(path, function(err, content) {
if (err) {
callback(err, null);
} else {
var sentence = content.split('\n').join(' ');
callback(null, sentence);
}
});
};
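For completeness, a short usage sketch (the file path is just a placeholder):
readFileAndConvertToSentence('./Index.html', function (err, sentence) {
    if (err) return console.error(err);
    console.log(sentence);
});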
To put it roughly, you're dealing with node.js, which is asynchronous in nature.
When we talk about async, we're talking about processing info or data while dealing with something else. It is not synonymous with parallel, please be reminded.
Your code:
var content;
fs.readFile('./Index.html', function read(err, data) {
    if (err) {
        throw err;
    }
    content = data;
});
console.log(content);
With your sample, it basically does the console.log part first, so the variable 'content' is still undefined.
If you really want the output, do something like this instead:
var content;
fs.readFile('./Index.html', function read(err, data) {
    if (err) {
        throw err;
    }
    content = data;
    console.log(content);
});
This is asynchronous. It will be hard to get used to, but it is what it is.
Again, this is a rough but quick explanation of what async is.
I like using fs-extra because all functions are promisified, right out of the box, so you can use await. So your code could look like this:
(async () => {
    try {
        const content = await fs.readFile('./Index.html');
        console.log(content);
    } catch (err) {
        console.error(err);
    }
})();
