Node.js file write in loop fails randomly

Here is my code:
function aCallbackInLoop(dataArray) {
    dataArray.forEach(function (item, index) {
        fs.appendFile(fileName, JSON.stringify(item) + "\r\n", function (err) {
            if (err) {
                console.log('Error writing data ' + err);
            } else {
                console.log('Data written');
            }
        });
    });
}
I get random errors:
Data written
Data written
.
.
Error writing data Error: UNKNOWN, open 'output/mydata.json'
Error writing data Error: UNKNOWN, open 'output/mydata.json'
.
.
Data written
Error writing data Error: UNKNOWN, open 'output/mydata.json'
The function (aCallbackInLoop) is a callback for a web-service request, which returns chunks of data in dataArray. Multiple web-service requests are being made in a loop, so this callback is perhaps being called in parallel. I suspect it's some file lock issue, but I am not sure how to resolve it.
PS: I have made sure it's not a data issue (I am logging all items in dataArray)
Edit: code after trying a write stream:
function writeDataToFile(fileName, data) {
    try {
        var wStream = fs.createWriteStream(fileName);
        wStream.write(JSON.stringify(data) + "\r\n");
        wStream.end();
    } catch (err) {
        console.log(err.message);
    }
}

function aCallbackInLoop(dataArray) {
    dataArray.forEach(function (item, index) {
        writeDataToFile(filename, item); // filename is a global var
    });
}

As you have observed, multiple appendFile calls cannot proceed while earlier appendFile calls still have the file open. In this particular case, it would be better to create a single write stream.
var wstream = fs.createWriteStream(fileName);
dataArray.forEach(function (item) {
    wstream.write(JSON.stringify(item) + "\r\n");
});
wstream.end();
If you want to know when all the data has been written, you can register a function for the finish event, like this:
var wstream = fs.createWriteStream(fileName);
wstream.on("finish", function () {
    // Writing to the file is actually complete.
});
dataArray.forEach(function (item) {
    wstream.write(JSON.stringify(item) + "\r\n");
});
wstream.end();
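One caveat worth noting (an addition beyond the original answer): fs.createWriteStream truncates the file by default, whereas fs.appendFile appends. If the same file is written across multiple calls, open the stream in append mode:

    var wstream = fs.createWriteStream(fileName, { flags: 'a' });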

Try using the synchronous version of appendFile - https://nodejs.org/api/fs.html#fs_fs_appendfilesync_filename_data_options
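For example (a minimal sketch, reusing fileName and dataArray from the question):

function aCallbackInLoop(dataArray) {
    dataArray.forEach(function (item) {
        try {
            // Blocks until the write completes, so calls cannot overlap.
            fs.appendFileSync(fileName, JSON.stringify(item) + "\r\n");
        } catch (err) {
            console.log('Error writing data ' + err);
        }
    });
}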

Related

Watson Assistant context is not updated

I am using Watson Assistant v1.
My problem is that every time I make a call to my Node.js code, where I return the context in order to have a coordinated conversation, the context is only updated once and I get stuck in one node of the conversation.
This is my code:
client.on('message', message => {
    // general variables
    var carpetaIndividual = <../../../>
    var cuerpoMensaje = <....>
    var emisorMensaje = <....>
    // detect if context exists
    if (fs.existsSync(carpetaIndividual + '/contexto.json')) {
        var watsonContexto = require(carpetaIndividual + '/contexto.json');
        var variableContexto = watsonContexto;
    } else {
        var variableContexto = {}
    }
    // connection with Watson Assistant
    assistant.message({
        input: { text: cuerpoMensaje },
        workspaceId: '<>',
        context: variableContexto,
    })
        .then(response => {
            let messageWatson = response.result.output.text[0];
            let contextoWatson = response.result.context;
            console.log('Chatbot: ' + messageWatson);
            // Save and create JSON file for context
            fs.writeFile(carpetaIndividual + '/contexto.json', JSON.stringify(contextoWatson), 'utf8', function (err) {
                if (err) {
                    console.error(err);
                }
            });
            // Send messages to my application
            client.sendMessage(emisorMensaje, messageWatson);
        })
        .catch(err => {
            console.log(err);
        });
});

client.initialize();
The contexto.json file is updated, but the code only ever reads the first version of the file, not the later updates.
This will be because you are using require to read the .json file. For all subsequent requires of an already-required file, the data is cached and reused, so your code keeps seeing the first version it loaded.
You will need to use fs.readFileSync (or fs.readFile) and JSON.parse:
// detect if context exists
if (fs.existsSync(carpetaIndividual + '/contexto.json')) {
    var watsonContexto = fs.readFileSync(carpetaIndividual + '/contexto.json');
    // Converting to JSON
    var variableContexto = JSON.parse(watsonContexto);
} else {
    var variableContexto = {}
}
There is another subtle problem with your code: you are relying on your async call to fs.writeFile completing before you read the file. This will be the case most of the time, but as you don't wait for fs.writeFile to complete, there is a chance that you may try to read the file before it has been written.
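One simple way to remove that race (a sketch, not part of the original answer) is to use the synchronous variant in place of the fs.writeFile call, so the context is guaranteed to be on disk before the next message is handled:

// Save and create JSON file for context.
// writeFileSync blocks until the data is written, so the next
// fs.existsSync / fs.readFileSync is guaranteed to see this update.
fs.writeFileSync(carpetaIndividual + '/contexto.json', JSON.stringify(contextoWatson), 'utf8');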

Issues reading a small CSV file in Node

ISSUE: I am trying to use Node.js streams to read a small CSV file (1 row) using the fast-csv module.
The CSV 'rows' are pushed to an array (rows[]) when the 'data' event is emitted. When 'end' is emitted, the data is updated in a DB. However, the 'end' event is triggered before the rows[] array can be populated. This happens intermittently, and sometimes the code works as intended.
My guess after reading the Node.js docs is that this is due to the small size of the CSV file. The data is being read in 'flowing' mode, and as soon as the first row is read, the 'end' event is triggered, which seems to happen before the record is pushed to the required array.
I tried using 'paused' mode, but it didn't work.
I am new to Node.js and not able to figure out how to make this function work. Any help or guidance would be appreciated.
CODE:
function updateToDb(filename, tempLocation) {
    const rows = [];
    const readStream = fs.createReadStream(tempLocation + '\\' + filename).pipe(csv.parse());
    return new Promise((resolve, reject) => {
        readStream.on('data', row => {
            console.log('Reading');
            rows.push(row);
        })
        .on('end', () => {
            console.log('Completed');
            let query = `UPDATE ${tables.earnings} SET result_date = CASE `;
            rows.forEach(element => {
                query += `WHEN isin = '${element[0]}' AND announcement_date = '${element[1]}' THEN '${element[2]}' ELSE result_date `;
            });
            query += ' END';
            connection.query(query, (error, results) => {
                if (error)
                    reject(error);
                else
                    resolve(results.changedRows);
            });
        })
        .on('error', error => {
            reject(error);
        });
    });
}
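For reference, a minimal sketch (not from the original thread) that separates the stream handling from the DB work; logging the row count in both handlers makes it easy to verify whether every 'data' event really fires before 'end':

function readRows(path) {
    return new Promise((resolve, reject) => {
        const rows = [];
        fs.createReadStream(path)
            .pipe(csv.parse())
            .on('error', reject)
            .on('data', row => {
                console.log('Reading row', rows.length);
                rows.push(row);
            })
            .on('end', () => {
                console.log('Completed with', rows.length, 'rows');
                resolve(rows);
            });
    });
}

// Usage: readRows(tempLocation + '\\' + filename).then(rows => { /* build and run the UPDATE */ });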

Azure Function - js - not running correctly, but no error in the logs

I am trying to build a Function to get data from my IoTHub and send the data to my web service via GET.
This is what I have in my Function:
var http = require('https');

module.exports = function (context, IoTHubMessages) {
    IoTHubMessages.forEach(message => {
        // context.log(`Processing message9: ${JSON.stringify(message)}`);
        console.log(`what the what???`);
        let url = `<my site in Azure>.azurewebsites.net`;
        console.log(url);
        let path = "/sensor/" + message.d1 + "/" + message.d2 + "/" + message.d3 + "/";
        console.log(path);
        var req = http.request({
            host: url,
            path: path,
            method: 'GET'
        });
        req.on('error', function (e) {
            console.log('problem with request: ' + e.message);
        });
        req.on('end', function (e) {
            console.log('finished with request');
        });
        req.end();
    });
    context.done();
};
The logs look like this:
2019-02-10T06:06:22.503 [Information] Executing 'Functions.IoTHub_EventHub1' (Reason='', Id=ea6109b0-5037-4f15-9efc-845222c6f404)
2019-02-10T06:06:22.512 [Information] Executed 'Functions.IoTHub_EventHub1' (Succeeded, Id=ea6109b0-5037-4f15-9efc-845222c6f404)
2019-02-10T06:06:22.786 [Information] Executing 'Functions.IoTHub_EventHub1' (Reason='', Id=f344c44f-a6ff-49b3-badb-58429b3476dc)
2019-02-10T06:06:22.796 [Information] Executed 'Functions.IoTHub_EventHub1' (Succeeded, Id=f344c44f-a6ff-49b3-badb-58429b3476dc)
If I uncomment this line:
context.log(`Processing message9: ${JSON.stringify(message)}`);
then the JSON data is displayed in the log output. In between the Executing and Executed pairs I see:
2019-02-10T05:59:28.906 [Information] Processing message9: {"topic":"iot","d1":"200","d2":"200","d3":"200"}
I am not getting my GET request
I don't see the console.log messages after the initial stringify line
I don't see any errors.
I've tried different quotation marks to see if Node preferred one or the other.
Occasionally when restarting the Function I see a message like this in the log, but I ignored it since the log had my JSON string:
2019-02-10T06:00:10.600 [Error] Executed 'Functions.IoTHub_EventHub1' (Failed, Id=2b3959cd-5014-4c50-89a3-77e37f2a890e)
Binding parameters to complex objects (such as 'Object') uses Json.NET serialization.
1. Bind the parameter type as 'string' instead of 'Object' to get the raw values and avoid JSON deserialization, or
2. Change the queue payload to be valid json. The JSON parser failed:
Unexpected character encountered while parsing value: T. Path '', line 0, position 0.
The problem here is that the forEach loop does not wait for the requests to finish before context.done is called.
When that happens, as #nelak points out in his comment, the Azure Function stops and nothing else happens.
Observe the following: I decided to replace the http library with a simple setTimeout, but the behaviour is more or less the same. What is happening with your code is illustrated in the next snippet; notice the order in which the console.log calls fire.
const myFn = function (context, IoTHubMessages) {
    IoTHubMessages.forEach(message => {
        console.log('inside foreach!');
        setTimeout(() => {
            console.log('inside settimeout, this is when your request is answered!');
        }, 1);
    });
    console.log('outside all!');
};

myFn(null, [0, 1]);
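Run as-is, the output makes the ordering concrete; the loop body and the final log run synchronously, and the timeouts only fire afterwards:

    inside foreach!
    inside foreach!
    outside all!
    inside settimeout, this is when your request is answered!
    inside settimeout, this is when your request is answered!

In the original Function, context.done sits where 'outside all!' is, so the runtime considers the invocation finished before any request is made.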
If you wanted different behaviour, you could rewrite this with the async/await pattern; it then reads as if it were synchronous, but is actually asynchronous.
var callIt = () => {
    return new Promise((resolve) => {
        setTimeout(() => {
            console.log('inside settimeout!');
            return resolve('ok');
        }, 1);
    });
};

var myFnAwait = async (context, IoTHubMessages) => {
    for (const i of IoTHubMessages) {
        console.log('before settimeout');
        await callIt();
        console.log('after timeout');
    }
    console.log('outside all!');
};

myFnAwait(null, [0, 1]);
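Applied to the original Function (a sketch, assuming a runtime where an async function can simply return instead of calling context.done; the promise wrapper around https.request is illustrative, not part of the original answer):

const https = require('https');

// Wrap the GET request in a promise so it can be awaited.
function callSensorApi(host, path) {
    return new Promise((resolve, reject) => {
        const req = https.request({ host: host, path: path, method: 'GET' }, res => {
            res.on('data', () => {}); // drain the response so 'end' can fire
            res.on('end', resolve);
        });
        req.on('error', reject);
        req.end();
    });
}

module.exports = async function (context, IoTHubMessages) {
    for (const message of IoTHubMessages) {
        const path = `/sensor/${message.d1}/${message.d2}/${message.d3}/`;
        await callSensorApi('<my site in Azure>.azurewebsites.net', path);
    }
    // Every request has completed by the time the function returns.
};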

Request ends before readstream events are handled

I'm attempting to make a nodejs function which reads back data from a file with the following code:
app.post('/DownloadData', function(req, res) {
    req.on('data', function(data) {
        if (fs.existsSync('demoDataFile.dat')) {
            var rstream = fs.createReadStream('demoDataFile.dat');
            var bufs = [];
            rstream.on('data', function(chunk) {
                bufs.push(chunk);
                console.log("data");
            });
            rstream.on('end', function() {
                downbuf = Buffer.concat(bufs);
                console.log(downbuf.length);
            });
        }
    });
    req.on('end', function() {
        console.log("end length: " + downbuf.length);
        res.end(downbuf);
    });
    req.on('error', function(err) {
        console.error(err.stack);
    });
});
The problem is, the buffer comes back empty, as req.on('end', ...) is called before any of the rstream events fire ("data" and the length aren't printed to the console until after "end length: " has been printed). Am I handling the events wrong, or is there some other issue? Any guidance would be appreciated.
Not sure why you're reading from req, because you're not using the body data at all. Also, because the data event can be triggered multiple times, the code that reads the file may also get called multiple times, which probably isn't what you want.
Here's what I think you want:
app.post("/DownloadData", function(req, res) {
let stream = fs.createReadStream("demoDataFile.dat");
// Handle error regarding to creating/opening the file stream.
stream.on('error', function(err) {
console.error(err.stack);
res.sendStatus(500);
});
// Read the file data into memory.
let bufs = [];
stream.on("data", function(chunk) {
bufs.push(chunk);
console.log("data");
}).on("end", function() {
let downbuf = Buffer.concat(bufs);
console.log(downbuf.length);
...process the buffer...
res.end(downbuf);
});
});
You have to be aware that this will read the file into memory entirely. If it's a big file, it may require a lot of memory.
Since you don't specify which operations you have to perform on the file data, I can't recommend an alternative, but there are various modules available that can help you process file data in a streaming fashion (i.e. without having to read the file into memory entirely).
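If it turns out no processing is needed at all, one option (a sketch beyond the original answer) is to skip buffering entirely and pipe the file straight into the response:

app.post("/DownloadData", function(req, res) {
    const stream = fs.createReadStream("demoDataFile.dat");
    stream.on("error", function(err) {
        console.error(err.stack);
        res.sendStatus(500);
    });
    // Stream the file to the client chunk by chunk, without
    // holding the whole file in memory.
    stream.pipe(res);
});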

"Meteor code must always run within a Fiber" when calling Collection.insert on server

I have the following code in server/statusboard.js:
var require = __meteor_bootstrap__.require,
    request = require("request");

function getServices(services) {
    services = [];
    request('http://some-server/vshell/index.php?type=services&mode=json', function (error, response, body) {
        var resJSON = JSON.parse(body);
        _.each(resJSON, function (data) {
            var host = data["host_name"];
            var service = data["service_description"];
            var hardState = data["last_hard_state"];
            var currState = data["current_state"];
            services += {host: host, service: service, hardState: hardState, currState: currState};
            Services.insert({host: host, service: service, hardState: hardState, currState: currState});
        });
    });
}

Meteor.startup(function () {
    var services = [];
    getServices(services);
    console.log(services);
});
Basically, it's pulling some data from a JSON feed and trying to push it into a collection.
When I start up Meteor I get the following exception:
app/packages/livedata/livedata_server.js:781
throw exception;
^
Error: Meteor code must always run within a Fiber
at [object Object].withValue (app/packages/meteor/dynamics_nodejs.js:22:15)
at [object Object].apply (app/packages/livedata/livedata_server.js:767:45)
at [object Object].insert (app/packages/mongo-livedata/collection.js:199:21)
at app/server/statusboard.js:15:16
at Array.forEach (native)
at Function.<anonymous> (app/packages/underscore/underscore.js:76:11)
at Request._callback (app/server/statusboard.js:9:7)
at Request.callback (/usr/local/meteor/lib/node_modules/request/main.js:108:22)
at Request.<anonymous> (/usr/local/meteor/lib/node_modules/request/main.js:468:18)
at Request.emit (events.js:67:17)
Exited with code: 1
I'm not too sure what that error means. Does anyone have any ideas, or can suggest a different approach?
Just wrapping your function in a Fiber might not be enough and can lead to unexpected behavior.
The reason is that, along with the Fiber itself, Meteor requires a set of variables attached to the fiber. Meteor uses the data attached to a fiber as a dynamic scope, and the easiest way to preserve it when calling a third-party API is to use Meteor.bindEnvironment:
T.post('someurl', Meteor.bindEnvironment(function (err, res) {
    // do stuff
    // can access Meteor.userId
    // still have MongoDB write fence
}, function () { console.log('Failed to bind environment'); }));
Watch these videos on EventedMind if you want to know more:
https://www.eventedmind.com/posts/meteor-dynamic-scoping-with-environment-variables
https://www.eventedmind.com/posts/meteor-what-is-meteor-bindenvironment
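Applied to the question's code (a sketch, assuming the request callback is the only place where Meteor APIs are touched):

function getServices() {
    request('http://some-server/vshell/index.php?type=services&mode=json',
        Meteor.bindEnvironment(function (error, response, body) {
            var resJSON = JSON.parse(body);
            _.each(resJSON, function (data) {
                // Runs with the Meteor environment attached, so
                // Services.insert no longer throws the Fiber error.
                Services.insert({
                    host: data["host_name"],
                    service: data["service_description"],
                    hardState: data["last_hard_state"],
                    currState: data["current_state"]
                });
            });
        }, function (err) { console.log('Failed to bind environment', err); }));
}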
As mentioned above, it is because you're executing code within a callback.
Any code you're running on the server-side needs to be contained within a Fiber.
Try changing your getServices function to look like this:
function getServices(services) {
    Fiber(function() {
        services = [];
        request('http://some-server/vshell/index.php?type=services&mode=json', function (error, response, body) {
            var resJSON = JSON.parse(body);
            _.each(resJSON, function (data) {
                var host = data["host_name"];
                var service = data["service_description"];
                var hardState = data["last_hard_state"];
                var currState = data["current_state"];
                services += {host: host, service: service, hardState: hardState, currState: currState};
                Services.insert({host: host, service: service, hardState: hardState, currState: currState});
            });
        });
    }).run();
}
I just ran into a similar problem and this worked for me. I should say, though, that I am very new to this, and I do not know if this is how it should be done.
You could probably get away with wrapping only your insert statement in the Fiber, but I am not positive.
Based on my tests, it is the insert itself that has to be wrapped, as the examples below show.
For example, I did this and it still failed with the Fibers error:
function insertPost(args) {
    if (args) {
        Fiber(function() {
            post_text = args.text.slice(0, 140);
            T.post('statuses/update', { status: post_text },
                function(err, reply) {
                    if (reply) {
                        // TODO remove console output
                        console.log('reply: ' + JSON.stringify(reply, 0, 4));
                        console.log('incoming twitter string: ' + reply.id_str);
                        // TODO insert record
                        var ts = Date.now();
                        id = Posts.insert({
                            post: post_text,
                            twitter_id_str: reply.id_str,
                            created: ts
                        });
                    } else {
                        console.log('error: ' + JSON.stringify(err, 0, 4));
                        // TODO maybe store locally even though it failed on twitter
                        // and run service in background to push them later?
                    }
                }
            );
        }).run();
    }
}
I did this and it ran fine with no errors:
function insertPost(args) {
    if (args) {
        post_text = args.text.slice(0, 140);
        T.post('statuses/update', { status: post_text },
            function(err, reply) {
                if (reply) {
                    // TODO remove console output
                    console.log('reply: ' + JSON.stringify(reply, 0, 4));
                    console.log('incoming twitter string: ' + reply.id_str);
                    // TODO insert record
                    var ts = Date.now();
                    Fiber(function() {
                        id = Posts.insert({
                            post: post_text,
                            twitter_id_str: reply.id_str,
                            created: ts
                        });
                    }).run();
                } else {
                    console.log('error: ' + JSON.stringify(err, 0, 4));
                    // TODO maybe store locally even though it failed on twitter
                    // and run service in background to push them later?
                }
            }
        );
    }
}
I thought this might help others encountering this issue. I have not yet tested calling an async external service after internal code and wrapping that in a Fiber; that might be worth testing as well. In my case I needed to know the remote action had happened before doing my local action.
Hope this contributes to this question thread.
