Watson Assistant context is not updated - javascript

I use Watson Assistant v1.
My problem: every time I call my Node.js code, which returns the context so the conversation stays coordinated, the context is only updated once and I get stuck on a single node of the conversation.
This is my code:
client.on('message', message => {
  // general variables
  var carpetaIndividual = <../../../>
  var cuerpoMensaje = <....>
  var emisorMensaje = <....>
  // detect if context exists
  if (fs.existsSync(carpetaIndividual + '/contexto.json')) {
    var watsonContexto = require(carpetaIndividual + '/contexto.json');
    var variableContexto = watsonContexto;
  } else {
    var variableContexto = {};
  }
  // connection with Watson Assistant
  assistant.message(
    {
      input: { text: cuerpoMensaje },
      workspaceId: '<>',
      context: variableContexto,
    })
    .then(response => {
      let messageWatson = response.result.output.text[0];
      let contextoWatson = response.result.context;
      console.log('Chatbot: ' + messageWatson);
      // Save and create JSON file for context
      fs.writeFile(carpetaIndividual + '/contexto.json', JSON.stringify(contextoWatson), 'utf8', function (err) {
        if (err) {
          console.error(err);
        }
      });
      // Send messages to my application
      client.sendMessage(emisorMensaje, messageWatson);
    })
    .catch(err => {
      console.log(err);
    });
});
client.initialize();
The contexto.json file is updated on disk, but when it is read back the code only ever sees the first version of the file, not the later updates.

This will be because you are using require to read the .json file. For all subsequent requires of an already-required file, the data is cached and reused.
You will need to use fs.readFile (here, fs.readFileSync) and JSON.parse:
// detect if context exists
if (fs.existsSync(carpetaIndividual + '/contexto.json')) {
  var watsonContexto = fs.readFileSync(carpetaIndividual + '/contexto.json');
  // Converting to JSON
  var variableContexto = JSON.parse(watsonContexto);
} else {
  var variableContexto = {};
}
There is another subtle problem with your code: you are relying on the async call to fs.writeFile completing before the next read of the file. That will be the case most of the time, but since you don't wait for fs.writeFile to complete, there is a chance that you read the file before it has been written.
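If you want to rule that race out entirely, one minimal option (a sketch, using the same variables as above) is to write the context synchronously:
// Synchronous variant: blocks until contexto.json is actually on disk.
fs.writeFileSync(carpetaIndividual + '/contexto.json', JSON.stringify(contextoWatson), 'utf8');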

Related

AWS Lambda function global variables

I am writing an AWS Lambda function in JavaScript (Node.js) that interacts with CodeCommit through the AWS SDK.
The communication between the services works as expected, I am getting data within the CodeCommit function, but the issue I am facing appears when I want to use this data outside of the function.
I have tried two approaches:
1. Global Variable
Code:
var aws = require('aws-sdk');
var codecommit = new aws.CodeCommit({ apiVersion: '2015-04-13' });
var repoName = ''; // Declared my global variable here
exports.handler = function(event, context) {
  var commitId = "69a5f8eeba340d71ba41b8f20d77cc20b301ff52";
  var repository = "my-repository";
  var params = {
    repositoryName: repository
  };
  codecommit.getRepository(params, function(err, data) {
    if (err) {
      console.log(err);
      var message = "Error getting repository metadata for repository " + repository;
      console.log(message);
      context.fail(message);
    } else {
      console.log('Repository Name:', data.repositoryMetadata.repositoryName); // Shown with data
      repoName = data.repositoryMetadata.repositoryName; // Setting the variable
      console.log('Account Id:', data.repositoryMetadata.accountId); // Shown with data
    }
  });
  console.log(repoName); // Shown as blank in the output
};
Output:
The last console.log in the code is the first to print in the execution results, but it shows blank. The other two console.log calls (inside the callback) print afterwards, and they show the data.
2. Function
Code:
var aws = require('aws-sdk');
var codecommit = new aws.CodeCommit({ apiVersion: '2015-04-13' });
exports.handler = function(event, context) {
  var commitId = "69a5f8eeba340d71ba41b8f20d77cc20b301ff52";
  var repository = "my-repository";
  var repoData = getRepository(repository);
  console.log('Repository Name:', repoData.repositoryName);
  console.log('Account Id:', repoData.accountId);
};
function getRepository(repository) {
  var params = {
    repositoryName: repository
  };
  codecommit.getRepository(params, function(err, data) {
    if (err) {
      console.log(err);
      var message = "Error getting repository metadata for repository " + repository;
      console.log(message);
      context.fail(message);
    } else {
      var repoData = {};
      repoData.repositoryName = data.repositoryMetadata.repositoryName;
      repoData.accountId = data.repositoryMetadata.accountId;
      console.log(repoData); // Shows output in execution results when lines 11 & 12 are commented
      return repoData;
    }
  });
}
Output:
{
  "errorType": "TypeError",
  "errorMessage": "Cannot read property 'repositoryName' of undefined",
  "trace": [
    "TypeError: Cannot read property 'repositoryName' of undefined",
    "    at Runtime.exports.handler (/var/task/index.js:57:46)",
    "    at Runtime.handleOnce (/var/runtime/Runtime.js:66:25)"
  ]
}
Conclusion
None of those approaches worked. The data is always visible within the callback but never outside of it. I suspect that the code outside the callback executes before the callback itself, and I wonder if I can make the code wait until the callback has executed before doing the console.log (and the other actions after it). Or maybe I am wrong on another level?
You are using a callback model, in which case the console.log in the first example is being hit before the code in the callback. A better option would be to use async/await.
var aws = require('aws-sdk');
var codecommit = new aws.CodeCommit({ apiVersion: '2015-04-13' });
var repoName = ''; // Declared my global variable here
exports.handler = async function(event, context) {
  var commitId = "69a5f8eeba340d71ba41b8f20d77cc20b301ff52";
  var repository = "my-repository";
  var params = {
    repositoryName: repository
  };
  var data = await codecommit.getRepository(params).promise();
  console.log('Repository Name:', data.repositoryMetadata.repositoryName); // Shown with data
  repoName = data.repositoryMetadata.repositoryName; // Setting the variable
  console.log('Account Id:', data.repositoryMetadata.accountId); // Shown with data
  console.log(repoName);
};
Notice that I'm not catching the error here, but if you wanted to you can use a try/catch block. Just be sure you throw a new error in that case if you want the function to fail.
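A sketch of that try/catch variant, following the advice above:
try {
  var data = await codecommit.getRepository(params).promise();
  console.log('Repository Name:', data.repositoryMetadata.repositoryName);
} catch (err) {
  console.log(err);
  // Re-throw so the Lambda invocation is marked as failed.
  throw new Error('Error getting repository metadata for repository ' + repository);
}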

Node.js: how to return an array from within an asynchronous callback for use in another file

File called testing.js
I can do whatever I like with the data in saveWeatherData, but I cannot call this function and return the data without getting 'undefined'.
For example, if I try the code below in saveWeatherData it prints out the summary as expected:
console.log(`The summary of the weather today is: ${dataArray[0]}`);
However, I want to use these values within another file, such as a server file that, when connected to, will display the weather summary, temperature, etc.
So I need to return an array with these values in it, so that I can call this function and get my data stored in an array for further use.
I know that the reason the array dataArray is returning undefined is asynchronous code.
The array is returned before we have gotten the data in the callback.
My question: is there any way to do what I am trying to do?
I tried my best to explain the problem and what I want to do; hopefully it's understandable.
Would I have to use a callback inside of a callback, to return the data once it has been fetched?
I just can't get my head around it and have tried multiple things to get the result I am looking for.
My last idea, and something I would prefer not to do, is to use the 'fs' module to save the data to a text or JSON file for use in my other files, reading the data back from the saved file...
I feel I'm close but can't get over the last hurdle, so I've decided to ask for a little help; even just point me in the right direction and I'll continue to try and figure it out.
Phew...
Thank you for your time!
const request = require("request");
let dataArray = [];
let saveWeatherData = function(weatherData) {
  dataArray = weatherData;
  return dataArray;
};
let getWeatherData = function(callback) {
  request({
    url: `https://api.forecast.io/forecast/someexamplekey/1,-1`,
    json: true
  }, (error, response, body) => {
    // Creating array to hold weather data until we can save it using callback...
    let array = [];
    if (error) {
      console.log("Unable to connect with Dark Sky API servers.");
    } else {
      console.log(`Successfully connected to Dark Sky API servers!\n`);
      array.push(body.currently.summary, body.currently.temperature, body.currently.apparentTemperature, body.currently.windSpeed, body.currently.windBearing);
      callback(array);
    }
  });
};
getWeatherData(saveWeatherData);
module.exports = {
  saveWeatherData
};
My Other File...
File called server.js
const http = require("http");
const testing = require("./testing");
function onRequest(request, response) {
  let data = testing.saveWeatherData();
  console.log(`A user made a request: ${request.url}`);
  response.writeHead(200, {"content-type": "text/plain"});
  response.write("<!DOCTYPE html>");
  response.write("<html>");
  response.write("<head>");
  response.write("<title>Weather</title>");
  response.write("</head>");
  response.write("<body>");
  response.write("Weather summary for today: " + data[0]);
  response.write("</body>");
  response.write("</html>");
  response.end();
}
http.createServer(onRequest).listen(8888);
console.log("Server is now running on port 8888...");
I'm still not sure what you are trying to do. However, I think you're not exporting what you're supposed to be exporting. To avoid the use of so many callbacks you can use async/await.
Change this part of your server.js
async function onRequest(request, response) {
  let data = await testing.getWeatherData();
  console.log(`A user made a request: ${request.url}`);
  response.writeHead(200, { 'content-type': 'text/plain' });
  response.write('<!DOCTYPE html>');
  response.write('<html>');
  response.write('<head>');
  response.write('<title>Weather</title>');
  response.write('</head>');
  response.write('<body>');
  response.write('Weather summary for today: ' + data[0]);
  response.write('</body>');
  response.write('</html>');
  response.end();
}
And this part of your testing.js:
let getWeatherData = function() {
  return new Promise(resolve =>
    request(
      {
        url: `https://api.darksky.net/forecast/someexamplekey/1,-1`,
        json: true
      },
      (error, response, body) => {
        // Creating array to hold weather data until we can save it using callback...
        let array = [];
        if (error) {
          console.log('Unable to connect with Dark Sky API servers.');
        } else {
          console.log(`Successfully connected to Dark Sky API servers!\n`);
          array.push(
            body.currently.summary,
            body.currently.temperature,
            body.currently.apparentTemperature,
            body.currently.windSpeed,
            body.currently.windBearing
          );
          resolve(array);
        }
      }
    )
  );
};
module.exports = {
  getWeatherData
};
It will check for new weather on each request. If you want to save the result to avoid fetching every single time, you will need to do something else. But I think for a weather app the important thing is to keep it updated.
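If you did want to avoid hitting the API on every request, one possible (hypothetical) approach is a small in-memory cache around getWeatherData:
// Refetch only when the cached result is older than ten minutes.
let cachedWeather = null;
let cachedAt = 0;

async function getWeatherDataCached() {
  if (cachedWeather && Date.now() - cachedAt < 10 * 60 * 1000) {
    return cachedWeather;
  }
  cachedWeather = await getWeatherData();
  cachedAt = Date.now();
  return cachedWeather;
}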

Passing HTML input file as the parameter of Firebase Cloud Function

I am pretty new to this area and I started with Firebase Cloud Functions 2 days ago.
Sorry, I am still a student, so I might not clearly understand some documentation.
I am trying to figure out how the parameter is passed from my client-side JavaScript to the Firebase Cloud Function.
My cloud function:
exports.OCR = functions.https.onCall((req) => {
  const vision = require('@google-cloud/vision');
  // Creates a client
  const client = new vision.ImageAnnotatorClient();
  console.log(req);
  // Performs label detection on the image file
  client
    .documentTextDetection(req)
    .then((results) => {
      console.log("Entered");
      console.log(req);
      const fullTextAnnotation = results[0].fullTextAnnotation;
      console.log(fullTextAnnotation.text);
      return results[0].fullTextAnnotation.text;
    })
    .catch(err => {
      console.error('ERROR:', err);
      return "error";
    });
});
I am using Firebase Cloud Functions and the Google Vision API.
Actually, I tried to pass the parameter like this.
My client-side code:
document.getElementById("fileInput").click();
var file = document.getElementById("fileInput");
var fileInput = document.getElementById('fileInput');
fileInput.addEventListener('change', function (e) {
  var file = e.target.files[0];
  // Do something with the image file.
  var tmppath = URL.createObjectURL(file);
  console.log(file);
  console.log(tmppath);
  //var url = "https://firebasestorage.googleapis.com/v0/b/recette-f3ef5.appspot.com/o/FB1.gif?alt=media&token=28727220-181c-440e-87ae-4808b5c9ba28";
  OCR(file)
    .then(function(result) {
      console.log(result);
    }).catch(function(err) {
      console.log(err);
    });
});
It did not work; I always get a null return when I trigger the function.
So, my question is: how can I pass the file (from the HTML input tag) to my cloud function?
P.S.: when I tried the code locally with node the_code.js it worked.
According to the Google Cloud Node.js library documentation, the documentTextDetection function should receive a JS object like this:
var image = {
  source: {imageUri: 'gs://path/to/image.jpg'}
};
vision.documentTextDetection(image).then(response => {
  // doThingsWith(response);
}).catch(err => {
  console.error(err);
});
The file you are passing to the OCR function probably has a different structure than the one defined in the documentation.
There are some variants to this:
If the key is source, the value should be another object containing imageUri or filename as a key and a string as a value.
If the key is content, the value should be a Buffer.
So your code should look something like this.
console.log(tmppath);
//var url = "https://firebasestorage.googleapis.com/v0/b/recette-f3ef5.appspot.com/o/FB1.gif?alt=media&token=28727220-181c-440e-87ae-4808b5c9ba28";
image = {source: {imageUri: 'https://firebasestorage.googleapis.com/v0/b/recette-f3ef5.appspot.com/o/FB1.gif?alt=media&token=28727220-181c-440e-87ae-4808b5c9ba28'}}
OCR(image)
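If you want to send the raw bytes instead of a URI, a sketch of the content variant (assuming the image bytes are available as a Buffer, for example read on the server) would be:
const fs = require('fs');
// 'content' variant: pass the raw image bytes as a Buffer.
const image = {
  content: fs.readFileSync('/path/to/image.jpg')
};
OCR(image);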
Please provide complete error messages and a description of what file is.

Node.js - Organising code and closures - SFTP/Inotify

I was hoping I could get some advice on why my Node.js program is behaving the way it is.
I am using two modules, node-sftp and node-inotify. I have set up node-inotify to watch a directory and call a function when something is written there, the function being an SFTP upload.
Processing one file at a time is fine, but when I drop 4 files there in one go, the function is called four times yet only one SFTP upload goes through.
Do I need to order my code in a particular way to ensure that the SFTP upload occurs x times? Is this something to do with closures, perhaps?
This is a basic version of my code...
"event_handler" is called when something happens on a "watched" directory
"check_event" figures out if this type of event is one we want, in this case it's a "write"
"ftp_to_server" prepare connection details
"do_ftp" basically uses the node-sftp module to perform the sftp upload
event_handler = function(event) {
  var supplier;
  check_event(event, supplier, type, ftp_to_server);
};
=================
function check_event(event, handler)
{
  if (event.type === 'xxxxxx') {
    var file_to_process_name = 'abc';
    var file_to_process_dir = 'abc';
    var remote_dir = 'abc';
    handler(file_to_process_name, file_to_process_dir, remote_dir);
  }
}
function ftp_to_server(file_to_process_name, file_to_process_dir, remote_dir) {
  var connection_details = conf.ftp.connections;
  do_ftp(connection_details, file_to_process_name, file_to_process_dir, remote_dir);
}
function do_ftp(connection_details, file_to_process_name, file_to_process_dir, remote_dir) {
  var credentials = {
    // FTP settings here
  };
  var local_file = file_to_process_dir + file_to_process_name;
  var remote_file = remote_dir + file_to_process_name;
  connection = new sftp(credentials, function(err) {
    if (err) {
      throw err;
    }
    connection.writeFile(remote_file, fs.readFileSync(local_file, "utf8"), null, function(err) {
      if (err) {
        throw err;
      }
      console.info('FTP PUT DONE');
    });
  });
};
Your "connection = new sftp(credentials, function(err) {"
should be
var connection = new sftp(credentials, function(err) {
The way you currently have it coded, "connection" is a global and you are writing over it.
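In other words, a minimal corrected sketch of do_ftp (only the declaration changes):
function do_ftp(connection_details, file_to_process_name, file_to_process_dir, remote_dir) {
  var credentials = { /* FTP settings here */ };
  var local_file = file_to_process_dir + file_to_process_name;
  var remote_file = remote_dir + file_to_process_name;
  // 'var' scopes the connection to this call, so four dropped files get four connections.
  var connection = new sftp(credentials, function(err) {
    if (err) throw err;
    connection.writeFile(remote_file, fs.readFileSync(local_file, "utf8"), null, function(err) {
      if (err) throw err;
      console.info('FTP PUT DONE');
    });
  });
}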

Writing to files in Node.js

I've been trying to find a way to write to a file when using Node.js, but with no success. How can I do that?
There are a lot of details in the File System API. The most common way is:
const fs = require('fs');

fs.writeFile("/tmp/test", "Hey there!", function(err) {
  if (err) {
    return console.log(err);
  }
  console.log("The file was saved!");
});

// Or
fs.writeFileSync('/tmp/test-sync', 'Hey there!');
Currently there are three ways to write a file:
fs.write(fd, buffer, offset, length, position, callback)
You need to wait for the callback to ensure that the buffer is written to disk. It's not buffered.
fs.writeFile(filename, data, [encoding], callback)
All data must be stored at the same time; you cannot perform sequential writes.
fs.createWriteStream(path, [options])
Creates a WriteStream, which is convenient because you don't need to wait for a callback. But again, it's not buffered.
A WriteStream, as the name says, is a stream. A stream by definition is “a buffer” containing data which moves in one direction (source ► destination). But a writable stream is not necessarily “buffered”. A stream is “buffered” when you write n times, and at time n+1, the stream sends the buffer to the kernel (because it's full and needs to be flushed).
In other words: “A buffer” is the object. Whether or not it “is buffered” is a property of that object.
If you look at the code, the WriteStream inherits from a writable Stream object. If you pay attention, you’ll see how they flush the content; they don't have any buffering system.
If you write a string, it’s converted to a buffer, and then sent to the native layer and written to disk. When writing strings, they're not filling up any buffer. So, if you do:
write("a")
write("b")
write("c")
You're doing:
fs.write(new Buffer("a"))
fs.write(new Buffer("b"))
fs.write(new Buffer("c"))
That’s three calls to the I/O layer. Although you're using “buffers”, the data is not buffered. A buffered stream would do: fs.write(new Buffer ("abc")), one call to the I/O layer.
Node.js v0.12 (stable version announced 02/06/2015) now supports two functions, cork() and uncork(). These functions finally allow you to buffer and flush the write calls.
For example, in Java there are some classes that provide buffered streams (BufferedOutputStream, BufferedWriter...). If you write three bytes, these bytes will be stored in the buffer (memory) instead of doing an I/O call just for three bytes. When the buffer is full the content is flushed and saved to disk. This improves performance.
I'm not discovering anything, just remembering how a disk access should be done.
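A minimal sketch of what that looks like (assuming Node.js >= 0.12):
const fs = require('fs');
const stream = fs.createWriteStream('out.txt');

stream.cork(); // buffer subsequent writes in memory
stream.write('a');
stream.write('b');
stream.write('c');
// uncork() on the next tick flushes 'abc' with as few I/O calls as possible.
process.nextTick(() => stream.uncork());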
You can of course make it a little more advanced. Non-blocking, writing bits and pieces, not writing the whole file at once:
var fs = require('fs');
var stream = fs.createWriteStream("my_file.txt");
stream.once('open', function(fd) {
  stream.write("My first row\n");
  stream.write("My second row\n");
  stream.end();
});
Synchronous Write
fs.writeFileSync(file, data[, options])
const fs = require('fs');
fs.writeFileSync("foo.txt", "bar");
Asynchronous Write
fs.writeFile(file, data[, options], callback)
const fs = require('fs');
fs.writeFile('foo.txt', 'bar', (err) => { if (err) throw err; });
Where
file <string> | <Buffer> | <URL> | <integer> filename or file descriptor
data <string> | <Buffer> | <Uint8Array>
options <Object> | <string>
callback <Function>
Worth reading the official File System (fs) docs.
Update: async/await
const fs = require('fs');
const util = require('util');
const writeFile = util.promisify(fs.writeFile);

const fn = async () => { await writeFile('foo.txt', 'bar'); };
fn();
var fs = require('fs');
var path = 'public/uploads/file.txt',
    buffer = Buffer.from("some content\n"); // Buffer.from() replaces the deprecated new Buffer()

fs.open(path, 'w', function(err, fd) {
  if (err) {
    throw 'error opening file: ' + err;
  }
  fs.write(fd, buffer, 0, buffer.length, null, function(err) {
    if (err) throw 'error writing file: ' + err;
    fs.close(fd, function() {
      console.log('file written');
    });
  });
});
The answers provided are dated; a newer way to do this is:
const fsPromises = require('fs').promises;
// note: await must run inside an async function (or an environment with top-level await)
await fsPromises.writeFile('/path/to/file.txt', 'data to write');
See the fs.promises documentation for more info.
I liked Index of ./articles/file-system.
It worked for me.
See also: How do I write files in node.js?
var fs = require('fs');
fs.writeFile('helloworld.txt', 'Hello World!', function (err) {
  if (err)
    return console.log(err);
  console.log('Wrote Hello World in file helloworld.txt, just check it');
});
Contents of helloworld.txt:
Hello World!
Update:
On Linux, Node writes to the current directory; on some other systems it seems not to, so I add this comment just in case:
Use ROOT_APP_PATH = fs.realpathSync('.'); console.log(ROOT_APP_PATH); to find out where the file is written.
I know the question asked about "write", but in a more general sense "append" might be useful in some cases, as it is easy to use in a loop to add text to a file (whether the file exists or not). Use a "\n" if you want to add lines, e.g.:
var fs = require('fs');
for (var i = 0; i < 10; i++) {
  fs.appendFileSync("junk.csv", "Line:" + i + "\n");
}
OK, it's quite simple, as Node has built-in functionality for this: the fs module, which stands for File System and is basically the Node.js File System module.
So first require it in your server.js file like this:
var fs = require('fs');
fs has a few methods for writing to a file, but my preferred way is using appendFile. This will append the content to the file, and if the file doesn't exist it will create one. The code could be like below:
fs.appendFile('myFile.txt', 'Hi Ali!', function (err) {
  if (err) throw err;
  console.log('Thanks, it\'s saved to the file!');
});
You may write to a file using the fs (file system) module.
Here is an example of how you may do it:
const fs = require('fs');

const writeToFile = (fileName, callback) => {
  fs.open(fileName, 'wx', (error, fileDescriptor) => {
    if (!error && fileDescriptor) {
      // Do something with the file here ...
      // (newData is assumed to be defined by the surrounding code)
      fs.writeFile(fileDescriptor, newData, (error) => {
        if (!error) {
          fs.close(fileDescriptor, (error) => {
            if (!error) {
              callback(false);
            } else {
              callback('Error closing the file');
            }
          });
        } else {
          callback('Error writing to new file');
        }
      });
    } else {
      callback('Could not create new file, it may already exist');
    }
  });
};
You might also want to get rid of this callback-inside-callback code structure by using Promises and async/await. This will make the asynchronous code structure much flatter. To do that, there is a handy util.promisify(original) function that can be utilized. It allows us to switch from callbacks to promises. Take a look at the example with fs functions below:
// Dependencies.
const util = require('util');
const fs = require('fs');

// Promisify "error-back" functions.
const fsOpen = util.promisify(fs.open);
const fsWrite = util.promisify(fs.writeFile);
const fsClose = util.promisify(fs.close);

// Now we may create an 'async' function with 'await's.
async function doSomethingWithFile(fileName) {
  const fileDescriptor = await fsOpen(fileName, 'wx');
  // Do something with the file here...
  await fsWrite(fileDescriptor, newData);
  await fsClose(fileDescriptor);
}
You can write to files with streams.
Just do it like this:
const fs = require('fs');

const stream = fs.createWriteStream('./test.txt');
stream.write("Example text");
stream.end(); // close the stream so the data is flushed and the file handle released
var fs = require('fs');
var path = process.cwd(); // current working directory

fs.writeFile(path + "\\message.txt", "Hello", function(err) {
  if (err) throw err;
  console.log("success");
});
For example: read a file and write to another file:
var fs = require('fs');
var path = process.cwd();

fs.readFile(path + "\\from.txt", function(err, data) {
  if (err)
    console.log(err);
  else {
    // pass the data that was read; without it fs.writeFile has nothing to write
    fs.writeFile(path + "\\to.text", data, function(erro) {
      if (erro)
        console.log("error : " + erro);
      else
        console.log("success");
    });
  }
});
Here we use w+ to open the file for both reading and writing; if the file is not found at the path, it will be created automatically.
fs.open(path, 'w+', function(err, fd) {
  if (err) {
    console.log("ERROR !! " + err);
  } else {
    var buffer = Buffer.from('content');
    fs.write(fd, buffer, 0, buffer.length, null, function(err) {
      if (err)
        console.log("ERROR !! " + err);
      fs.close(fd, function() {
        console.log('written success');
      });
    });
  }
});
Here 'content' is what you want written to the file, and buffer.length tells fs.write how many bytes of it to write.
Here is a sample of how to read a local CSV file and write a CSV file locally:
var csvjson = require('csvjson'),
    fs = require('fs'),
    mongodb = require('mongodb'),
    MongoClient = mongodb.MongoClient,
    mongoDSN = 'mongodb://localhost:27017/test',
    collection;

function uploadcsvModule() {
  var data = fs.readFileSync('/home/limitless/Downloads/orders_sample.csv', { encoding: 'utf8' });
  var importOptions = {
        delimiter: ',', // optional
        quote: '"'      // optional
      },
      ExportOptions = {
        delimiter: ",",
        wrap: false
      };
  var myobj = csvjson.toSchemaObject(data, importOptions);
  var exportArr = [], importArr = [];
  myobj.forEach(d => {
    if (d.orderId == undefined || d.orderId == '') {
      exportArr.push(d);
    } else {
      importArr.push(d);
    }
  });
  var csv = csvjson.toCSV(exportArr, ExportOptions);
  MongoClient.connect(mongoDSN, function(error, db) {
    collection = db.collection("orders");
    collection.insertMany(importArr, function(err, result) {
      // give writeFile a callback so the write is reported and the db closes afterwards
      fs.writeFile('/home/limitless/Downloads/orders_sample1.csv', csv, { encoding: 'utf8' }, function(err) {
        if (err) console.log(err);
        db.close();
      });
    });
  });
}

uploadcsvModule();
fs.createWriteStream(path[,options])
options may also include a start option to allow writing data at some position past the beginning of the file. Modifying a file rather than replacing it may require a flags mode of r+ rather than the default mode w. The encoding can be any one of those accepted by Buffer.
If autoClose is set to true (default behavior) on 'error' or 'finish' the file descriptor will be closed automatically. If autoClose is false, then the file descriptor won't be closed, even if there's an error. It is the application's responsibility to close it and make sure there's no file descriptor leak.
Like ReadStream, if fd is specified, WriteStream will ignore the path argument and will use the specified file descriptor. This means that no 'open' event will be emitted. fd should be blocking; non-blocking fds should be passed to net.Socket.
If options is a string, then it specifies the encoding.
After reading this long article, you should understand how it works. So, here's an example of createWriteStream():
/* fs.createWriteStream() returns a WritableStream (aka internal.Writable), and we want the encoding as 'utf-8' */
/* The WritableStream has the method write() */
const fs = require('fs');
fs.createWriteStream('out.txt', 'utf-8')
  .write('hello world');
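And a small sketch of the start and flags options described above (my own example; it assumes out.txt already exists, which 'r+' requires):
// flags: 'r+' modifies the existing file instead of truncating it;
// start: 6 begins writing at byte offset 6.
const ws = fs.createWriteStream('out.txt', { flags: 'r+', start: 6 });
ws.end('world');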
Point 1:
If you want to write something into a file, meaning it will remove anything already saved in the file and write the new content, use fs.promises.writeFile().
Point 2:
If you want to append something to a file, meaning it will not remove anything already saved in the file but add the new item to the file content, then first read the file, add the content to the read value, and write it back to the file. So use fs.promises.readFile and fs.promises.writeFile().
Example 1: I want to write a JSON object to my JSON file.
const fs = require('fs');
const data = { table: [{ id: 1, name: 'my name' }] };
const file_path = './my_data.json';
writeFile(file_path, data);

async function writeFile(filename, writedata) {
  try {
    await fs.promises.writeFile(filename, JSON.stringify(writedata, null, 4), 'utf8');
    console.log('data is written successfully in the file');
  } catch (err) {
    console.log('not able to write data in the file');
  }
}
Example 2: if you want to append data to a JSON file.
You want to add the data {id: 1, name: 'my name'} to the file my_data.json in the same folder root. Just call the append_data(file_path, data) function.
It will append the data to the JSON file if the file exists, or it will create the file and add the data to it.
const fs = require('fs');
const data = { id: 2, name: 'your name' };
const file_path = './my_data.json';
append_data(file_path, data);

async function append_data(filename, data) {
  if (fs.existsSync(filename)) {
    var read_data = await readFile(filename);
    if (read_data == false) {
      console.log('not able to read file');
    } else {
      read_data.table.push(data); // data must have the table array in it like example 1
      var dataWrittenStatus = await writeFile(filename, read_data);
      if (dataWrittenStatus == true) {
        console.log('data added successfully');
      } else {
        console.log('data adding failed');
      }
    }
  }
}

async function readFile(filePath) {
  try {
    const data = await fs.promises.readFile(filePath, 'utf8');
    return JSON.parse(data);
  } catch (err) {
    return false;
  }
}

async function writeFile(filename, writedata) {
  try {
    await fs.promises.writeFile(filename, JSON.stringify(writedata, null, 4), 'utf8');
    return true;
  } catch (err) {
    return false;
  }
}
You can use the easy-file-manager library.
First install it from npm:
npm install easy-file-manager
A sample to upload and remove files:
var filemanager = require('easy-file-manager');
var path = "/public";
var filename = "test.jpg";
var data; // buffered image

filemanager.upload(path, filename, data, function(err) {
  if (err) console.log(err);
});
filemanager.remove(path, filename, function(err) {
  if (err) console.log(err);
});
You can write to a file with the following code example:
var data = [{ 'test': '123', 'test2': 'Lorem Ipsem ' }];
// datapath and callback are assumed to be defined by the surrounding code
fs.open(datapath + '/data/topplayers.json', 'wx', function (error, fileDescriptor) {
  if (!error && fileDescriptor) {
    var stringData = JSON.stringify(data);
    fs.writeFile(fileDescriptor, stringData, function (error) {
      if (!error) {
        fs.close(fileDescriptor, function (error) {
          if (!error) {
            callback(false);
          } else {
            callback('Error closing file');
          }
        });
      } else {
        callback('Error writing file.');
      }
    });
  }
});
