Inserting large Excel files into MongoDB with Node.js and async.queue - javascript

I am trying to upload and insert large Excel files (hundreds of thousands of rows; 10-100 MB+) into MongoDB.
var cv_json = require('convert-json');

cv_json({
    // now supporting csv, xls, xlsx, xml format
    input: '/home/local/ASSYST-COC/sobharani/SMMC/DART - Sample Data File.xls',
    output: null
}, function (err, result) {
    if (err) {
        console.error(err);
    } else {
        console.log(result);
    }
});
This works only for small amounts of data. How do I use async.queue here?
I tried the approach below, but it takes too long to load 400K (4 lakh) rows (around 1 hour), and I need to load even more than that. I want the data stored within seconds, or at most 5 to 10 minutes.
var cv_json = require('convert-json');
var async = require('async');
var MongoClient = require('mongodb').MongoClient; // require mongodb

MongoClient.connect('mongodb://127.0.0.1/ee', function (err, db) {
    if (err) throw err;
    var collection = db.collection('csvdata');
    var queue = async.queue(collection.insert.bind(collection), 200);
    console.time("queryTime"); // start timer
    cv_json({
        // now supporting csv, xls, xlsx, xml format
        input: '/home/local/ASSYST-COC/sobharani/Desktop/JSONto.csv',
        output: null
    }, function (err, result) {
        if (err) {
            console.error(err);
        } else {
            queue.push(result, function (err, res) {
                if (err) throw err;
            });
        }
    });
    queue.drain = function () {
        console.log('all items have been processed');
        console.timeEnd("queryTime"); // end timer
        db.close();
    };
    console.timeEnd("queryTime"); // end timer
});

Every insert generates a separate call to the database, and MongoDB takes a write lock for each insert, which slows the process down even more.
I'd recommend batch inserting (many documents inserted at once) by providing an array of documents to the collection.insert method.
Cf. http://docs.mongodb.org/manual/reference/method/db.collection.insert/#db.collection.insert
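A minimal sketch of that idea, reusing the `result` array and `collection` from the question's code (on newer drivers, collection.insertMany plays the same role as collection.insert with an array):
// Sketch only: split the converted rows into chunks and insert each chunk
// in a single call instead of one call per document.
var BATCH_SIZE = 1000;
var batches = [];
for (var i = 0; i < result.length; i += BATCH_SIZE) {
    batches.push(result.slice(i, i + BATCH_SIZE));
}
async.eachSeries(batches, function (batch, next) {
    // one round trip per 1000 documents
    collection.insert(batch, function (err) {
        next(err);
    });
}, function (err) {
    if (err) throw err;
    console.log('all batches inserted');
    db.close();
});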

You should use a MongoDB batch insert. A batch insert supports no more than 1000 write operations, so you will need to use a bulk operation, which splits the batch for you. This is an example (in CoffeeScript):
# Mongo cannot do more than 1000 per batch insert; bulk will divide it
bulk = yourModel.collection.initializeUnorderedBulkOp()
for data in veryBigArray
  bulk.insert data
bulk.execute (err) ->
  if err? then console.log(err) else
    console.log "INSERTING %s DATA IN MONGO", veryBigArray.length

The first step is to determine whether the time is being spent in the conversion to JSON or in the load into MongoDB. Try commenting out the step that inserts the result into MongoDB and see whether the conversion alone still takes a long time.
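As a sketch, that measurement could look like this (the input path is the one from the question; the queue.push call is simply disabled for the test):
// Time the conversion alone; with the MongoDB insert commented out,
// any remaining delay belongs to cv_json itself.
console.time('convertTime');
cv_json({
    input: '/home/local/ASSYST-COC/sobharani/Desktop/JSONto.csv',
    output: null
}, function (err, result) {
    if (err) return console.error(err);
    console.timeEnd('convertTime');
    console.log('rows converted:', result.length);
    // queue.push(result, ...);  // re-enable once the conversion time is known
});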

Related

Comparing two text files and saving the difference to a new file

I'm currently trying to wrap my head around this. The scenario is:
1. Download textFile1 from FTP
2. Download textFile2 from FTP
3. Read and compare the data in textFile1 & 2 from the local files on the PC
4. Save only the differences (i.e. full lines) to textFile3
I've tried fs.readFile on them and comparing with fileDiff (npm diff); however, I am a bit unsure how to save only the actual difference between 1 & 2 to a third text file.
See it as a "compare & save".
I hope someone has ideas on how to achieve this; it's a bit hard to explain.
// Update
Code snippet (or see below): https://jsfiddle.net/menix/py3eqnfL/
const ServerLog = "ServerDM/TheIsle/Saved/Logs/TheIsle.log";
const LocalSaveOld = "TheIsleData/TheIsle.log";
const LocalSaveNew = "TheIsleData/TheIsleNew.log";

async function example() {
    const client = new ftp.Client();
    client.ftp.verbose = true;
    try {
        await client.access({
            host: config.ftpHost,
            port: config.ftpPort,
            user: config.ftpUser,
            password: config.ftpPassword,
            secure: config.ftpSecure,
        });
        await client.downloadTo(LocalSaveOld, ServerLog);
        if (fs.existsSync(LocalSaveOld)) {
            fs.readFile(LocalSaveOld, function (err, data1) {
                console.log(data1);
                fs.readFile(LocalSaveNew, function (err, data2) {
                    console.log(data2);
                    var difference = fileDiff.diffLines(data1, data2);
                    console.log(difference);
                });
            });
            await fs.rename(LocalSaveOld, LocalSaveNew, function (err) {
                console.log("old was copied to new");
                if (err) console.log("ERROR: " + err);
            });
        }
    } catch (error) {
        console.log("FTP ERROR");
        client.close();
    }
}
This is the code I've played with so far; it most likely needs a rewrite.
My idea is: first it downloads the ServerLog file through FTP to the PC (saved as LocalSaveOld), then after 5 minutes it should re-download ServerLog and save it as LocalSaveNew.
Then I want to compare LocalSaveOld with LocalSaveNew and save the "new data" from that 5-minute window to a third file (this is the part I'm really lost on).
I've figured out how to download ServerLog from FTP and save it as LocalSaveOld.
I've figured out how to rename LocalSaveOld to LocalSaveNew.
I still need a way to re-download and compare every 5 minutes or so, and save the new data to a separate text file (let's call it NewLogData); this bit is missing from the code.
A friend has a working Python example, but mine is in JavaScript, and I want to achieve the same result.
const string1 = fs.readFileSync(LocalSaveOld, { encoding: "utf8" });
const string2 = fs.readFileSync(LocalSaveNew, { encoding: "utf8" });
const diff = (diffMe, diffBy) => diffMe.split(diffBy).join("");
const finalString = diff(string2, string1);
fs.writeFile("./TheIsleData/diff.log", finalString, function (err) {
    if (err) return console.log(err);
    console.log("written");
});
This was the answer for the comparison, and I have it fully working now!
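For the re-download-every-5-minutes part, a sketch along these lines could work (the runCycle name and the appended NewLogData.log file are assumptions; the FTP calls mirror the question's code):
// Sketch: re-download the server log every 5 minutes, append whatever is new
// to NewLogData.log, and keep the latest copy as the next baseline.
const INTERVAL_MS = 5 * 60 * 1000;

async function runCycle() {
    const client = new ftp.Client();
    try {
        await client.access({
            host: config.ftpHost,
            port: config.ftpPort,
            user: config.ftpUser,
            password: config.ftpPassword,
            secure: config.ftpSecure,
        });
        await client.downloadTo(LocalSaveNew, ServerLog); // latest copy
        if (fs.existsSync(LocalSaveOld)) {
            const oldText = fs.readFileSync(LocalSaveOld, "utf8");
            const newText = fs.readFileSync(LocalSaveNew, "utf8");
            // Keep only what was appended since the previous download.
            const added = newText.startsWith(oldText) ? newText.slice(oldText.length) : newText;
            fs.appendFileSync("TheIsleData/NewLogData.log", added);
        }
        fs.copyFileSync(LocalSaveNew, LocalSaveOld); // new copy becomes the next baseline
    } catch (error) {
        console.log("FTP ERROR", error);
    } finally {
        client.close();
    }
}

runCycle();
setInterval(runCycle, INTERVAL_MS);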

Loop through all the images inside a directory and convert them to base64

I am trying to loop through all the images in my folder, convert them to base64, and send them to MongoDB.
I started with one image, worked fine.
var filename = '1500.jpg';
var binarydata = fs.readFileSync(filename);
var converted = new Buffer(binarydata).toString("base64");
console.log(converted);
The above code gives me base64 for one file.
I tried changing the code so that it loops through all the files in my directory and gives me base64 for each file.
Here is what I wrote, but it did not work:
var variantfolder = './variantimages';

fs.readdir(variantfolder, function (err, files) {
    if (err) {
        console.log(err);
    } else {
        fs.readFileSync(files, function (err, res) {
            if (err) {
                console.log('err');
            } else {
                var converted = new Buffer(res).toString("base64");
                var onevariant = {
                    "imagename": files,
                    "imagebase64": converted
                };
                var newvariant = new Variant(onevariant);
                newvariant.save(function (err, newvar) {
                    if (err) {
                        console.log('err');
                    } else {
                        console.log('saved to mongo');
                    }
                });
            }
        });
    }
});
I suspect the problem is related to calling these functions in the wrong way.
Check the inputs and outputs of the functions you are using.
The fs.readdir() function callback is passed 2 parameters, an error and an array of file names.
The fs.readFileSync() function takes the parameters path and options. It also returns the file contents, it doesn't take a callback. The callback version is fs.readFile().
So in your code you are passing an array of file names into the file path parameter, which will not work.
You can also pass base64 as the encoding when reading the file and you won't have to convert it after.
I expect you will want something more along these lines (add your own error handling as required):
fs.readdir(variantfolder, (err, fileNames) => {
    fileNames.forEach((fileName) => {
        fs.readFile(`${variantfolder}/${fileName}`, 'base64', (err, base64Data) => {
            // Do your thing with the file data.
        });
    });
});
Note that you can use the async, sync or promise (fs.promises) version of the fs functions depending on what is most suitable for your code.
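For instance, a promise-based sketch of the same loop, assuming Variant is a Mongoose-style model whose save() returns a promise:
// Sketch using fs.promises instead of callbacks.
const fs = require('fs').promises;
const path = require('path');

async function saveVariantImages(variantfolder) {
    const fileNames = await fs.readdir(variantfolder);
    for (const fileName of fileNames) {
        // Reading with the 'base64' encoding skips the manual Buffer conversion.
        const base64Data = await fs.readFile(path.join(variantfolder, fileName), 'base64');
        await new Variant({ imagename: fileName, imagebase64: base64Data }).save();
        console.log('saved to mongo:', fileName);
    }
}

saveVariantImages('./variantimages').catch(console.error);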

How can I put many script-generated records in PostgreSQL quickly from node.js?

The following code, which inserts 200,000 records into PostgreSQL server from node.js, is taking about 17 minutes on my laptop PC, which feels horribly slow.
var pg = require('pg');
var Client = pg.Client;
var async = require('async');

var client = new Client(connectionString);
client.connect();

var rollback = function (client) {
    client.query('ROLLBACK', function () {
        client.end();
        process.kill();
    });
};

client.query('BEGIN', function (err, result) {
    if (err) { console.error(err); rollback(client); }
    async.waterfall([
        function (cb) {
            client.query('DROP INDEX idx', function (err, result) {
                client.query('TRUNCATE TABLE tbl', function (err, result) {
                    async.forEach(values, function (value, valueNext) {
                        client.query('INSERT INTO tbl ('
                            + 'col1,'
                            + 'col2) VALUES ($1,$2)', [
                            value,
                            generatedSomething(value)
                        ], function (err) {
                            valueNext();
                        });
                    }, function (err, result) {
                        if (err) { console.error(err); rollback(client); cb(false); return; }
                        client.query('CREATE INDEX idx ON tbl', function (err, result) {
                            cb(null);
                        });
                    });
                });
            });
        },
    ], function (err) {
        client.query('COMMIT', client.end.bind(client));
    });
});
There are some strategies I've applied to speed it up:
Drop all indices before insertion and recreate them after all inserts are done ... done
Use TRUNCATE TABLE instead of DELETE FROM ... done
Use COPY FROM instead of INSERT INTO ... not done
It seems that using COPY FROM instead of INSERT INTO would have an effect, but it's meant for imported CSV files, not for script-generated values.
So, does that mean that exporting the script-generated values to a temporary CSV file and importing them with COPY FROM is the most effective way to insert values into PostgreSQL quickly?
copyFrom will return a WritableStream that you can append your values to as CSV, like:
var stream = client.copyFrom("COPY tbl (col1, col2) FROM STDIN WITH CSV");
stream.on('close', function () {
    client.query("COMMIT");
});
stream.on('error', rollback);
async.forEach(values, function (value, valueNext) {
    stream.write(value + "," + generatedSomething(value) + "\n");
});
stream.end();
Of course, you will need to properly escape your values.
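As an illustration of what that escaping could look like for the rows written above (this escapeCsv helper is just a sketch, not part of any library):
// Quote each value and double any embedded quotes, per the CSV convention.
function escapeCsv(value) {
    return '"' + String(value).replace(/"/g, '""') + '"';
}

// Used when writing each row to the COPY stream:
stream.write(escapeCsv(value) + "," + escapeCsv(generatedSomething(value)) + "\n");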

node.js never exits after insert to couchbase, opposite of most node questions

My problem seems to be the opposite of every node.js question :-) I have a simple forEach loop to read a list of files and insert them into a Couchbase database. This works great, but it never exits after reading all the lines. So I added a counter to shutdown the couchbase connection after all inserts are complete. This works.
This process is intended to load hundreds of thousands of files, so I brought the async module into the mix to batch the inserts into groups of 100. async.eachLimit is used to iterate over the array and insert documents in batches. Now the original problem is back: whatever magic async.eachLimit uses to recognize that the process is complete is not happening.
I've been going through javascript scoping, callbacks, async, etc. Google searches are hitting keywords but not this issue. I've reduced the code down to the following testcase. To test, create three files and add their names to testlist.txt.
The async.eachLimit call as written works up until it hits the limit, then hangs. Comment it out and uncomment the array.forEach line, and it works. Thanks in advance!
var fs = require('fs');
var couchbase = require('couchbase');
var async = require('async');

var filelist = 'testlist.txt';
var key_count = 0;
var cb_config = { host: 'localhost:8091', bucket: 'default' };

var db = new couchbase.Connection(cb_config, function (err) {
    if (err) {
        console.log('ERRR connect to couchbase at config[' + cb_config + ']');
        throw err;
    }
});

var insertFile = function (line) {
    console.log('LOAD [' + line + ']');
    fs.readFile(line, function (file_err, f_doc) {
        if (file_err) throw file_err;
        db.set(line, f_doc, function (db_err, db_res) {
            if (db_err) {
                console.log('FAIL [' + line + '] err[' + db_err + ']');
            } else {
                console.log('PASS [' + line + ']');
            }
            key_count--;
            if (key_count == 0) {
                console.log('DONE Shutting down client, no more keys');
                db.shutdown();
            }
        });
    });
};

// read list of files into data array from file filelist
fs.readFile(filelist, function (filelist_err, lines) {
    if (filelist_err) throw filelist_err;
    // HACK split adds empty line to array, use replace to fix
    var array = lines.toString().replace(/\n$/, '').split('\n');
    key_count = array.length;
    console.log('INIT lines[' + key_count + ']');
    async.eachLimit(array, 2, insertFile, function (err) { console.log('FAIL async err[' + err + ']'); });
    //array.forEach(function(data){insertFile(data);return;});
});
Testcase output using array.forEach:
INIT lines[3]
LOAD [files.big.txt]
LOAD [files.little.txt]
LOAD [files.txt]
PASS [files.little.txt]
PASS [files.big.txt]
PASS [files.txt]
DONE Shutting down client, no more keys
Testcase output using async.eachLimit:
INIT lines[3]
LOAD [files.big.txt]
LOAD [files.little.txt]
PASS [files.little.txt]
PASS [files.big.txt]
... hang, never gets to 3...
After a review with a coworker, they spotted my mistake: I missed the async callback in my insertFile function. Adding that in works and allows me to remove the key counter. Solution code below:
var fs = require('fs');
var couchbase = require('couchbase');
var async = require('async');

var filelist = 'testlist.txt';
var key_count = 0;
var cb_config = { host: 'localhost:8091', bucket: 'default' };

var db = new couchbase.Connection(cb_config, function (err) {
    if (err) {
        console.log('ERRR connect to couchbase at config[' + cb_config + ']');
        throw err;
    }
});

var insertFile = function (line, callback) {
    console.log('LOAD [' + line + ']');
    fs.readFile(line, function (file_err, f_doc) {
        if (file_err) throw file_err;
        db.set(line, f_doc, function (db_err, db_res) {
            if (db_err) {
                console.log('FAIL [' + line + '] err[' + db_err + ']');
                callback(db_err);
            } else {
                console.log('PASS [' + line + ']');
                callback();
            }
        });
    });
};

// read list of files into data array from file filelist
fs.readFile(filelist, function (filelist_err, data) {
    if (filelist_err) throw filelist_err;
    // HACK split adds an empty line to the array, use replace to fix
    var array = data.toString().replace(/\n$/, '').split('\n');
    key_count = array.length;
    console.log('READ files[' + key_count + ']');
    async.eachLimit(array, 2, insertFile, function (err) {
        if (err) console.log('LAST with async err[' + err + ']');
        console.log('DONE Shutting down client, no more keys');
        db.shutdown();
    });
});
And successful output:
$ node testcase.js
READ files[3]
LOAD [files.big.txt]
LOAD [files.little.txt]
PASS [files.little.txt]
LOAD [files.txt]
PASS [files.big.txt]
PASS [files.txt]
DONE Shutting down client, no more keys

Writing to files in Node.js

I've been trying to find a way to write to a file when using Node.js, but with no success. How can I do that?
There are a lot of details in the File System API. The most common way is:
const fs = require('fs');

fs.writeFile("/tmp/test", "Hey there!", function (err) {
    if (err) {
        return console.log(err);
    }
    console.log("The file was saved!");
});

// Or
fs.writeFileSync('/tmp/test-sync', 'Hey there!');
Currently there are three ways to write a file:
fs.write(fd, buffer, offset, length, position, callback)
You need to wait for the callback to ensure that the buffer is written to disk. It's not buffered.
fs.writeFile(filename, data, [encoding], callback)
All data must be stored at the same time; you cannot perform sequential writes.
fs.createWriteStream(path, [options])
Creates a WriteStream, which is convenient because you don't need to wait for a callback. But again, it's not buffered.
A WriteStream, as the name says, is a stream. A stream by definition is “a buffer” containing data which moves in one direction (source ► destination). But a writable stream is not necessarily “buffered”. A stream is “buffered” when you write n times, and at time n+1, the stream sends the buffer to the kernel (because it's full and needs to be flushed).
In other words: “A buffer” is the object. Whether or not it “is buffered” is a property of that object.
If you look at the code, the WriteStream inherits from a writable Stream object. If you pay attention, you’ll see how they flush the content; they don't have any buffering system.
If you write a string, it’s converted to a buffer, and then sent to the native layer and written to disk. When writing strings, they're not filling up any buffer. So, if you do:
write("a")
write("b")
write("c")
You're doing:
fs.write(new Buffer("a"))
fs.write(new Buffer("b"))
fs.write(new Buffer("c"))
That’s three calls to the I/O layer. Although you're using “buffers”, the data is not buffered. A buffered stream would do: fs.write(new Buffer ("abc")), one call to the I/O layer.
Node.js v0.12 (stable version announced 02/06/2015) supports two functions, cork() and uncork(), which finally allow you to buffer and then flush the write calls.
For example, in Java there are some classes that provide buffered streams (BufferedOutputStream, BufferedWriter...). If you write three bytes, these bytes will be stored in the buffer (memory) instead of doing an I/O call just for three bytes. When the buffer is full the content is flushed and saved to disk. This improves performance.
I'm not discovering anything, just remembering how a disk access should be done.
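Going back to cork()/uncork(), a minimal sketch (assuming Node v0.12 or later) could look like this:
// The three small writes are held in memory while the stream is corked,
// then flushed together when uncork() runs on the next tick.
var fs = require('fs');
var stream = fs.createWriteStream('out.txt');

stream.cork();
stream.write('a');
stream.write('b');
stream.write('c');
process.nextTick(function () {
    stream.uncork(); // flushes the corked writes in one batch
    stream.end();
});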
You can of course make it a little more advanced. Non-blocking, writing bits and pieces, not writing the whole file at once:
var fs = require('fs');
var stream = fs.createWriteStream("my_file.txt");
stream.once('open', function (fd) {
    stream.write("My first row\n");
    stream.write("My second row\n");
    stream.end();
});
Synchronous Write
fs.writeFileSync(file, data[, options])
fs = require('fs');
fs.writeFileSync("foo.txt", "bar");
Asynchronous Write
fs.writeFile(file, data[, options], callback)
fs = require('fs');
fs.writeFile('foo.txt', 'bar', (err) => { if (err) throw err; });
Where
file <string> | <Buffer> | <URL> | <integer> filename or file descriptor
data <string> | <Buffer> | <Uint8Array>
options <Object> | <string>
callback <Function>
It's worth reading the official File System (fs) docs.
Update: async/await
fs = require('fs');
util = require('util');
writeFile = util.promisify(fs.writeFile);
fn = async () => { await writeFile('foo.txt', 'bar'); }
fn()
var path = 'public/uploads/file.txt',
    buffer = new Buffer("some content\n");

fs.open(path, 'w', function (err, fd) {
    if (err) {
        throw 'error opening file: ' + err;
    }
    fs.write(fd, buffer, 0, buffer.length, null, function (err) {
        if (err) throw 'error writing file: ' + err;
        fs.close(fd, function () {
            console.log('file written');
        });
    });
});
The answers provided are dated and a newer way to do this is:
const fsPromises = require('fs').promises
await fsPromises.writeFile('/path/to/file.txt', 'data to write')
See the docs here for more info.
I liked Index of ./articles/file-system.
It worked for me.
See also How do I write files in node.js?.
fs = require('fs');
fs.writeFile('helloworld.txt', 'Hello World!', function (err) {
    if (err)
        return console.log(err);
    console.log('Wrote Hello World in file helloworld.txt, just check it');
});
Contents of helloworld.txt:
Hello World!
Update:
As noted in "Linux node write in current directory", Node writes to the current directory on Linux, but it seems some other environments don't, so I add this comment just in case:
Use ROOT_APP_PATH = fs.realpathSync('.'); console.log(ROOT_APP_PATH); to see where the file is written.
I know the question asked about "write", but in a more general sense "append" might be useful in some cases, as it is easy to use in a loop to add text to a file (whether or not the file exists). Use "\n" if you want to add lines, e.g.:
var fs = require('fs');
for (var i = 0; i < 10; i++) {
    fs.appendFileSync("junk.csv", "Line:" + i + "\n");
}
OK, it's quite simple, as Node has built-in functionality for this: the fs module, which stands for File System and is basically the Node.js File System module.
So first require it in your server.js file like this:
var fs = require('fs');
fs has a few methods for writing to a file, but my preferred way is using appendFile. This appends the content to the file, and if the file doesn't exist, it creates one. The code could look like the following:
fs.appendFile('myFile.txt', 'Hi Ali!', function (err) {
    if (err) throw err;
    console.log('Thanks, It\'s saved to the file!');
});
You may write to a file using fs (file system) module.
Here is an example of how you may do it:
const fs = require('fs');

const writeToFile = (fileName, callback) => {
    fs.open(fileName, 'wx', (error, fileDescriptor) => {
        if (!error && fileDescriptor) {
            // Do something with the file here ...
            fs.writeFile(fileDescriptor, newData, (error) => {
                if (!error) {
                    fs.close(fileDescriptor, (error) => {
                        if (!error) {
                            callback(false);
                        } else {
                            callback('Error closing the file');
                        }
                    });
                } else {
                    callback('Error writing to new file');
                }
            });
        } else {
            callback('Could not create new file, it may already exist');
        }
    });
};
You might also want to get rid of this callback-inside-callback code structure by using Promises and async/await. This makes the asynchronous code structure much flatter. The handy util.promisify(original) function can be used for that; it allows us to switch from callbacks to promises. Take a look at the example with fs functions below:
// Dependencies.
const util = require('util');
const fs = require('fs');

// Promisify "error-back" functions.
const fsOpen = util.promisify(fs.open);
const fsWrite = util.promisify(fs.writeFile);
const fsClose = util.promisify(fs.close);

// Now we may create an 'async' function with 'await's.
async function doSomethingWithFile(fileName) {
    const fileDescriptor = await fsOpen(fileName, 'wx');
    // Do something with the file here...
    await fsWrite(fileDescriptor, newData);
    await fsClose(fileDescriptor);
}
You can write to files with streams.
Just do it like this:
const fs = require('fs');
const stream = fs.createWriteStream('./test.txt');
stream.write("Example text");
var fs = require('fs');
fs.writeFile(path + "\\message.txt", "Hello", function (err) {
    if (err) throw err;
    console.log("success");
});
For example, read one file and write its contents to another file:
var fs = require('fs');
var path = process.cwd();
fs.readFile(path + "\\from.txt", function (err, data) {
    if (err)
        console.log(err);
    else {
        fs.writeFile(path + "\\to.text", data, function (erro) {
            if (erro)
                console.log("error : " + erro);
            else
                console.log("success");
        });
    }
});
Here we use w+ for both read and write; if the file path is not found, the file is created automatically.
fs.open(path, 'w+', function (err, fd) {
    if (err) {
        console.log("ERROR !! " + err);
    } else {
        var buffer = Buffer.from('content');
        fs.write(fd, buffer, 0, buffer.length, null, function (err) {
            if (err)
                console.log("ERROR !! " + err);
            fs.close(fd, function () {
                console.log('written success');
            });
        });
    }
});
Here 'content' is what you want to write to the file, and buffer.length is its length.
Here is a sample of how to read a local CSV file and write a CSV file back to disk:
var csvjson = require('csvjson'),
    fs = require('fs'),
    mongodb = require('mongodb'),
    MongoClient = mongodb.MongoClient,
    mongoDSN = 'mongodb://localhost:27017/test',
    collection;

function uploadcsvModule() {
    var data = fs.readFileSync('/home/limitless/Downloads/orders_sample.csv', { encoding: 'utf8' });
    var importOptions = {
        delimiter: ',', // optional
        quote: '"'      // optional
    }, ExportOptions = {
        delimiter: ",",
        wrap: false
    };
    var myobj = csvjson.toSchemaObject(data, importOptions);
    var exportArr = [], importArr = [];
    myobj.forEach(d => {
        if (d.orderId == undefined || d.orderId == '') {
            exportArr.push(d);
        } else {
            importArr.push(d);
        }
    });
    var csv = csvjson.toCSV(exportArr, ExportOptions);
    MongoClient.connect(mongoDSN, function (error, db) {
        collection = db.collection("orders");
        collection.insertMany(importArr, function (err, result) {
            fs.writeFile('/home/limitless/Downloads/orders_sample1.csv', csv, { encoding: 'utf8' }, function (err) {
                if (err) console.log(err);
            });
            db.close();
        });
    });
}

uploadcsvModule();
fs.createWriteStream(path[,options])
options may also include a start option to allow writing data at some position past the beginning of the file. Modifying a file rather than replacing it may require a flags mode of r+ rather than the default mode w. The encoding can be any one of those accepted by Buffer.
If autoClose is set to true (default behavior) on 'error' or 'finish' the file descriptor will be closed automatically. If autoClose is false, then the file descriptor won't be closed, even if there's an error. It is the application's responsibility to close it and make sure there's no file descriptor leak.
Like ReadStream, if fd is specified, WriteStream will ignore the path argument and will use the specified file descriptor. This means that no 'open' event will be emitted. fd should be blocking; non-blocking fds should be passed to net.Socket.
If options is a string, then it specifies the encoding.
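For instance, a sketch of the start and flags options described above, which patches bytes in the middle of an existing file instead of replacing it (the file name existing.txt is just an example):
const fs = require('fs');

// 'r+' opens the existing file without truncating it; start: 10 begins
// writing at byte offset 10, leaving the rest of the file untouched.
const stream = fs.createWriteStream('existing.txt', { flags: 'r+', start: 10 });
stream.write('PATCHED');
stream.end();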
After reading this long article, you should understand how it works. So, here's an example of createWriteStream().
/* fs.createWriteStream() returns a WritableStream (aka internal.Writable) and we want the encoding to be 'utf-8' */
/* The WritableStream has the method write() */
fs.createWriteStream('out.txt', 'utf-8')
    .write('hello world');
Point 1:
If you want to write something into a file, meaning it will remove anything already saved in the file and write the new content, use fs.promises.writeFile().
Point 2:
If you want to append something to a file, meaning it will not remove anything already saved but will add the new item to the file content, then first read the file, add the new content to the value you read, and write it back to the file. So use fs.promises.readFile() and fs.promises.writeFile().
Example 1: I want to write a JSON object to my JSON file.
const fs = require('fs');
const data = { table: [{ id: 1, name: 'my name' }] };
const file_path = './my_data.json';

writeFile(file_path, data);

async function writeFile(filename, writedata) {
    try {
        await fs.promises.writeFile(filename, JSON.stringify(writedata, null, 4), 'utf8');
        console.log('data is written successfully in the file');
    }
    catch (err) {
        console.log('not able to write data in the file');
    }
}
Example 2: I want to append data to a JSON file.
You want to add the data {id: 2, name: 'your name'} to the file my_data.json in the same folder root; just call the append_data(file_path, data) function.
It will append the data to the JSON file if the file exists, or it will create the file and add the data to it.
const fs = require('fs');
const data = { id: 2, name: 'your name' };
const file_path = './my_data.json';

append_data(file_path, data);

async function append_data(filename, data) {
    if (fs.existsSync(filename)) {
        var read_data = await readFile(filename);
        if (read_data == false) {
            console.log('not able to read file');
        } else {
            read_data.table.push(data); // data must go into the table array, as in example 1
            var dataWrittenStatus = await writeFile(filename, read_data);
            if (dataWrittenStatus == true) {
                console.log('data added successfully');
            } else {
                console.log('data adding failed');
            }
        }
    }
}

async function readFile(filePath) {
    try {
        const data = await fs.promises.readFile(filePath, 'utf8');
        return JSON.parse(data);
    }
    catch (err) {
        return false;
    }
}

async function writeFile(filename, writedata) {
    try {
        await fs.promises.writeFile(filename, JSON.stringify(writedata, null, 4), 'utf8');
        return true;
    }
    catch (err) {
        return false;
    }
}
You can use the library easy-file-manager.
Install it first from npm:
npm install easy-file-manager
Sample to upload and remove files:
var filemanager = require('easy-file-manager');
var path = "/public";
var filename = "test.jpg";
var data; // buffered image

filemanager.upload(path, filename, data, function (err) {
    if (err) console.log(err);
});

filemanager.remove(path, filename, function (err) {
    if (err) console.log(err);
});
You can write to a file with the following code example:
var data = [{ 'test': '123', 'test2': 'Lorem Ipsem ' }];
fs.open(datapath + '/data/topplayers.json', 'wx', function (error, fileDescriptor) {
    if (!error && fileDescriptor) {
        var stringData = JSON.stringify(data);
        fs.writeFile(fileDescriptor, stringData, function (error) {
            if (!error) {
                fs.close(fileDescriptor, function (error) {
                    if (!error) {
                        callback(false);
                    } else {
                        callback('Error in close file');
                    }
                });
            } else {
                callback('Error in writing file.');
            }
        });
    }
});
