Pass Buffer to ChildProcess in Node.js

I have a Node.js script where I want to do image processing in a subprocess.
As you will see, I take the file image.jpg and want to write it back out as hello.jpg from a subprocess:
var node = require('child_process').spawn('node', ['-i']);
var fs = require('fs');

node.stdout.on('data', function(data) {
    var fs = require('fs');
    var gm = require('gm').subClass({ imageMagick: true });
    gm(data)
        .resize(500, 500)
        .toBuffer("jpg", function(err, buffer) {
            if (err) {
                console.log(err);
            } else {
                fs.writeFile("hello.jpg", buffer);
            }
        });
});

var buffer = fs.readFileSync(__dirname + "/image.jpg");
node.stdin.write(buffer);
However when I run this file I get this error:
[Error: Stream yields empty buffer]
To me it seems like the buffer is not passed correctly to the subprocess.
What am I doing wrong? What can I do to run image processing in a subtask? It's important that the image is not read from a file inside the subprocess, because I want to read one file once and then send the buffer to several subprocesses that each do image transformations. Thanks!

You are not doing any work in a subprocess. It is just node -i and nothing else. All your image processing happens in the main process.
To fix it, you can actually run another Node process and give it some script to execute, say worker.js:
process.stdin.on('data', function(data) {
    var fs = require('fs');
    var gm = require('gm').subClass({ imageMagick: true });
    gm(data)
        .resize(500, 500)
        .toBuffer("jpg", function(err, buffer) {
            if (err) {
                console.log(err);
            } else {
                fs.writeFile("hello.jpg", buffer);
            }
        });
});
Then you would create a subprocess from your main script:
var node = require('child_process').spawn('node', ['worker.js']);
var fs = require('fs');
var buffer = fs.readFileSync(__dirname + "/image.jpg");
node.stdin.end(buffer);
Note that I used node.stdin.end in the last line so that the worker's stdin is closed and the process can finish.
Take a look at the cluster module for an alternative approach.
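One thing to be aware of: with a larger image the worker's 'data' handler may fire several times with partial chunks, so it is safer to collect everything until stdin ends. Below is a minimal sketch of such a worker (file names are just examples), plus a main script that reads the image once and sends the same buffer to several workers, as the question asks:

// worker.js - buffer all stdin chunks before handing them to gm
var fs = require('fs');
var gm = require('gm').subClass({ imageMagick: true });

var chunks = [];
process.stdin.on('data', function(chunk) {
    chunks.push(chunk);
});
process.stdin.on('end', function() {
    gm(Buffer.concat(chunks))
        .resize(500, 500)
        .toBuffer("jpg", function(err, buffer) {
            if (err) return console.log(err);
            fs.writeFile("hello.jpg", buffer, function(err) {
                if (err) console.log(err);
            });
        });
});

// main.js - read the file once, feed the buffer to several workers
var spawn = require('child_process').spawn;
var fs = require('fs');

var buffer = fs.readFileSync(__dirname + "/image.jpg");
['worker.js', 'worker.js'].forEach(function(script) {
    var child = spawn('node', [script]);
    child.stdin.end(buffer); // end() closes stdin so the worker's 'end' event fires
});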

Related

Writing to file only writes last item, not all items, why?

I'm trying to write a feed to a file using Node.js. The problem is, it doesn't write all the feed items, only the last one.
var fs = require('fs');
var feedParser = require('ortoo-feedparser');
var url = "http://iwnsvg.com/feed";

feedParser.parseUrl(url).on('article', function(article) {
    console.log('title; ', article.title);
    fs.writeFile("articles.json", JSON.stringify(article.title), function(err) {
        if (err) {
            console.log(err);
        }
    });
});
Why?
Just change fs.writeFile( to fs.appendFile( and you're fine.
fs.writeFile overwrites your file each time you call it whereas fs.appendFile adds to a file.
As #Robert says, you should use appendFile, but also note that that change won't write out valid JSON. I'm not sure what output you're trying to achieve; if you just want the titles, you could write out a .txt file with a title on each line like so:
var fs = require('fs');
var feedParser = require('ortoo-feedparser');
var url = "http://iwnsvg.com/feed";

feedParser.parseUrl(url).on('article', function(article) {
    console.log('title; ', article.title);
    fs.appendFile("articles.txt", article.title + "\n", function(err) {
        if (err) {
            console.log(err);
        }
    });
});
To write out json you can do:
var fs = require('fs');
var feedParser = require('ortoo-feedparser');
var url = "http://iwnsvg.com/feed";

let titles = [];
feedParser.parseUrl(url)
    .on('article', function (article) {
        console.log('title; ', article.title);
        titles.push(article.title);
    })
    .on('end', function () {
        fs.writeFile('articles.json', JSON.stringify({ titles }), function (err) {
            if (err) {
                console.log(err);
            }
        });
    });
fs.writeFile comes with some options, like flag. The default value of flag is 'w' (write), so your data is replaced by the new data on every call.
Use 'a' instead
{flag:'a'}
and you'll be fine.
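For example (a minimal sketch reusing the feed code above), the flag can be passed inline:

fs.writeFile("articles.txt", article.title + "\n", { flag: 'a' }, function(err) {
    if (err) console.log(err);
});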
But don't forget that writeFile and appendFile are higher-level helpers in the fs library that open and close the file each time you add data.
Preferably, use fs.createWriteStream, which returns a writable stream (a writable file handle, in other languages). Then use and reuse this stream whenever you need to write data to your file.
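A minimal sketch of that approach for the feed above (the output file name is illustrative): open the stream once, write to it for every article, and close it at the end.

var fs = require('fs');
var feedParser = require('ortoo-feedparser');

var out = fs.createWriteStream('articles.txt', { flags: 'a' });
feedParser.parseUrl("http://iwnsvg.com/feed")
    .on('article', function(article) {
        out.write(article.title + "\n"); // reuse the same open file handle
    })
    .on('end', function() {
        out.end(); // close the file once, when the feed is done
    });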

Node.js - Reading CSV-file not working with line numbers > 500

I am currently struggling to run my Node.js server.
What I want to do:
Upload a CSV-File from mobile device to my local server and save it on the file system
Read each line of the .csv-File and save each row to my MongoDB database
Uploading and saving the file works flawlessly. Reading the CSV file and saving each row to the database only works for files with a small number of lines.
I don't know the exact number of lines where it stops working. It seems to differ every time I read a file.
Sometimes (if there are more than 1000 lines) the CSV reader I use doesn't even start processing the file. Other times it reads only 100-200 lines and then stops.
Here is my code how I upload the file:
var fs = require('fs');
var sys = require("sys");
var url = require('url');
var http = require('http');

http.createServer(function(request, response) {
    sys.puts("Got new file to upload!");

    var urlString = url.parse(request.url).pathname;
    var pathParts = urlString.split("/");
    var deviceID = pathParts[1];
    var fileName = pathParts[2];

    sys.puts("DeviceID: " + deviceID);
    sys.puts("Filename: " + fileName);

    sys.puts("Start saving file");
    var tempFile = fs.createWriteStream(fileName);
    request.pipe(tempFile);
    sys.puts("File saved");

    // Starting a new child process which reads the file
    // and inserts each row to the database
    var task = require('child_process').fork('databaseInsert.js');
    task.on('message', function(childResponse) {
        sys.puts('Finished child process!');
    });
    task.send({
        start : true,
        deviceID : deviceID,
        fileName : fileName
    });
    sys.puts("After task");

    response.writeHead(200, {
        "Content-Type" : "text/plain"
    });
    response.end('MESSAGE');
}).listen(8080);
This works all fine.
Now the code of the child process (databaseInsert.js):
var sys = require("sys");
var yaCSV = require('ya-csv');
var Db = require('mongodb').Db;
var dbServer = require('mongodb').Server;

process.on('message', function(info) {
    sys.puts("Doing work in child process");
    var fileName = info.fileName;
    var deviceID = info.deviceID;

    sys.puts("Starting db insert!");
    var dbClient = new Db('test', new dbServer("127.0.0.1", 27017, {}), {
        w : 1
    });

    dbClient.open(function(err, client) {
        if (err) {
            sys.puts(err);
        }
        dbClient.createCollection(deviceID, function(err, collection) {
            if (err) {
                sys.puts("Error creating collection: " + err);
            } else {
                sys.puts("Created collection: " + deviceID);

                var csvReader = yaCSV.createCsvFileReader(fileName, {
                    columnsFromHeader : true,
                    'separator' : ';'
                });
                csvReader.setColumnNames([ 'LineCounter', 'Time', 'Activity',
                        'Latitude', 'Longitude' ]);

                var lines = 0;
                csvReader.addListener('data', function(data) {
                    lines++;
                    sys.puts("Line: " + data.LineCounter);
                    var docRecord = {
                        fileName : fileName,
                        lineCounter : data.LineCounter,
                        time : data.Time,
                        activity : data.Activity,
                        latitude : data.Latitude,
                        longitude : data.Longitude
                    };
                    collection.insert(docRecord, {
                        safe : true
                    }, function(err, res) {
                        if (err) {
                            sys.puts(err);
                        }
                    });
                });
            }
        });
    });

    process.send('finished');
});
At first I didn't use a child process, but I saw the same behaviour as I do now; that's why I tried this approach.
Hopefully someone who has some experience with Node.js can help me.
I think your issue is that you are trying to read the tempFile while it is still being written to. Right now you are piping the request to the file stream (which proceeds in parallel and asynchronously) and start the reader process. The reader process will then start reading the file in parallel with the write operations. If the reader is faster (it usually will be), it will read the first couple of records but then encounter an end of file and stop reading.
To remedy this, start the reader process only after writing has completely finished, i.e., move everything from the child_process.fork(...) call onward into a callback that runs once tempFile has been fully written (see http://nodejs.org/api/stream.html#stream_writable_end_chunk_encoding_callback).
Reading the file while it is still being written to, akin to the tail command in Unix, is fairly hard in my understanding (google for details on how difficult it is to implement a proper tail).
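A minimal sketch of that ordering, using the same variables as the upload handler above: since request.pipe(tempFile) ends the destination stream for you, the write stream's 'finish' event is a convenient place to fork the worker.

var tempFile = fs.createWriteStream(fileName);
request.pipe(tempFile);

tempFile.on('finish', function() {
    // The file is fully flushed to disk now, so the reader won't hit a premature end of file.
    var task = require('child_process').fork('databaseInsert.js');
    task.on('message', function(childResponse) {
        sys.puts('Finished child process!');
    });
    task.send({
        start : true,
        deviceID : deviceID,
        fileName : fileName
    });
});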
Are you familiar with mongoimport/export?
I used it in the past to export from my db to a CSV file, so you can do the opposite after you upload the file from the mobile client to the server.
It's a shell tool, but you can invoke it from your code with Node.js child_process.spawn.
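For instance, a rough sketch of invoking mongoimport from Node (database, collection and file names are placeholders; adjust the flags for your setup):

var spawn = require('child_process').spawn;

var mongoimport = spawn('mongoimport', [
    '--db', 'test',
    '--collection', 'uploads',
    '--type', 'csv',
    '--headerline',            // take field names from the first CSV row
    '--file', 'upload.csv'
]);

mongoimport.on('close', function(code) {
    console.log('mongoimport exited with code ' + code);
});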

Require in nodejs

The argument of require(...) in node.js is a filename. If I had a module source code in a string code, could I somehow call require(code) and load functions from that string?
I put this into a function for reuse. It creates a file in the os temp directory based on a random hash, requires it and then deletes it.
var fs = require('fs'),
    os = require('os'),
    crypto = require('crypto');

function requireString(moduleString) {
    var token = crypto.randomBytes(20).toString('hex'),
        filename = os.tmpdir() + '/' + token + '.js',
        requiredModule = false;

    // write, require, delete
    fs.writeFileSync(filename, moduleString);
    requiredModule = require(filename);
    fs.unlinkSync(filename);

    return requiredModule;
}
Then you can do:
var carString = "exports.start = function(){ console.log('start'); };",
car = requireString(carString);
console.log("Car:", car);
This is still more of a workaround, but more convenient to use, I think.
A workaround could be to write the module source code to a temporary file ./tmp-file.js, then require('./tmp-file'), and then remove the file.
This is probably not optimal because you would either have to block and write the file synchronously, or put everything requiring that module in the callback to the async write.
A working example for async file write (gist - also includes sync file write):
var http = require('http');
var fs = require('fs');

var helloModuleString = "exports.world = function() { return 'Hello World\\n'; }";

fs.writeFile('./hello.js', helloModuleString, function (err) {
    if (err) return console.log(err);

    var hello = require('./hello');

    http.createServer(function (req, res) {
        res.writeHead(200, {'Content-Type': 'text/plain'});
        res.end(hello.world());
    }).listen(1337, '127.0.0.1');

    console.log('Server running at http://127.0.0.1:1337/');
});
Results in:
$ curl 127.0.0.1:1337
> Hello World
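The synchronous variant mentioned above is even shorter (a sketch; it blocks while the file is written, then requires it immediately):

var fs = require('fs');

fs.writeFileSync('./hello.js', helloModuleString);
var hello = require('./hello');
console.log(hello.world()); // -> Hello World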

Read a text file using Node.js?

I need to pass in a text file in the terminal and then read the data from it, how can I do this?
node server.js file.txt
How do I pass in the path from the terminal, how do I read that on the other side?
You'll want to use the process.argv array to access the command-line arguments to get the filename and the FileSystem module (fs) to read the file. For example:
// Make sure we got a filename on the command line.
if (process.argv.length < 3) {
    console.log('Usage: node ' + process.argv[1] + ' FILENAME');
    process.exit(1);
}

// Read the file and print its contents.
var fs = require('fs'),
    filename = process.argv[2];

fs.readFile(filename, 'utf8', function(err, data) {
    if (err) throw err;
    console.log('OK: ' + filename);
    console.log(data);
});
To break that down a little for you: process.argv will always have at least length two, the zeroth item being the "node" interpreter and the first being the script that node is currently running; items after that were passed on the command line. Once you've pulled a filename from argv, you can use the filesystem functions to read the file and do whatever you want with its contents. Sample usage would look like this:
$ node ./cat.js file.txt
OK: file.txt
This is file.txt!
[Edit] As #wtfcoder mentions, using the "fs.readFile()" method might not be the best idea because it will buffer the entire contents of the file before yielding it to the callback function. This buffering could potentially use lots of memory but, more importantly, it does not take advantage of one of the core features of node.js - asynchronous, evented I/O.
The "node" way to process a large file (or any file, really) would be to use fs.read() and process each available chunk as it is available from the operating system. However, reading the file as such requires you to do your own (possibly) incremental parsing/processing of the file and some amount of buffering might be inevitable.
Using fs with Node:
var fs = require('fs');

try {
    var data = fs.readFileSync('file.txt', 'utf8');
    console.log(data.toString());
} catch (e) {
    console.log('Error:', e.stack);
}
IMHO, fs.readFile() should be avoided because it loads the whole file into memory and it won't call the callback until the entire file has been read.
The easiest way to read a text file is to read it line by line. I recommend a BufferedReader:
var BufferedReader = require("buffered-reader"); // assuming the third-party "buffered-reader" module is installed

new BufferedReader("file", { encoding: "utf8" })
    .on("error", function (error) {
        console.log("error: " + error);
    })
    .on("line", function (line) {
        console.log("line: " + line);
    })
    .on("end", function () {
        console.log("EOF");
    })
    .read();
For complex data structures like .properties or json files you need to use a parser (internally it should also use a buffered reader).
You can use a read stream and pipe to read the file line by line without reading the whole file into memory at once.
var fs = require('fs'),
    es = require('event-stream'),
    os = require('os');

var path = 'file.txt'; // path to the file you want to read

var s = fs.createReadStream(path)
    .pipe(es.split())
    .pipe(es.mapSync(function(line) {
            // pause the readstream
            s.pause();
            console.log("line:", line);
            s.resume();
        })
        .on('error', function(err) {
            console.log('Error:', err);
        })
        .on('end', function() {
            console.log('Finish reading.');
        })
    );
I am posting a complete example which I finally got working. Here I am reading in a file rooms/rooms.txt from a script rooms/rooms.js
var fs = require('fs');
var path = require('path');

var readStream = fs.createReadStream(path.join(__dirname, '../rooms') + '/rooms.txt', 'utf8');

let data = '';
readStream.on('data', function(chunk) {
    data += chunk;
}).on('end', function() {
    console.log(data);
});
The async way of life:
#! /usr/bin/node

const fs = require('fs');

function readall(stream) {
    return new Promise((resolve, reject) => {
        const chunks = [];
        stream.on('error', (error) => reject(error));
        stream.on('data', (chunk) => chunk && chunks.push(chunk));
        stream.on('end', () => resolve(Buffer.concat(chunks)));
    });
}

function readfile(filename) {
    return readall(fs.createReadStream(filename));
}

(async () => {
    let content = await readfile('/etc/ssh/moduli').catch((e) => {});
    if (content)
        console.log("size:", content.length,
            "head:", content.slice(0, 46).toString());
})();

Writing to files in Node.js

I've been trying to find a way to write to a file when using Node.js, but with no success. How can I do that?
There are a lot of details in the File System API. The most common way is:
const fs = require('fs');

fs.writeFile("/tmp/test", "Hey there!", function(err) {
    if (err) {
        return console.log(err);
    }
    console.log("The file was saved!");
});

// Or
fs.writeFileSync('/tmp/test-sync', 'Hey there!');
Currently there are three ways to write a file:
fs.write(fd, buffer, offset, length, position, callback)
You need to wait for the callback to ensure that the buffer is written to disk. It's not buffered.
fs.writeFile(filename, data, [encoding], callback)
All data must be stored at the same time; you cannot perform sequential writes.
fs.createWriteStream(path, [options])
Creates a WriteStream, which is convenient because you don't need to wait for a callback. But again, it's not buffered.
A WriteStream, as the name says, is a stream. A stream by definition is “a buffer” containing data which moves in one direction (source ► destination). But a writable stream is not necessarily “buffered”. A stream is “buffered” when you write n times, and at time n+1, the stream sends the buffer to the kernel (because it's full and needs to be flushed).
In other words: “A buffer” is the object. Whether or not it “is buffered” is a property of that object.
If you look at the code, the WriteStream inherits from a writable Stream object. If you pay attention, you’ll see how they flush the content; they don't have any buffering system.
If you write a string, it’s converted to a buffer, and then sent to the native layer and written to disk. When writing strings, they're not filling up any buffer. So, if you do:
write("a")
write("b")
write("c")
You're doing:
fs.write(new Buffer("a"))
fs.write(new Buffer("b"))
fs.write(new Buffer("c"))
That’s three calls to the I/O layer. Although you're using “buffers”, the data is not buffered. A buffered stream would do: fs.write(new Buffer ("abc")), one call to the I/O layer.
As of Node.js v0.12 (stable version announced 02/06/2015), two relevant functions are supported: cork() and uncork(). It seems that these functions will finally allow you to buffer/flush the write calls.
For example, in Java there are some classes that provide buffered streams (BufferedOutputStream, BufferedWriter...). If you write three bytes, these bytes will be stored in the buffer (memory) instead of doing an I/O call just for three bytes. When the buffer is full the content is flushed and saved to disk. This improves performance.
I'm not discovering anything, just remembering how a disk access should be done.
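A minimal sketch of cork()/uncork() batching the three writes from above into a single flush (the file name is illustrative):

var fs = require('fs');
var stream = fs.createWriteStream('batched.txt');

stream.cork();              // start collecting writes in memory
stream.write('a');
stream.write('b');
stream.write('c');
process.nextTick(function() {
    stream.uncork();        // flush the buffered writes in one go
});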
You can of course make it a little more advanced. Non-blocking, writing bits and pieces, not writing the whole file at once:
var fs = require('fs');
var stream = fs.createWriteStream("my_file.txt");

stream.once('open', function(fd) {
    stream.write("My first row\n");
    stream.write("My second row\n");
    stream.end();
});
Synchronous Write
fs.writeFileSync(file, data[, options])
fs = require('fs');
fs.writeFileSync("foo.txt", "bar");
Asynchronous Write
fs.writeFile(file, data[, options], callback)
fs = require('fs');
fs.writeFile('foo.txt', 'bar', (err) => { if (err) throw err; });
Where
file <string> | <Buffer> | <URL> | <integer> filename or file descriptor
data <string> | <Buffer> | <Uint8Array>
options <Object> | <string>
callback <Function>
Worth reading the official File System (fs) docs.
Update: async/await
fs = require('fs');
util = require('util');
writeFile = util.promisify(fs.writeFile);
fn = async () => { await writeFile('foo.txt', 'bar'); }
fn()
var fs = require('fs');
var path = 'public/uploads/file.txt',
    buffer = new Buffer("some content\n");

fs.open(path, 'w', function(err, fd) {
    if (err) {
        throw 'error opening file: ' + err;
    }

    fs.write(fd, buffer, 0, buffer.length, null, function(err) {
        if (err) throw 'error writing file: ' + err;
        fs.close(fd, function() {
            console.log('file written');
        });
    });
});
The answers provided are dated and a newer way to do this is:
const fsPromises = require('fs').promises
await fsPromises.writeFile('/path/to/file.txt', 'data to write')
see documents here for more info
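If top-level await is not available in your module format, the same call can simply be wrapped in an async function (a minimal sketch):

const fsPromises = require('fs').promises;

async function save() {
    await fsPromises.writeFile('/path/to/file.txt', 'data to write');
}

save().catch(console.error);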
I liked Index of ./articles/file-system.
It worked for me.
See also How do I write files in node.js?.
fs = require('fs');
fs.writeFile('helloworld.txt', 'Hello World!', function (err) {
    if (err)
        return console.log(err);
    console.log('Wrote Hello World in file helloworld.txt, just check it');
});
Contents of helloworld.txt:
Hello World!
Update:
On Linux, Node writes to the current directory; on some other systems it apparently doesn't, so I add this comment just in case:
Use ROOT_APP_PATH = fs.realpathSync('.'); console.log(ROOT_APP_PATH); to see where the file is written.
I know the question asked about "write", but in a more general sense "append" might be useful in some cases, as it is easy to use in a loop to add text to a file (whether the file exists or not). Use a "\n" if you want to add lines, e.g.:
var fs = require('fs');

for (var i = 0; i < 10; i++) {
    fs.appendFileSync("junk.csv", "Line:" + i + "\n");
}
OK, it's quite simple, as Node has built-in functionality for this: the fs module, which stands for File System and is basically the Node.js File System module.
So first require it in your server.js file like this:
var fs = require('fs');
fs has a few methods for writing to a file, but my preferred way is using appendFile. This appends the content to the file, and if the file doesn't exist it creates one. The code could look like this:
fs.appendFile('myFile.txt', 'Hi Ali!', function (err) {
    if (err) throw err;
    console.log('Thanks, It\'s saved to the file!');
});
You may write to a file using fs (file system) module.
Here is an example of how you may do it:
const fs = require('fs');

const writeToFile = (fileName, newData, callback) => {
    fs.open(fileName, 'wx', (error, fileDescriptor) => {
        if (!error && fileDescriptor) {
            // Do something with the file here ...
            fs.writeFile(fileDescriptor, newData, (error) => {
                if (!error) {
                    fs.close(fileDescriptor, (error) => {
                        if (!error) {
                            callback(false);
                        } else {
                            callback('Error closing the file');
                        }
                    });
                } else {
                    callback('Error writing to new file');
                }
            });
        } else {
            callback('Could not create new file, it may already exist');
        }
    });
};
You might also want to get rid of this callback-inside-callback code structure by using Promises and async/await. This makes the asynchronous code structure much flatter. There is a handy util.promisify(original) function for that; it allows us to switch from callbacks to promises. Take a look at the example with fs functions below:
// Dependencies.
const util = require('util');
const fs = require('fs');

// Promisify "error-back" functions.
const fsOpen = util.promisify(fs.open);
const fsWrite = util.promisify(fs.writeFile);
const fsClose = util.promisify(fs.close);

// Now we may create an 'async' function with 'await's.
async function doSomethingWithFile(fileName, newData) {
    const fileDescriptor = await fsOpen(fileName, 'wx');

    // Do something with the file here...
    await fsWrite(fileDescriptor, newData);

    await fsClose(fileDescriptor);
}
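For example, calling it with an illustrative file name and handling a possible failure:

doSomethingWithFile('example.txt', 'some data')
    .then(() => console.log('done'))
    .catch(console.error);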
You can write to files with streams.
Just do it like this:
const fs = require('fs');
const stream = fs.createWriteStream('./test.txt');
stream.write("Example text");
var fs = require('fs');
var path = process.cwd(); // write next to the current working directory

fs.writeFile(path + "\\message.txt", "Hello", function(err) {
    if (err) throw err;
    console.log("success");
});
For example: read a file and write its contents to another file:
var fs = require('fs');
var path = process.cwd();

fs.readFile(path + "\\from.txt", function(err, data) {
    if (err)
        console.log(err);
    else {
        fs.writeFile(path + "\\to.text", data, function(erro) {
            if (erro)
                console.log("error : " + erro);
            else
                console.log("success");
        });
    }
});
Here we use 'w+' to open the file for both reading and writing; if the file is not found at the given path, it is created automatically.
fs.open(path, 'w+', function(err, fd) {
    if (err) {
        console.log("ERROR !! " + err);
    } else {
        var content = Buffer.from('content'); // what you want to write
        fs.write(fd, content, 0, content.length, null, function(err) {
            if (err)
                console.log("ERROR !! " + err);
            fs.close(fd, function() {
                console.log('written success');
            });
        });
    }
});
Here content is what you want to write to the file, and content.length is its length.
Here is a sample of how to read a CSV file from local disk and write a CSV file to local disk.
var csvjson = require('csvjson'),
    fs = require('fs'),
    mongodb = require('mongodb'),
    MongoClient = mongodb.MongoClient,
    mongoDSN = 'mongodb://localhost:27017/test',
    collection;

function uploadcsvModule() {
    var data = fs.readFileSync('/home/limitless/Downloads/orders_sample.csv', { encoding : 'utf8' });
    var importOptions = {
        delimiter : ',', // optional
        quote : '"'      // optional
    }, ExportOptions = {
        delimiter : ",",
        wrap : false
    };

    var myobj = csvjson.toSchemaObject(data, importOptions);
    var exportArr = [], importArr = [];

    myobj.forEach(d => {
        if (d.orderId == undefined || d.orderId == '') {
            exportArr.push(d);
        } else {
            importArr.push(d);
        }
    });

    var csv = csvjson.toCSV(exportArr, ExportOptions);

    MongoClient.connect(mongoDSN, function(error, db) {
        collection = db.collection("orders");
        collection.insertMany(importArr, function(err, result) {
            fs.writeFile('/home/limitless/Downloads/orders_sample1.csv', csv, { encoding : 'utf8' });
            db.close();
        });
    });
}

uploadcsvModule();
fs.createWriteStream(path[,options])
options may also include a start option to allow writing data at some position past the beginning of the file. Modifying a file rather than replacing it may require a flags mode of r+ rather than the default mode w. The encoding can be any one of those accepted by Buffer.
If autoClose is set to true (default behavior) on 'error' or 'finish' the file descriptor will be closed automatically. If autoClose is false, then the file descriptor won't be closed, even if there's an error. It is the application's responsibility to close it and make sure there's no file descriptor leak.
Like ReadStream, if fd is specified, WriteStream will ignore the path argument and will use the specified file descriptor. This means that no 'open' event will be emitted. fd should be blocking; non-blocking fds should be passed to net.Socket.
If options is a string, then it specifies the encoding.
After reading this long article, you should understand how it works.
So, here's an example of createWriteStream():
/* fs.createWriteStream() returns a WritableStream, and we want the encoding as 'utf-8' */
/* The WritableStream has the method write() */
const fs = require('fs');

fs.createWriteStream('out.txt', 'utf-8')
    .write('hello world');
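The start option and the 'r+' flag described above can be combined to patch an existing file in place; a minimal sketch (file name and offset are illustrative):

const fs = require('fs');

// 'r+' keeps the existing contents instead of truncating; start: 10 begins writing at byte offset 10.
const stream = fs.createWriteStream('data.bin', { flags: 'r+', start: 10 });
stream.end('patched');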
Point 1:
If you want to write something into a file, meaning it will remove anything already saved in the file and write the new content, use fs.promises.writeFile().
Point 2:
If you want to append something to a file, meaning it will not remove anything already saved in the file but add the new item to the file content, then first read the file, add the content to the value you read, and write it back to the file. So use fs.promises.readFile and fs.promises.writeFile().
Example 1: I want to write a JSON object to my JSON file.
const fs = require('fs');

const data = { table: [{ id: 1, name: 'my name' }] };
const file_path = './my_data.json';

writeFile(file_path, data);

async function writeFile(filename, writedata) {
    try {
        await fs.promises.writeFile(filename, JSON.stringify(writedata, null, 4), 'utf8');
        console.log('data is written successfully in the file');
    } catch (err) {
        console.log('not able to write data in the file');
    }
}
Example 2: if you want to append data to a JSON file.
You want to add the data {id: 2, name: 'your name'} to the file my_data.json in the same folder. Just call the append_data(file_path, data) function.
It will append the data to the JSON file if the file exists, or it will create the file and add the data to it.
const fs = require('fs');

const data = { id: 2, name: 'your name' };
const file_path = './my_data.json';

append_data(file_path, data);

async function append_data(filename, data) {
    if (fs.existsSync(filename)) {
        var read_data = await readFile(filename);
        if (read_data == false) {
            console.log('not able to read file');
        } else {
            read_data.table.push(data); // data must have the table array in it, like example 1
            var dataWrittenStatus = await writeFile(filename, read_data);
            if (dataWrittenStatus == true) {
                console.log('data added successfully');
            } else {
                console.log('data adding failed');
            }
        }
    } else {
        // file does not exist yet: create it with the new data in a fresh table array
        var dataWrittenStatus = await writeFile(filename, { table: [data] });
        if (dataWrittenStatus == true) {
            console.log('file created and data added successfully');
        } else {
            console.log('data adding failed');
        }
    }
}

async function readFile(filePath) {
    try {
        const data = await fs.promises.readFile(filePath, 'utf8');
        return JSON.parse(data);
    } catch (err) {
        return false;
    }
}

async function writeFile(filename, writedata) {
    try {
        await fs.promises.writeFile(filename, JSON.stringify(writedata, null, 4), 'utf8');
        return true;
    } catch (err) {
        return false;
    }
}
You can use the library easy-file-manager.
Install it first from npm:
npm install easy-file-manager
Sample to upload and remove files:
var filemanager = require('easy-file-manager');

var path = "/public";
var filename = "test.jpg";
var data; // buffered image

filemanager.upload(path, filename, data, function(err) {
    if (err) console.log(err);
});

filemanager.remove(path, filename, function(err) {
    if (err) console.log(err);
});
You can write in a file by the following code example:
// Assumes `datapath` points at your app's data directory and `callback`
// is an error-first style callback available in the surrounding scope.
var fs = require('fs');
var data = [{ 'test': '123', 'test2': 'Lorem Ipsem ' }];

fs.open(datapath + '/data/topplayers.json', 'wx', function (error, fileDescriptor) {
    if (!error && fileDescriptor) {
        var stringData = JSON.stringify(data);
        fs.writeFile(fileDescriptor, stringData, function (error) {
            if (!error) {
                fs.close(fileDescriptor, function (error) {
                    if (!error) {
                        callback(false);
                    } else {
                        callback('Error in close file');
                    }
                });
            } else {
                callback('Error in writing file.');
            }
        });
    }
});
