I'm having issues writing a file to a specific directory when calling fs.writeFile from Node.js.
It ends up creating the file in the parent directory from which the script is called.
function ensureDirectoryExists(filePath) {
  const dirname = path.dirname(filePath);
  if (fs.existsSync(dirname)) {
    return true;
  }
  ensureDirectoryExists(dirname);
  fs.mkdirSync(dirname);
}

function getFilePath(fileName, resultPath) {
  return path.join(resultPath, fileName);
}

export function writeDataToFile(fileName, resultPath) {
  fs.writeFile(getFilePath(fileName, resultPath), data, function (err) {
    if (err) {
      console.log('Error: ', err);
    }
    console.log('Saved successfully');
  });
}
My method writeDataToFile is called from my file 'test.js', and I'm passing the resultPath to writeDataToFile from 'test.js' as 'parent/childDir/data/resultsDir'.
However, the file is being created at 'parent/childDir/tests/testDir1'.
How can I fix this and create the directory at run time, before calling 'getFilePath()', using the 'ensureDirectoryExists()' method?
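One way to do this, sketched below under the assumption that the data to write is passed in as an extra parameter, is to build the directory tree for the full target path before writing, either with the ensureDirectoryExists() helper above or with fs.mkdirSync and its recursive option:
function writeDataToFile(fileName, resultPath, data) {
  const filePath = getFilePath(fileName, resultPath);
  // Create the directory tree for the target path if it does not exist yet.
  fs.mkdirSync(path.dirname(filePath), { recursive: true });
  fs.writeFile(filePath, data, function (err) {
    if (err) {
      return console.log('Error: ', err);
    }
    console.log('Saved successfully');
  });
}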
I am writing software which, among other things, downloads a zip archive using the Dropbox API and then unzips that archive using yauzl.
The way the files are stored in and downloaded from Dropbox often results in nested folders, and I need to keep that structure.
However, my implementation of yauzl cannot unzip while keeping that nested folder structure: if there is a nested folder in the archive, it does not unzip at all.
Here is my unzip function, which is the default yauzl example with the addition of local file write at the end.
const yauzl = require("yauzl");
const fs = require("fs");
const path = require("path");

const unzip = () => {
  let zipPath = "pathToFile.zip"
  let extractPath = "pathToExtractLocation"
  yauzl.open(zipPath, {lazyEntries: true}, function(err, zipfile) {
    if (err) throw err;
    zipfile.readEntry();
    zipfile.on("entry", function(entry) {
      if (/\/$/.test(entry.fileName)) {
        // Directory file names end with '/'.
        // Note that entries for directories themselves are optional.
        // An entry's fileName implicitly requires its parent directories to exist.
        zipfile.readEntry();
      } else {
        // file entry
        zipfile.openReadStream(entry, function(err, readStream) {
          if (err) throw err;
          readStream.on("end", function() {
            zipfile.readEntry();
          });
          const writer = fs.createWriteStream(path.join(extractPath, entry.fileName));
          readStream.pipe(writer);
        });
      }
    });
  });
}
Removing the if (/\/$/.test(entry.fileName)) check treats the top-level folder as a file, extracting it with no file extension and a size of 0 KB. What I want is to extract the archive including subfolders (to at least a depth of 2, being aware of the risk of zip bombing).
Is that possible using yauzl?
The code needs to create the directory tree at the extract path. You can use fs.mkdir with the recursive option to ensure that a directory exists before extracting to it.
if (/\/$/.test(entry.fileName)) {
  // Directory file names end with '/'.
  // Note that entries for directories themselves are optional.
  // An entry's fileName implicitly requires its parent directories to exist.
  zipfile.readEntry();
} else {
  // file entry
  fs.mkdir(
    path.join(extractPath, path.dirname(entry.fileName)),
    { recursive: true },
    (err) => {
      if (err) throw err;
      zipfile.openReadStream(entry, function (err, readStream) {
        if (err) throw err;
        readStream.on("end", function () {
          zipfile.readEntry();
        });
        const writer = fs.createWriteStream(
          path.join(extractPath, entry.fileName)
        );
        readStream.pipe(writer);
      });
    }
  );
}
I want to save the output of node.js & console.log as a JSON file.
The first part is working fine, but I am unable to save it to a file. I also tried to save it via:
$ node filename.js > test.json
That works, but it doesn't format the file properly.
var gplay = require('google-play-scraper');
const fs = require('fs');

function def() {
  gplay.app({appId: 'com.sablostudio.printing3d.town.builder', country: 'us'}).then(console.log);
}
def();

fs.writeFile('./newconstomer.json', JSON.stringify(--------???, null, 2), err => {
  if (err) {
    console.log(err);
  } else {
    console.log("file done");
  }
});
You can do this with writeFile in this way:
First, you need a logger function that takes the log and saves it to the newconstomer.json file:
function logger(log) {
  fs.writeFile('./newconstomer.json', JSON.stringify(log), function (err) {
    if (err) {
      console.log(err);
    }
    console.log("file done");
  });
}
Now, replace the console.log method with the logger function:
function def() {
  gplay.app({appId: 'com.sablostudio.printing3d.town.builder', country: 'us'}).then( logger ); // <--- place the logger function here
}
def();
Explanation:
The logger function takes a log parameter and converts it to a JSON string with the JSON.stringify() method before writing it to the file.
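Since the question mentions that the redirected output wasn't formatted properly, note that JSON.stringify also accepts indentation arguments; for example, a two-space indent:
fs.writeFile('./newconstomer.json', JSON.stringify(log, null, 2), function (err) {
  if (err) {
    console.log(err);
  }
  console.log("file done");
});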
I am reading the below data from a file, and now I want to create a JSON object from it. How can I do that using the Node.js fs module?
app.js
var path = "./ditLogs/" + file;
fs.readFile(path, function(err, data) {
  console.log('reading file data', data.toString());
});
Here is the data in the file that I need to create JSON for:
file.txt
{"event":"test data"}
{"event":"test data"}
{"event":"test data"}
You can use this sample function, applied to each line (the file holds one JSON object per line):
// Parses one line read from the file; returns an empty object if it is not valid JSON.
function parseJSON(strFromFile) {
  try {
    return JSON.parse(strFromFile);
  } catch (e) {
    return {};
  }
}
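A minimal sketch that ties this together, reading the file and parsing each line, assuming the same file path used in the question:
fs.readFile("./ditLogs/" + file, 'utf8', function (err, data) {
  if (err) {
    return console.error(err);
  }
  // The file holds one JSON object per line, so parse each non-empty line.
  var events = data
    .split('\n')
    .filter(function (line) { return line.trim().length > 0; })
    .map(parseJSON);
  console.log(events); // [{ event: 'test data' }, { event: 'test data' }, ... ]
});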
FS Module
While developing in NodeJS, you sometimes need to access the computer's file system. The FS module is a utility that assists with this process.
FS includes functions that allow different operations to be performed on the file system via a wrapper around the underlying API.
It should be included/required/imported into any JS that needs to interact with the file system API:
var fs = require("fs");
These are the different methods you can use on this API, all of which are asynchronous:
fs.readFile
// Asynchronous read
fs.readFile('input.txt', function (err, data) {
  if (err) {
    return console.error(err);
  }
  console.log("Asynchronous read: " + data.toString());
});
fs.writeFile
fs.writeFile('input.txt', 'Simply Easy Learning!', function(err) {
  if (err) {
    return console.error(err);
  }
  console.log("Data written successfully!");
  console.log("Let's read newly written data");
  fs.readFile('input.txt', function (err, data) {
    if (err) {
      return console.error(err);
    }
    console.log("Asynchronous read: " + data.toString());
  });
});
fs.open
// Asynchronous - Opening File
console.log("Going to open file!");
fs.open('input.txt', 'r+', function(err, fd) {
  if (err) {
    return console.error(err);
  }
  console.log("File opened successfully!");
});
fs.stat (provides information about the file) e.g.
fs.stat('input.txt', function (err, stats) {
  if (err) {
    return console.error(err);
  }
  console.log(stats);
  console.log("Got file info successfully!");
  // Check file type
  console.log("isFile ? " + stats.isFile());
  console.log("isDirectory ? " + stats.isDirectory());
});
fs.read (similar to readFile, but it reads from a file descriptor into a buffer, so it should not be the first choice for simply reading a file)
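A short sketch of how it is used, with the descriptor obtained from fs.open:
fs.open('input.txt', 'r', function (err, fd) {
  if (err) {
    return console.error(err);
  }
  var buf = Buffer.alloc(1024);
  // Read up to 1024 bytes from the start of the file into buf.
  fs.read(fd, buf, 0, buf.length, 0, function (err, bytesRead, buffer) {
    if (err) {
      return console.error(err);
    }
    console.log(buffer.slice(0, bytesRead).toString());
    fs.close(fd, function (err) {
      if (err) {
        console.log(err);
      }
    });
  });
});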
fs.close
// Close the opened file (fd is the file descriptor returned by fs.open).
fs.close(fd, function(err) {
  if (err) {
    console.log(err);
  }
  console.log("File closed successfully.");
});
fs.ftruncate (truncate an opened file)
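A minimal sketch, assuming fd is a descriptor from a prior fs.open call:
// Truncate the opened file to its first 10 bytes.
fs.ftruncate(fd, 10, function (err) {
  if (err) {
    return console.error(err);
  }
  console.log("File truncated successfully!");
});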
fs.unlink (delete a file by path)
fs.unlink('input.txt', function(err) {
  if (err) {
    return console.error(err);
  }
  console.log("File deleted successfully!");
});
fs.mkdir (make new directory)
fs.mkdir('/tmp/test', function(err) {
  if (err) {
    return console.error(err);
  }
  console.log("Directory created successfully!");
});
fs.readdir (reads a directory)
fs.readdir("/tmp/",function(err, files){
if (err) {
return console.error(err);
}
files.forEach( function (file){
console.log( file );
});
});
fs.rmdir (remove directory)
fs.rmdir("/tmp/test",function(err){
if (err) {
return console.error(err);
}
console.log("Going to read directory /tmp");
fs.readdir("/tmp/",function(err, files){
if (err) {
return console.error(err);
}
files.forEach( function (file){
console.log( file );
});
});
});
Synchronous functions:
fs.readFileSync
// Synchronous read
var data = fs.readFileSync('input.txt');
console.log("Synchronous read: " + data.toString());
fs.writeFileSync
// Synchronous write (writeFileSync does not return a value)
fs.writeFileSync('input.txt', 'asdasdasd');
Simply read the file using the line-by-line package: less headache, more control over the reading process (it can pause and resume reading, close the file descriptor on demand, and skip N lines), all with less code.
1) install:
npm i --save line-by-line
npm i --save lodash
2) implement
var lineByLine = require('line-by-line'),
    _ = require('lodash'),
    path = require('path');

var lines = [];
// 'file' is the log file name from the question (inside ./ditLogs).
var filePath = path.join(__dirname, "ditLogs", file);
var fileReader = new lineByLine(filePath);

fileReader.on('line', function(line) {
  line = JSON.parse(line);
  if (_.isPlainObject(line) && !_.isEmpty(line)) {
    lines.push(line);
  }
  // optional
  doSomethingWithLine(line);
});

function doSomethingWithLine(line) {
  // for example, you can save it to a db, send it somewhere using the request library, or just show it in the console
}

fileReader.on('error', function(error) {
  console.error(error);
  process.exit(-1);
});

fileReader.on('end', function() {
  doSomethingAfterParsingAllLines(lines);
});

function doSomethingAfterParsingAllLines(records) {
  // do something with the data
}
'use strict';
const fs = require('fs');
let rawdata = fs.readFileSync('student.json');
let student = JSON.parse(rawdata);
console.log(student);
I am trying to save a file to a local directory from my S3 bucket. When I run the code everything seems to work, because no errors appear in the console, but when I open the directory the file size is just 15 bytes, and it's the same story with any file I try to download.
I tried to download a text file and inside I found "[object Object]" written. Can anyone help me? This is the function code:
var s3 = new AWS.S3();
s3.getObject(
  { Bucket: "chat-mp-files", Key: conf[1] },
  function (error, data) {
    if (error != null) {
      console.log(error);
    } else {
      fs.closeSync(fs.openSync(pathstr + '/r/' + conf[1], 'w'));
      fs.writeFile(pathstr + '/r/' + conf[1], data, function (err) {
        if (err) {
          console.log(err);
        } else {
          console.log("ok");
        }
      });
    }
  }
);
I have just solved my issue using the official docs section provided by Amazon here
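For reference, "[object Object]" in the output usually means the whole getObject response object was written rather than its Body property (a Buffer containing the file contents). A minimal sketch of that change, keeping the rest of the original code as-is:
fs.writeFile(pathstr + '/r/' + conf[1], data.Body, function (err) {
  if (err) {
    console.log(err);
  } else {
    console.log("ok");
  }
});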
The following reads and imports many CSV files from disk into MongoDB, but NodeJS won't exit after importing all the files unless it goes through the resizePhoto() function (which contains a process.exit after resizing the images).
How can I have it close properly after importing all files without interrupting? If I add a process.exit in the .on('end') handler, it exits after importing the first file.
var importData = function(fileName) {
  // Get file from disk.
  var filePath = path.join(folder, fileName);
  // Read and import the CSV file.
  csv.fromPath(filePath, {
    objectMode: true,
    headers: keys
  })
  .on('data', function (data) {
    var Obj = new models[fileName](data);
    models[fileName].find({}).remove().exec();
    Obj.save(function (err, importedObj) {
      if (err) {
        console.log(fileName, err);
      } else if (fileName === 'PHOTOS') {
        resizePhoto(importedObj);
      }
    });
  })
  .on('end', function() {
    console.log(fileName + ': Imported.');
  });
};
module.exports = importData;
Use the async module's parallel method (https://github.com/caolan/async#parallel). It can run your tasks (the imports) in parallel and call the final handler (which exits) after all tasks end.
In your case:
1) Somewhere in project
csvImp = require('importData.js');

async.parallel([
  function(done) { csvImp(name1, done); },
  function(done) { csvImp(name2, done); },
  ...
  function(done) { csvImp(nameN, done); }
],
function(err, results) {
  process.exit();
});
2) in importData.js
var importData = function(fileName, done) {
  ...
  .on('end', function() {
    console.log(fileName + ': Imported.');
    done();
  });
So, next you need to prepare a list of tasks. Something like:
names.forEach(function(n) {
  tasks.push(function(done) { csvImp(n, done); });
});
And call async.parallel with tasks.
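For example, a short sketch assuming tasks is the array built above:
async.parallel(tasks, function(err, results) {
  process.exit();
});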
Good luck)