Replacing a line in a file using Node.js - JavaScript

I need to find a line in a file and replace it with a new one, all using NodeJS.
Here's what I've done to achieve that:
var fs = require('fs');
fs.readFile('infra_setting.conf', 'utf-8', function (err, data) {
  if (err) throw err;
  console.log(data);
});
var fs = require('fs');
fs.readFile('myfile.conf', 'utf8', function (err, data) {
  if (err) {
    return console.log(err);
  }
  var result = data.replace(/example/g, 'example: 12345678');
  fs.writeFile('myfile.conf', result, 'utf8', function (err) {
    if (err) return console.log(err);
  });
});
The problem I have is that the string on that line keeps changing.
1st time => example : 2222
2nd time => example : something else
Is there a way to locate the line and replace it with Node.js?
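One way: since only the value after the key changes, anchor a multiline regex to the key and match the rest of the line, so whatever currently follows gets replaced. A minimal sketch, assuming the target line always starts with "example" followed by a colon:
var fs = require('fs');

fs.readFile('myfile.conf', 'utf8', function (err, data) {
  if (err) return console.log(err);
  // ^example\s*:.*$ matches the whole line starting with "example",
  // whatever value currently follows the colon; the m flag makes
  // ^ and $ anchor to line boundaries instead of the whole string.
  var result = data.replace(/^example\s*:.*$/m, 'example : 12345678');
  fs.writeFile('myfile.conf', result, 'utf8', function (err) {
    if (err) return console.log(err);
  });
});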

Here's an example where I change a line from a .gitignore file:
// simulate fs.readFileSync('./gitignore', { encoding: 'utf8' })
const fileContent = 'node_modules\r\npackage-lock.json\r\nyarn.lock\r\n*.code-workspace\r\n'

function changeLine(content, lineString, newLineString) {
  const delimiter = '\r\n'
  const parts = content.split(delimiter).filter(v => v.length)
  const lineIndex = parts.findIndex(v => v.includes(lineString))
  if (lineIndex === -1) return content // line not found, leave content untouched
  parts[lineIndex] = newLineString
  return parts.join(delimiter)
}

console.log(fileContent)
const change1 = changeLine(fileContent, 'node_modules', 'not_node_modules')
console.log(change1)
const change2 = changeLine(change1, 'package-lock.json', '.vscode')
console.log(change2)
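To apply this to a real file, read it, transform it, and write the result back. A quick sketch, assuming the file uses the same \r\n line endings as above; changeLineInFile is a hypothetical wrapper, not part of the original answer:
const fs = require('fs')

// hypothetical helper: rewrites one matching line of a file in place
function changeLineInFile(path, lineString, newLineString) {
  const content = fs.readFileSync(path, { encoding: 'utf8' })
  fs.writeFileSync(path, changeLine(content, lineString, newLineString))
}

changeLineInFile('./.gitignore', 'node_modules', 'not_node_modules')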

Try this and tell me if it works or not:
var fs = require('fs');

function searchReplaceFile(regexpFind, replace, fileName) {
  var file = fs.createReadStream(fileName, 'utf8');
  var newDATA = '';
  file.on('data', function (chunk) {
    newDATA += chunk.toString().replace(regexpFind, replace);
  });
  file.on('end', function () {
    fs.writeFile(fileName, newDATA, function (err) {
      if (err) {
        return console.log(err);
      } else {
        console.log('Updated!');
      }
    });
  });
}

searchReplaceFile(/example/g, 'example: 12345678', 'infra_setting.conf');
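One caveat: replacing chunk by chunk can miss a match that spans two chunks. If the file fits in memory, a whole-file variant avoids that entirely; a minimal sketch using fs.promises (available since Node 10):
const fsp = require('fs').promises;

// read the whole file, replace, write it back
async function searchReplaceWholeFile(regexpFind, replace, fileName) {
  const data = await fsp.readFile(fileName, 'utf8');
  await fsp.writeFile(fileName, data.replace(regexpFind, replace), 'utf8');
}

searchReplaceWholeFile(/example/g, 'example: 12345678', 'infra_setting.conf')
  .then(() => console.log('Updated!'))
  .catch(console.error);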

Related

parse array of string data into json

I have a folder of .TXT files containing plain text. How can I insert that plain text into a MongoDB database using the mongoose and fs modules in Node.js?
const fs = require('fs');

var parsedata;
var parsedTwice = [];
let fileName;

// Note: readdir is asynchronous, so fileName is only set once this
// callback runs; the readFile below does not wait for it.
fs.readdir('D:/scnner_data/public', (err, data) => {
  if (err) throw err;
  console.log(data);
  fileName = data;
  console.log(fileName);
});

fs.readFile('D:/scnner_data/public/DC221020042103SC.TXT', 'utf8', (err, data) => {
  if (err) {
    console.log(err);
  } else {
    // split the file into lines, then each line into semicolon-separated fields
    parsedata = data.split('\n');
    for (let item of parsedata) {
      parsedTwice.push(item.split(';'));
    }
    // console.log('successful', parsedTwice);
    // console.log('this line is being read: ' + parsedata);
  }
});
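To get the parsed rows into MongoDB, one approach is to define a model and insert inside the readFile callback once parsedTwice is filled. A minimal sketch; the Scan model, its fields shape, and the connection string are all assumptions to adapt to your setup:
const mongoose = require('mongoose');

// hypothetical model: one document per parsed line
const Scan = mongoose.model('Scan', new mongoose.Schema({ fields: [String] }));

async function saveRows(rows) {
  await mongoose.connect('mongodb://localhost:27017/scans'); // assumed local instance
  // insertMany writes all parsed lines in a single round trip
  await Scan.insertMany(rows.map(fields => ({ fields })));
  await mongoose.disconnect();
}

// call this at the end of the readFile callback, e.g. saveRows(parsedTwice)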

How to display contents of all files from a folder in console using node.js or express

I couldn't get the contents of all files in a folder using Node.js.
I can get the contents of a single file using the read function, but not all files at once.
I hope this is correct:
const testFolder = './tests/';
const fs = require('fs');

fs.readdir(testFolder, (err, files) => {
  files.forEach(file => {
    // readdir returns bare file names, so join them with the folder path
    fs.readFile(testFolder + file, 'utf8', function (err, data) {
      if (err) {
        return console.log(err);
      }
      console.log(data);
    });
  });
});
I found the answer. Here's my solution:
function uAll() {
  var absPath = __dirname + "/Assignment1/" + "../data/users/";
  console.log(absPath);
  fs.readdir(absPath, function (err, files) {
    // handle error
    if (err) {
      return console.log('Unable to scan directory: ' + err);
    }
    // list all files using forEach
    files.forEach(function (file) {
      // strip the extension to get the phone number used as the key
      var phone = file.split(".");
      // fops is the author's own file-operations helper (not shown here)
      fops.read('users', phone[0], function (err, newObj) {
        if (!err && newObj) { // read was successful
          console.log("Read User: ", newObj);
        } else { // error while reading
          console.log("User not found");
        }
      });
    });
  });
}
You did a good job. I just want to share my idea.
const lib = {};
lib.base = "/Assignment1/" + "../data/users/";

lib.read = function (dir, file, callback) {
  fs.readFile(lib.base + dir + '/' + file + '.json', 'utf-8', function (err, data) {
    if (!err && data) {
      // helpers.parseJsonToObject is the author's helper that
      // JSON-parses without throwing (not shown here)
      const parsedData = helpers.parseJsonToObject(data);
      callback(false, parsedData);
    } else {
      callback(err, data);
    }
  });
};

lib.list = function (dir, callback) {
  fs.readdir(lib.base + dir + '/', function (err, data) {
    if (!err && data && data.length > 0) {
      let trimmedFileName = [];
      data.forEach(fileName => {
        trimmedFileName.push(fileName.replace('.json', ''));
      });
      callback(false, trimmedFileName);
    } else {
      callback(err, data);
    }
  });
};
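Used together, lib.list enumerates the user files and lib.read loads each one; a short usage sketch built only on the two functions above:
// list every user file, then read each one by its trimmed name
lib.list('users', function (err, names) {
  if (err) return console.log(err);
  names.forEach(function (name) {
    lib.read('users', name, function (err, user) {
      if (!err && user) console.log('Read User:', user);
    });
  });
});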

append JSON parsed from XML using NodeJS

I have three different XML files which I have to convert into JSON output. I am trying to append all of their output into one JSON file. Here is my code:
const fs = require('fs');
const xml2js = require('xml2js');

const parser = new xml2js.Parser({
  explicitArray: true
});

fs.readFile('sample.xml', (err, data) => {
  parser.parseString(data, (err, result) => {
    let output = JSON.stringify(result.planes.plane);
    fs.writeFile('output.json', output, 'utf8', (err) => {
      if (err) {
        throw err;
      } else {
        console.log('file created..');
      }
    });
  });
});
Now, I know about fs.appendFile(), but I am not sure how to use it here. I have two more files named sample2.xml and sample3.xml.
This is what I have tried, but the problem is that it is overwriting, not appending:
const fs = require('fs');
const xml2js = require('xml2js');
const async = require('async');

const parser = new xml2js.Parser({
  explicitArray: true
});

let files = ['sample.xml', 'sample2.xml'];

async.map(files, fs.readFile, (err, files) => {
  if (err) {
    throw err;
  } else {
    files.forEach((file) => {
      parser.parseString(file, (err, result) => {
        let output = JSON.stringify(result.planes.plane);
        fs.appendFile('output.json', output, 'utf8', (err) => {
          if (err) {
            throw err;
          } else {
            console.log('file created..');
          }
        });
      });
    });
  }
});
You need to read each XML file, get the JSON data from it, and then write everything to the final file:
async.map(
  files,
  (file, cb) => {
    fs.readFile(file, (err, data) => {
      if (err) {
        cb(err)
      } else {
        parser.parseString(data, (err, result) => {
          cb(err, result.planes.plane)
        })
      }
    })
  },
  function (err, results) {
    if (err) {
      throw err
    } else {
      let output = JSON.stringify(results)
      fs.writeFile('output.json', output, 'utf8', (err) => {
        if (err) {
          throw err
        } else {
          console.log('file created...')
        }
      })
    }
  }
)
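Note the design choice: all parsed results are collected first and written with a single writeFile call. Appending separately stringified fragments with fs.appendFile would just concatenate arrays like [...][...], which is not valid JSON.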

Getting error "Can't set headers after they are sent" when reading files from a directory?

I am trying to get the names and creation dates of the files. With the code below, an error is thrown when I call the API. It reads the directory and prints all the file names, but it never sends them back to the callback. Any idea what is implemented wrong?
service.js
var fs = require('fs');
var path = require('path');
var async = require('async');

var currentDate = new Date();
var objToReturn = [];
var logsDirectory = './logs';

function readDirectory(env, callback) {
  fs.readdir(logsDirectory + '/' + env, function (err, files) {
    // loop through each file
    async.eachSeries(files, function (file, done) {
      var dirPath = logsDirectory + '/' + env;
      var filePath = path.join(dirPath, file);
      var fileInfo = {};
      fs.stat(filePath, function (err, stats) {
        if (err) {
          console.info("File doesn't exist");
        } else {
          fileInfo.fileDate = stats.birthtime;
          fileInfo.filename = file;
          objToReturn.push(fileInfo);
          done();
        }
      });
    });
  },
  function (err) {
    if (err) {
      console.info('error', err);
      return;
    }
    // when you're done reading all the files, do something...
    console.log('before Callback', objToReturn);
    callback(objToReturn);
  });
}

exports.readDirectory = readDirectory;
app.js
var stDirectory = require('./app/serverfiles/stDir');

app.get('/getAllFiles', function (req, res) {
  var env = req.query.env;
  console.log('printing', env);
  stDirectory.readDirectory(env, function (files) {
    res.json(files);
    console.log('Api files', files);
  });
});
There are a few issues:
instead of passing the "final" handler to async.eachSeries(), you're passing it to fs.readdir(), so callback will never get called;
you're declaring objToReturn outside of the function, which isn't a good idea because multiple requests could be handled in parallel;
you're not handling any errors properly;
you should really use the Node.js callback idiom of calling callbacks with two arguments, the first being errors (if there are any) and the second being the result of the asynchronous operation.
The code below should fix these issues:
function readDirectory(env, callback) {
  let objToReturn = [];
  fs.readdir(
    logsDirectory + "/" + env,
    function (err, files) {
      if (err) return callback(err);
      // loop through each file
      async.eachSeries(files, function (file, done) {
        var dirPath = logsDirectory + "/" + env;
        var filePath = path.join(dirPath, file);
        var fileInfo = {};
        fs.stat(filePath, function (err, stats) {
          if (err) {
            console.info("File doesn't exist");
            return done(err);
          } else {
            fileInfo.fileDate = stats.birthtime;
            fileInfo.filename = file;
            objToReturn.push(fileInfo);
            done();
          }
        });
      }, function (err) {
        if (err) {
          console.info("error", err);
          return callback(err);
        }
        // when you're done reading all the files, do something...
        console.log("before Callback", objToReturn);
        callback(null, objToReturn);
      });
    }
  );
}

// To call it:
stDirectory.readDirectory(env, function (err, files) {
  if (err) {
    res.sendStatus(500);
  } else {
    res.json(files);
    console.log('Api files', files);
  }
});
You should also consider using async.mapSeries() instead of async.eachSeries() with a separate array (objToReturn), as shown below.
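A sketch of that mapSeries() variant; it maps each file name directly to its info object, so the shared objToReturn array disappears:
function readDirectory(env, callback) {
  var dirPath = logsDirectory + '/' + env;
  fs.readdir(dirPath, function (err, files) {
    if (err) return callback(err);
    // map each file name to an info object; results arrive as an array
    async.mapSeries(files, function (file, done) {
      fs.stat(path.join(dirPath, file), function (err, stats) {
        if (err) return done(err);
        done(null, { filename: file, fileDate: stats.birthtime });
      });
    }, callback); // callback(err, arrayOfFileInfos)
  });
}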

csv insert with pg library in node

So I wrote this kind of code:
var csv = require('csv-stream');
var request = require('request');
var fs = require('fs');
var pg = require('pg');

var conString = "pg://admin:admin@localhost:5432/labels";

// All of these arguments are optional.
var options = {
  delimiter: ';',    // default is ,
  endLine: '\n',     // default is \n
  escapeChar: '"',   // default is an empty string
  enclosedChar: '"'  // default is an empty string
};

try {
  var csvStream = csv.createStream(options);
  fs.createReadStream('personss.csv').pipe(csvStream)
    .on('error', function (err) {
      console.error(err);
    })
    .on('data', function (data) {
      // outputs an object containing a set of key/value pairs
      // representing a line found in the csv file
      console.log(data);
      pg.connect(conString, function (err, client, done) {
        client.query(
          // incomplete query: how do I pass the values from `data` here?
          'INSERT into test (firstname, lastname) from',
          function (err, result) {
            if (err) {
              console.log(err);
            } else {
              console.log('data inserted');
            }
          });
      });
    });
} catch (e) {
  console.error(e.message);
}
Here I stream the CSV file with the csv-stream library. How can I now insert the values from the 'data' variable into my database? Should I do it with a for loop?
Update
Added a for loop to iterate over the data:
pg.connect(conString, function (err, client, done) {
  for (var i = 0; i < data.length; i++) {
    client.query(
      'INSERT into test (firstname, lastname)',
      function (err, result) {
        if (err) {
          console.log(err);
        } else {
          console.log('data inserted' + result.data[0]);
        }
      });
  }
});
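Since each 'data' event already delivers a single parsed row, you don't need a for loop per event; insert the row directly with a parameterized query. A sketch, assuming the CSV header yields firstname and lastname keys on the row object:
// one row arrives per 'data' event, so insert it directly;
// $1/$2 placeholders let pg escape the values safely
csvStream.on('data', function (row) {
  pg.connect(conString, function (err, client, done) {
    if (err) return console.error(err);
    client.query(
      'INSERT INTO test (firstname, lastname) VALUES ($1, $2)',
      [row.firstname, row.lastname], // assumed column names from the CSV header
      function (err) {
        done(); // release the client back to the pool
        if (err) {
          console.log(err);
        } else {
          console.log('data inserted');
        }
      });
  });
});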
