I have a folder of .TXT files containing plain text. How can I insert that plain text into a MongoDB database using the mongoose and fs modules in Node.js?
const fs = require('fs');

var parsedata;
var parsedTwice = [];
let fileName;

fs.readdir("D:/scnner_data/public", (err, data) => {
  if (err) throw err;
  console.log(data);
  fileName = data;
  console.log(fileName);
});

fs.readFile('D:/scnner_data/public/DC221020042103SC.TXT', "utf8", (err, data) => {
  if (err) {
    console.log(err);
  } else {
    parsedata = data.split("\n");
    for (let item of parsedata) {
      parsedTwice.push(item.split(";"));
    }
    // console.log('successful', parsedTwice);
    // console.log('this line is being read: ' + parsedata);
  }
});
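Something along these lines should work for the insert step. This is a minimal sketch, assuming a local MongoDB and a made-up model name ScanRecord that stores one document per file; adjust the schema, connection string, and parsing to your data:

const fs = require('fs/promises');
const path = require('path');
const mongoose = require('mongoose');

// Hypothetical model: one document per .TXT file, holding the parsed rows.
const ScanRecord = mongoose.model('ScanRecord', new mongoose.Schema({
  fileName: String,
  rows: [[String]], // each line of the file, split on ";"
}));

async function importFolder(dir) {
  // adjust the connection string to your setup
  await mongoose.connect('mongodb://localhost:27017/scanner');
  const names = (await fs.readdir(dir)).filter(n => n.toUpperCase().endsWith('.TXT'));
  for (const name of names) {
    const text = await fs.readFile(path.join(dir, name), 'utf8');
    const rows = text.split('\n').map(line => line.split(';'));
    await ScanRecord.create({ fileName: name, rows });
  }
  await mongoose.disconnect();
}

importFolder('D:/scnner_data/public').catch(console.error);

Using the promise-based fs API avoids the race in the original snippet, where fileName is used before readdir has finished.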
I need to find a line in a file and replace it with a new one, all using NodeJS.
Here's what I've done to achieve that:
var fs = require('fs');
fs.readFile('infra_setting.conf', 'utf-8', function (err, data) {
  if (err) throw err;
  console.log(data);
});
var fs = require('fs');
fs.readFile('myfile.conf', 'utf8', function (err, data) {
  if (err) {
    return console.log(err);
  }
  var result = data.replace(/example/g, 'example: 12345678');
  fs.writeFile('myfile.conf', result, 'utf8', function (err) {
    if (err) return console.log(err);
  });
});
The problem I have is that the string of the line keeps changing.
1st time => example : 2222
2nd time => example : something else
Is there a way to locate the line and replace it with NodeJS?
Here's an example where I changed a line in a .gitignore file:
// simulate fs.readFileSync('./.gitignore', { encoding: 'utf8' })
const fileContent = 'node_modules\r\npackage-lock.json\r\nyarn.lock\r\n*.code-workspace\r\n'

function changeLine(content, lineString, newLineString) {
  const delimiter = '\r\n'
  const parts = content.split(delimiter).filter(v => v.length)
  const lineIndex = parts.findIndex(v => v.includes(lineString))
  if (lineIndex === -1) return content // line not found: return the content unchanged
  parts[lineIndex] = newLineString
  return parts.join(delimiter)
}
console.log(fileContent)
const change1 = changeLine(fileContent, 'node_modules', 'not_node_modules')
console.log(change1)
const change2 = changeLine(change1, 'package-lock.json', '.vscode')
console.log(change2)
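To apply this to a real file rather than the simulated string, a small sketch: read the file, change the line, write it back.

const fs = require('fs')

const raw = fs.readFileSync('.gitignore', { encoding: 'utf8' })
const updated = changeLine(raw, 'node_modules', 'not_node_modules')
fs.writeFileSync('.gitignore', updated)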
Try this and tell me if it works or not:
var fs = require('fs');

function searchReplaceFile(regexpFind, replace, fileName) {
  var file = fs.createReadStream(fileName, 'utf8');
  var newData = '';
  file.on('data', function (chunk) {
    newData += chunk.toString().replace(regexpFind, replace);
  });
  file.on('end', function () {
    fs.writeFile(fileName, newData, function (err) {
      if (err) {
        return console.log(err);
      } else {
        console.log('Updated!');
      }
    });
  });
}

searchReplaceFile(/example/g, 'example: 12345678', 'infra_setting.conf');
I can't get the contents of all the files in a folder using Node.js.
I am getting the contents of one file with the read function, but not of all files at once.
I hope this is correct.
const testFolder = './tests/';
const fs = require('fs');

fs.readdir(testFolder, (err, files) => {
  files.forEach(file => {
    fs.readFile(file, 'utf8', function (err, data) {
      if (err) {
        return console.log(err);
      }
      console.log(data);
    });
  });
});
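One thing worth checking in the snippet above: fs.readdir yields bare file names, so fs.readFile(file, ...) resolves against the process working directory instead of ./tests/. A minimal sketch that joins the folder onto each name:

const path = require('path');
const fs = require('fs');
const testFolder = './tests/';

fs.readdir(testFolder, (err, files) => {
  if (err) return console.error(err);
  files.forEach(file => {
    // readdir returns bare names like "a.txt", so prefix the folder
    fs.readFile(path.join(testFolder, file), 'utf8', (err, data) => {
      if (err) return console.error(err);
      console.log(data);
    });
  });
});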
I got the answer. Here is my solution.
function uAll() {
  var absPath = __dirname + "/Assignment1/" + "../data/users/";
  console.log(absPath);
  fs.readdir(absPath, function (err, files) {
    // handle error
    if (err) {
      return console.log('Unable to scan directory: ' + err);
    }
    // list all files using forEach
    files.forEach(function (file) {
      // the file name (minus extension) is the user's phone number
      var phone = file.split(".");
      // fops is a custom file-access helper defined elsewhere in the project
      fops.read('users', phone[0], function (err, newObj) {
        if (!err && newObj) { // read succeeded
          console.log("Read User: ", newObj);
        } else { // error while reading
          console.log("User not found");
        }
      });
    });
  });
}
You did a good job. I just want to share my idea.
const lib = {};

lib.base = "/Assignment1/" + "../data/users/";

lib.read = function (dir, file, callback) {
  fs.readFile(lib.base + dir + '/' + file + '.json', 'utf-8', function (err, data) {
    if (!err && data) {
      // helpers.parseJsonToObject is a project helper that wraps JSON.parse
      const parsedData = helpers.parseJsonToObject(data);
      callback(false, parsedData);
    } else {
      callback(err, data);
    }
  });
};

lib.list = function (dir, callback) {
  fs.readdir(lib.base + dir + '/', function (err, data) {
    if (!err && data && data.length > 0) {
      let trimmedFileNames = [];
      data.forEach(fileName => {
        trimmedFileNames.push(fileName.replace('.json', ''));
      });
      callback(false, trimmedFileNames);
    } else {
      callback(err, data);
    }
  });
};
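A usage sketch tying the two together, assuming the same 'users' subdirectory as above:

// list every user file, then read each one
lib.list('users', function (err, fileNames) {
  if (err) return console.error(err);
  fileNames.forEach(function (name) {
    lib.read('users', name, function (err, user) {
      if (err) return console.error(err);
      console.log('Read User: ', user);
    });
  });
});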
I'm trying to add new text to an existing JSON file. I tried writeFileSync and appendFileSync, but the added text isn't formatted as JSON, even when I use JSON.stringify.
const fs = require('fs');

fs.readFile("test.json", (err, data) => {
  if (err) throw err;
  var data = JSON.parse(data);
  console.log(data);
});

var student = {
  age: "23"
};

fs.appendFileSync("test.json", "age: 23");
// var writeData = fs.writeFileSync("test.json", JSON.stringify(student));
My JSON file:
{ name: "kevin" }
Append turns out like this: {name: "kevin"}age: "23"
and writeFileSync turns out like: {name: "kevin"}{age: "23"}
What I want is to keep adding fields to my JSON file, like so:
{
  name: "kevin",
  age: "23"
}
First, don't use readFileSync and writeFileSync. They block execution and go against Node.js conventions. Here is the corrected code:
const fs = require('fs');

fs.readFile("test.json", (err, data) => { // READ
  if (err) {
    return console.error(err);
  }
  var data = JSON.parse(data.toString());
  data.age = "23"; // MODIFY
  fs.writeFile("test.json", JSON.stringify(data), (err) => { // WRITE
    if (err) {
      return console.error(err);
    }
    console.log("Success");
  });
});
What this code does:
Reads the data from the file.
Modifies the data to get the new data the file should have.
Writes the data (not appends) back to the file.
Here's what you can do: read the data from the file, edit that data, then write it back again.
const fs = require("fs")
fs.readFile("test.json", (err, buffer) => {
if (err) return console.error('File read error: ', err)
const data = JSON.parse(buffer.toString())
data.age = 23
fs.writeFile("test.json", JSON.stringify(data), err => {
if (err) return console.error('File write error:', err)
})
})
I am trying to get the name and creation date of some files. The code below throws an error when I call the API. It reads the directory and prints all the file names, but nothing is sent back to the callback. Any idea what is implemented wrong?
service.js
var fs = require('fs');
var path = require('path');
var async = require('async');

var currentDate = new Date();
var objToReturn = [];
var logsDirectory = './logs';

function readDirectory(env, callback) {
  fs.readdir(logsDirectory + '/' + env, function (err, files) {
    // loop through each file
    async.eachSeries(files, function (file, done) {
      var dirPath = logsDirectory + '/' + env;
      var filePath = path.join(dirPath, file);
      var fileInfo = {};
      fs.stat(filePath, function (err, stats) {
        if (err) {
          console.info("File doesn't exist");
        } else {
          fileInfo.fileDate = stats.birthtime;
          fileInfo.filename = file;
          objToReturn.push(fileInfo);
          done();
        }
      });
    });
  },
  function (err) {
    if (err) {
      console.info('error', err);
      return;
    }
    // when you're done reading all the files, do something...
    console.log('before Callback', objToReturn);
    callback(objToReturn);
  });
}
exports.readDirectory = readDirectory;
app.js
var stDirectory = require('./app/serverfiles/stDir');

app.get('/getAllFiles', function (req, res) {
  var env = req.query.env;
  console.log('printing', env);
  stDirectory.readDirectory(env, function (files) {
    res.json(files);
    console.log('Api files', files);
  });
});
There are a few issues:
instead of passing the "final" handler to async.eachSeries(), you're passing it to fs.readdir(), so callback will never get called;
you're declaring objToReturn outside of the function, which isn't a good idea because multiple requests could be handled in parallel;
you're not handling any errors properly;
you should really use the Node.js callback idiom of calling callbacks with two arguments, the first being an error (if any) and the second being the result of the asynchronous operation.
The code below should fix these issues:
function readDirectory(env, callback) {
  let objToReturn = [];
  fs.readdir(logsDirectory + "/" + env, function (err, files) {
    if (err) return callback(err);
    // loop through each file
    async.eachSeries(files, function (file, done) {
      var dirPath = logsDirectory + "/" + env;
      var filePath = path.join(dirPath, file);
      var fileInfo = {};
      fs.stat(filePath, function (err, stats) {
        if (err) {
          console.info("File doesn't exist");
          return done(err);
        } else {
          fileInfo.fileDate = stats.birthtime;
          fileInfo.filename = file;
          objToReturn.push(fileInfo);
          done();
        }
      });
    }, function (err) {
      if (err) {
        console.info("error", err);
        return callback(err);
      }
      // when you're done reading all the files, do something...
      console.log("before Callback", objToReturn);
      callback(null, objToReturn);
    });
  });
}
// To call it:
stDirectory.readDirectory(env, function (err, files) {
  if (err) {
    res.sendStatus(500);
  } else {
    res.json(files);
    console.log('Api files', files);
  }
});
You should also consider using async.mapSeries() instead of async.eachSeries() combined with a separate array (objToReturn), as sketched below.
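A minimal sketch of that variant, under the same assumptions as the code above (logsDirectory, path, and the async module in scope):

function readDirectory(env, callback) {
  var dirPath = logsDirectory + '/' + env;
  fs.readdir(dirPath, function (err, files) {
    if (err) return callback(err);
    // mapSeries collects each iteratee's result into an array,
    // so no shared objToReturn array is needed
    async.mapSeries(files, function (file, done) {
      fs.stat(path.join(dirPath, file), function (err, stats) {
        if (err) return done(err);
        done(null, { filename: file, fileDate: stats.birthtime });
      });
    }, callback); // invoked as callback(err, results)
  });
}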
So I wrote this kind of code:
var csv = require('csv-stream');
var request = require('request');
var fs = require('fs');
var pg = require('pg');

var conString = "pg://admin:admin@localhost:5432/labels";

// All of these arguments are optional.
var options = {
  delimiter: ';',     // default is ,
  endLine: '\n',      // default is \n
  escapeChar: '"',    // default is an empty string
  enclosedChar: '"'   // default is an empty string
};

try {
  var csvStream = csv.createStream(options);
  fs.createReadStream('personss.csv').pipe(csvStream)
    .on('error', function (err) {
      console.error(err);
    })
    .on('data', function (data) {
      // emits an object with a key/value pair for each column of one csv line
      console.log(data);
      pg.connect(conString, function (err, client, done) {
        client.query(
          'INSERT into test (firstname, lastname) from',
          function (err, result) {
            if (err) {
              console.log(err);
            } else {
              console.log('data inserted');
            }
          });
      });
    });
} catch (e) {
  console.error(e.message);
}
I stream the csv file with the csv-stream library. How can I now insert the values from the variable 'data' into my database? Should I do it with a for loop?
Update
I added a for loop to iterate over the data:
pg.connect(conString, function (err, client, done) {
  for (var i = 0; i < data.length; i++) {
    client.query(
      'INSERT into test (firstname, lastname)',
      function (err, result) {
        if (err) {
          console.log(err);
        } else {
          console.log('data inserted' + result.data[0]);
        }
      });
  }
});
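For what it's worth, no loop should be needed inside the 'data' handler: each 'data' event already carries one parsed row. A sketch using a parameterized query instead, reusing csvStream and conString from above and assuming the CSV header yields data.firstname and data.lastname (hypothetical column names):

fs.createReadStream('personss.csv').pipe(csvStream)
  .on('error', function (err) {
    console.error(err);
  })
  .on('data', function (data) {
    // each 'data' event is one parsed row; insert it with a parameterized query
    pg.connect(conString, function (err, client, done) {
      if (err) return console.error(err);
      client.query(
        'INSERT INTO test (firstname, lastname) VALUES ($1, $2)',
        [data.firstname, data.lastname], // assumed csv column headers
        function (err) {
          done(); // release the client back to the pool
          if (err) return console.error(err);
          console.log('data inserted');
        });
    });
  });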