Nodejs Convert text to JSON - javascript

For some reason I'm having such a hard time converting this txt file to an actual javascript array.
myJson.txt
{"action": "key press", "timestamp": 1523783621, "user": "neovim"}
{"action": "unlike", "timestamp": 1523784584, "user": "r00k"}
{"action": "touch", "timestamp": 1523784963, "user": "eevee"}
{"action": "report as spam", "timestamp": 1523786005, "user": "moxie"}
Currently, this is what I have, and it doesn't work:
const fs = require('fs');

function convert(input_file_path) {
  const file = fs.readFileSync(input_file_path, 'utf8');
  const newFormat = file
    .replace(/(\r\n\t|\n|\r\t)/gm, '')
    .replace(/}{/g, '},{');
  console.log([JSON.parse(newFormat)]);
}

convert('myJson.txt');

Since your file contains a JSON object per line, you could read that file line by line, using readline.
Each line is then parsed and pushed into an array, which is returned (resolved) once the file has been fully read.
'use strict';

const fs = require('fs');
const readline = require('readline');

function convert(file) {
  return new Promise((resolve, reject) => {
    const stream = fs.createReadStream(file);
    // Handle stream errors (e.g. file not found)
    stream.on('error', reject);
    const reader = readline.createInterface({
      input: stream
    });
    const array = [];
    reader.on('line', line => {
      array.push(JSON.parse(line));
    });
    reader.on('close', () => resolve(array));
  });
}

convert('myJson.txt')
  .then(res => {
    console.log(res);
  })
  .catch(err => console.error(err));

I would have done it this way:
var fs = require('fs');
var readline = require('readline');

var array = [];

var rd = readline.createInterface({
  input: fs.createReadStream(__dirname + '/demo.txt')
});

rd.on('line', function (line) {
  array.push(JSON.parse(line));
});

rd.on('close', function () {
  array.forEach(e => console.log(e.action));
});
What's happening here is that I'm reading the lines of the file one by one using readline, which is one of the core modules of Node.js, listening for its events and doing what's needed.
And yeah, you'll have to parse each line as JSON, for sure ;)
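Assuming demo.txt holds the same lines as the myJson.txt sample above, the close handler prints the action of each record:

key press
unlike
touch
report as spam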
Thanks

The problem with your code is that you're trying to parse a JS array as a JSON array, while a JSON array has to live entirely inside the string.
Here is what you're doing:

jsArray = ['{"foo": "bar"}, {"foo":"baz"}']

This is a valid JS array holding the single string value '{"foo": "bar"}, {"foo":"baz"}', which is not valid JSON on its own.
Compare it with:

jsonArrayStr = '[{"foo": "bar"}, {"foo":"baz"}]'

This is a valid JSON array string (the square brackets are part of the string).
So, to get your code running, you need to add the square brackets to your string before parsing it.
const fs = require('fs');

function convert(input_file_path) {
  const file = fs.readFileSync(input_file_path, 'utf8');
  const newFormat = file
    .replace(/(\r\n|\n|\r)/gm, '') // strip line breaks first
    .replace(/}{/g, '},{');        // separate the objects with commas
  // add the square brackets so the whole string is one JSON array
  console.log(JSON.parse('[' + newFormat + ']'));
}
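Running convert('myJson.txt') on the sample file above then logs the parsed array:

[ { action: 'key press', timestamp: 1523783621, user: 'neovim' },
  { action: 'unlike', timestamp: 1523784584, user: 'r00k' },
  { action: 'touch', timestamp: 1523784963, user: 'eevee' },
  { action: 'report as spam', timestamp: 1523786005, user: 'moxie' } ]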

What I'm doing in this script is reading the content of the text file line by line, parsing each line to a JSON object and storing it in an array. When we reach the last line, the array has all the data, and you can write it to a new file with fs.writeFileSync() after converting it to a string with JSON.stringify().
Note: you'll have to install the line-reader package first, i.e. npm install line-reader.
var lineReader = require('line-reader');
var fs = require('fs');

var obj = [];
var file = "fileName.json";
var num = 0;

lineRead();

function lineRead() {
  lineReader.eachLine('input.txt', function (line, last) {
    // to check which line we're on
    console.log(num);
    num++;
    convertJson(line);
    if (last) {
      // when it's the last line, convert the array to a string
      // and save it to the new file
      var data = JSON.stringify(obj);
      fs.writeFileSync(file, data);
    }
  });
}

function convertJson(data) {
  // parse the line and store the resulting object in the array
  var value = JSON.parse(data);
  obj.push(value);
}
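Assuming input.txt holds the same newline-delimited objects as the myJson.txt sample at the top, fileName.json ends up containing one compact JSON array:

[{"action":"key press","timestamp":1523783621,"user":"neovim"},{"action":"unlike","timestamp":1523784584,"user":"r00k"},{"action":"touch","timestamp":1523784963,"user":"eevee"},{"action":"report as spam","timestamp":1523786005,"user":"moxie"}]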

Related

txt file to json using Node JS

I have a simple txt file with data in this format with millions of lines:
{"a":9876312,"b":1572568981512}
{"a":9876312,"b":1572568981542}
I want to convert this into a file with a .json extension, using the reduce function in Node.js and a return statement, probably looking like this:
[{"a":9876312,"b":1572568981512},
{"a":9876312,"b":1572568981542}]
Any help will be really really appreciated. Thanks :)
So far I've tried this:
const fs = require('fs');
const FILE_NAME = 'abc.txt';

const x = mapEvents(getJSONFileData(FILE_NAME));

function getJSONFileData(filename) {
  return fs.readFileSync(filename, 'utf-8')
    .split('\n')
    .map(JSON.parse)
}

function mapEvents(events) {
  events.reduce((acc, data) => {
    return [{data.a, data.b}]
  });
}

console.log(x)
I am getting an 'undefined' value constantly
I have found some issues in your code.
You haven't returned anything from the mapEvents function, which makes the value of your variable x undefined.
getJSONFileData needs some fixing: a trailing empty line will make JSON.parse throw, so empty lines have to be filtered out.
You can use the code below:
const fs = require('fs');
const FILE_NAME = 'abc.txt';

const x = mapEvents(getJSONFileData(FILE_NAME));

function getJSONFileData(filename) {
  return fs
    .readFileSync(filename, 'utf-8')
    .split('\n')
    .filter(Boolean) // drop empty lines so JSON.parse doesn't throw
    .map(JSON.parse);
}

function mapEvents(events) {
  return JSON.stringify(events);
}

console.log(x);
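For the two sample lines above, x ends up as the compact JSON string:

[{"a":9876312,"b":1572568981512},{"a":9876312,"b":1572568981542}]

If you want the pretty, multi-line format from the question, pass an indent to the stringifier: JSON.stringify(events, null, 2).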

Array doesn't have expected value - Nodejs

I am running a script which looks into a directory and lists its files, then checks the file type to process; if the extension matches, the file is read and each line of the file (.col, which is just a txt file renamed) is inserted into an array.
Now, after a file is read and the array is populated, I would like to use the array and do some further processing, e.g. create a db record. I am missing something really basic here, because on each console.log I do as below, I always get the full contents of all files in my array.
So to make it a bit simpler:
array is empty.
Then a file is read and processed, and the array now has
array[0] = line 0 of file
array[1] = line 1 of file, etc.
const fs = require('fs');
const readline = require('readline');
var files = fs.readdirSync('/home/proj/data');
var path = require('path');
var model = [];
var lineReader = [];

for (var i = 0; i < files.length; i++) {
  if (path.extname(files[i]) === ".col") {
    lineReader[i] = readline.createInterface({
      input: require('fs').createReadStream(files[i])
    });
    lineReader[i].on('line', function (line) {
      model.push(line);
    }).on('close', async function () {
      console.log(model);
    });
  }
}
Instead the script is run and array[] holds all lines of all files that match the extension.
Your help is greatly appreciated and anyone is allowed to scorch my JS as I am pretty sure I am missing something basic here.
So, you want to read the files in parallel (because that's what your program does) and put the results in an array of arrays?
You can wrap the file-reading mechanism in a promise and consume it with Promise.all. Here is an example to get you started.
const fs = require('fs');
const readline = require('readline');
var files = fs.readdirSync('./');
var path = require('path');

function readFile(fileName) {
  return new Promise(resolve => {
    const array = [];
    const lineReader = readline.createInterface({
      input: fs.createReadStream(fileName) // use the parameter, not files[i]
    });
    lineReader.on('line', function (line) {
      array.push(line);
    }).on('close', function () {
      // do some processing
      console.log(array);
      resolve(array);
    });
  });
}

const readFilePromises = [];
for (var i = 0; i < files.length; i++) {
  if (path.extname(files[i]) === ".js") {
    readFilePromises.push(readFile(files[i]));
  }
}

Promise.all(readFilePromises) // or await Promise.all([...])
  .then(data => {
    console.log(data); // will be an array of arrays
  })
If you want a single Array you can always flatten the result using data.flat()
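For instance, a minimal sketch of flattening the result into the single model array the question asks for (Array.prototype.flat needs Node 11+):

Promise.all(readFilePromises)
  .then(data => {
    const model = data.flat(); // one flat array holding every line of every file
    console.log(model);
  });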
If your files are not very big and sync methods are OK, you can simplify the code this way:
'use strict';

const fs = require('fs');
const path = require('path');

const DIR = '/home/proj/data';
const model = [];
fs.readdirSync(DIR)
  .filter(name => path.extname(name) === '.col')
  .forEach((name) => {
    // join the directory back on; readdirSync returns bare file names
    model.push(...fs.readFileSync(path.join(DIR, name), 'utf8').split('\n'));
  });
console.log(model);

How to read specific data from an uploaded txt file on node js

I need a client to upload a text file. Then I want to parse the text file so that only lines containing the word "object" are left in it. I have successfully coded the uploading part, but I need help filtering out the lines that don't contain "object". My Node.js code is below.
You can use the ReadLine API that's part of Node Core to iterate through the file line-by-line. You can use string.includes() to determine if your line contains the phrase you're looking for.
var readline = require('readline');
var fs = require('fs');
var stream = require('stream');

function filterFile(phrase, input) {
  return new Promise((resolve, reject) => {
    var lines = [];
    let rl = readline.createInterface({
      input: input
    });
    rl.on('line', (line) => {
      if (line.includes(phrase))
        lines.push(line);
    });
    rl.on('close', () => {
      // Turn the filtered lines back into a readable stream
      // (stream.Readable.from is available in Node 12.3+)
      return resolve(stream.Readable.from([lines.join('\n')]));
    });
    rl.on('error', (err) => {
      return reject(err);
    });
  });
}
Edit for Filtered Output Write Stream Example
We can take the resulting stream returned by filterFile() and pipe its contents into a new file like so
var saveDest = './filteredLines.txt';

filterFile('object', inputStream)
  .then((filteredStream) => {
    let ws = fs.createWriteStream(saveDest);
    filteredStream.once('error', (err) => {
      return Promise.reject(err);
    });
    filteredStream.once('end', () => {
      console.log(`Filtered File has been created at ${saveDest}`);
      return Promise.resolve();
    });
    filteredStream.pipe(ws);
  });
Step 1
Split the text into lines:

var x = 'i am object\ni m object';
var arr = x.split('\n');

Step 2
Test each line with an object regexp (note: no g flag, since a global regex keeps state between test() calls and would skip matches):

var reg = /object/;
if (reg.test(eachLine)) { // eachLine = each entry of arr
  // write new line
} else {
  // do nothing
}
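Putting the two steps together, a minimal sketch, assuming the uploaded file's text is already in memory as the string text:

var text = 'i am object\ni m object\nno match here';
var reg = /object/;
var kept = text.split('\n').filter(function (line) {
  return reg.test(line); // keep only lines containing "object"
});
console.log(kept.join('\n'));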

get specific file content via node js

I use the following code to get a specific file which is located in my C folder.
My question is how to get specific content inside this file. For example, I have a text file and the content is:

name:test 1 test2 test 3
user: foo bar

When the file is read, I need to get "test 1 test2 test 3" as a string whenever I find the key name in the text file.
How can I do that?
var fs = require('fs');

fs.readFile('c//myfile.txt', 'utf8', function (err, data) {
  if (err) {
    return console.log(err);
  }
});
The other answer will have issues when your value contains ':'.
For example, when your line is 'name: bla:bli:blub', you would only get 'bla' as a result.
So this is my suggestion, which won't have that problem.
You could place this snippet into your readFile callback:
var keyValueObject = {};
data.split("\n").forEach(function (element) {
  var keyValueSeparatorPosition = element.indexOf(':');
  var key = element.substr(0, keyValueSeparatorPosition);
  var value = element.substring(keyValueSeparatorPosition + 1);
  keyValueObject[key] = value;
});
You can then access your values using:
keyValueObject['name']
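For the sample file above:

console.log(keyValueObject['name']); // 'test 1 test2 test 3'
console.log(keyValueObject['user']); // ' foo bar' (the leading space stays, since the value is everything after the first ':')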
EDIT1:
I've made it a little more generic now. keyVals is an array of objects holding the key/val pairs of your file:
var fs = require('fs');

var keyVals = [];
fs.readFile('c//myfile.txt', 'utf8', function (err, data) {
  if (err) {
    return console.log(err);
  }
  var lines = data.split('\n');
  lines.forEach(function (line) {
    var pair = line.split(':');
    keyVals.push({ key: pair[0], val: pair[1] });
  });
});
access it like this:

keyVals.forEach(function (element) {
  console.log(element.key, element.val);
});
I hope this helps

how to read a file, store data and then write it

I have a text file with a ton of values that I want to convert to meaningful JSON using the Node.js fs module.
I want to store the first value of every line in an array unless the value is already present.
7000111,-1.31349,36.699959,1004,
7000111,-1.311739,36.698589,1005,
8002311,-1.262245,36.765884,2020,
8002311,-1.261135,36.767544,2021,
So for this case, I'd like to write to a file:
[7000111, 8002311]
Here's what I have so far. It writes [] to the file.
var fs = require('fs');
var through = require('through');
var split = require('split');

var shape_ids = [];

var source = fs.createReadStream('data/shapes.txt');
var target = fs.createWriteStream('./output3.txt');
var tr = through(write, end);

source
  .pipe(split())
  .pipe(tr);

// Function definitions
function write(line) {
  var line = line.toString();
  var splitted = line.split(',');
  // if it's not in array
  if (shape_ids.indexOf(splitted[0]) > -1) {
    shape_ids.push(splitted[0]);
  }
}

function end() {
  shape_ids = JSON.stringify(shape_ids);
  target.write(shape_ids);
  console.log('data written');
}
The code uses the split and through modules.
How do I store values in the array and write the populated array to the file?
Update:
This is what I want to do, but it's in Ruby:
shape_ids = []

File.open("data/shapes.txt").readlines.each do |line|
  data = line.split(',')
  shape_id = data.first
  if !shape_ids.include? shape_id
    shape_ids.push(shape_id)
  end
end

puts shape_ids # array of unique shape_ids
Can I do this in javascript?
Unless you are super comfortable with the new Stream API in node, use the event-stream module to accomplish this:
var fs = require('fs');
var es = require('event-stream');

function getIds(src, target, callback) {
  var uniqueIDs = [];
  es.pipeline(
    fs.createReadStream(src),
    es.split(),
    es.map(function (line, done) {
      var id = line.split(',').shift();
      // skip ids we have already seen
      if (uniqueIDs.indexOf(id) > -1) return done();
      uniqueIDs.push(id);
      done(null);
    }),
    es.wait(function (err, text) {
      // Here we create our JSON; wrapping the array in an object
      // keeps the output easy to extend later
      var data = JSON.stringify({ ids: uniqueIDs });
      fs.writeFile(target, data, function (err) {
        if (typeof callback === 'function') callback(err);
      });
    })
  );
}

getIds('./values.txt', './output.json');
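Assuming values.txt holds the sample shapes.txt content from the question, output.json would come out as:

{"ids":["7000111","8002311"]}

(The ids stay strings here, since they come straight from line.split.)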
Unfortunately there is no "easy" way to keep this as a pure stream flow, so you have to "wait" until the data is done filtering before turning it into a JSON string. Hope that helps!
