Get specific file content via Node.js - JavaScript

I use the following code to read a specific file located in my C folder. My question is how to get specific content from inside this file. For example, I have a text file whose content is

name: test1 test2 test3
user: foo bar

When the file is read, I need to get "test1 test2 test3" as a string whenever I find the key "name" in the text file.
How can I do that?
fs = require('fs')
fs.readFile('c//myfile.txt', 'utf8', function (err, data) {
    if (err) {
        return console.log(err);
    }
});

The other answer will have issues when your value contains ':'.
For example, when your line is 'name: bla:bli:blub', you would only get 'bla' as a result.
So this is my suggestion, which won't have that problem.
You could place this snippet into your readFile method:
var keyValueObject = {};
data.split("\n").forEach(function (element) {
    // split on the first ':' only, so the value itself may contain ':'
    var keyValueSeparatorPosition = element.indexOf(':');
    var key = element.substr(0, keyValueSeparatorPosition);
    var value = element.substring(keyValueSeparatorPosition + 1);
    keyValueObject[key] = value;
});
You can then access your values using:
keyValueObject['name']
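Put together inside the question's readFile callback, a complete sketch (reusing the question's c//myfile.txt path and sample data; the .trim() is my addition to strip the space after ':') could look like this:

var fs = require('fs');

fs.readFile('c//myfile.txt', 'utf8', function (err, data) {
    if (err) {
        return console.log(err);
    }
    var keyValueObject = {};
    data.split("\n").forEach(function (element) {
        var separatorPosition = element.indexOf(':');
        // skip lines without a ':' separator (e.g. a trailing blank line)
        if (separatorPosition === -1) return;
        var key = element.substring(0, separatorPosition);
        var value = element.substring(separatorPosition + 1);
        keyValueObject[key] = value.trim();
    });
    console.log(keyValueObject['name']); // "test1 test2 test3"
});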

EDIT1:
I've made it a bit more generic now. keyVals is an array of objects holding the key/val pairs from your file (still splitting on the first ':' only):
fs = require('fs')
var keyVals = [];
fs.readFile('c//myfile.txt', 'utf8', function (err, data) {
    if (err) {
        return console.log(err);
    }
    var lines = data.split('\n');
    lines.forEach(function (line) {
        // again split on the first ':' so values may contain ':'
        var separatorPosition = line.indexOf(':');
        keyVals.push({
            key: line.substring(0, separatorPosition),
            val: line.substring(separatorPosition + 1)
        });
    });
});
Access it like this:
keyVals.forEach(function (element) {
    console.log(element.key, element.val);
});
I hope this helps

Related

Nodejs Convert text to JSON

For some reason I'm having such a hard time converting this txt file to an actual javascript array.
myJson.txt
{"action": "key press", "timestamp": 1523783621, "user": "neovim"}
{"action": "unlike", "timestamp": 1523784584, "user": "r00k"}
{"action": "touch", "timestamp": 1523784963, "user": "eevee"}
{"action": "report as spam", "timestamp": 1523786005, "user": "moxie"}
Currently, what I have (which doesn't work):
const fs = require('fs');

function convert(input_file_path) {
    const file = fs.readFileSync(input_file_path, 'utf8');
    const newFormat = file
        .replace(/(\r\n\t|\n|\r\t)/gm, '')
        .replace(/}{/g, '},{');
    console.log([JSON.parse(newFormat)]);
}

convert('myJson.txt');
Since your file contains a JSON object per line, you could read that file line by line, using readline.
Each line is then parsed and pushed into an array, which is resolved after the file has been fully read.
'use strict';

const fs = require('fs');
const readline = require('readline');

function convert(file) {
    return new Promise((resolve, reject) => {
        const stream = fs.createReadStream(file);
        // Handle stream errors (e.g. file not found)
        stream.on('error', reject);
        const reader = readline.createInterface({
            input: stream
        });
        const array = [];
        reader.on('line', line => {
            array.push(JSON.parse(line));
        });
        reader.on('close', () => resolve(array));
    });
}

convert('myJson.txt')
    .then(res => {
        console.log(res);
    })
    .catch(err => console.error(err));
I would have done it this way:
var fs = require('fs');
var readline = require('readline');

var array = [];

var rd = readline.createInterface({
    input: fs.createReadStream(__dirname + '/demo.txt')
});

rd.on('line', function (line) {
    array.push(JSON.parse(line));
});

rd.on('close', function () {
    array.forEach(e => console.log(e.action));
});
What's happening here is that I am reading the lines of the file one by one using readline, one of the core modules of Node.js, listening for its events, and doing what's needed.
And yes, you'll have to parse each line as JSON for sure ;)
Thanks
The problem with your code is that you're trying to parse the file's contents as a JSON array, but the square brackets are never part of the string being parsed.
Here is what you're effectively doing:
jsArray = ['{"foo": "bar"}, {"foo":"baz"}']
This is a valid JS array holding the single string value '{"foo": "bar"}, {"foo":"baz"}'.
For JSON.parse to give you an array back, the square brackets have to be part of the string itself:
jsonArrayStr = '[{"foo": "bar"}, {"foo":"baz"}]'
So to get your code running, you need to add the square brackets to your string before parsing it:
function convert(input_file_path) {
    const file = fs.readFileSync(input_file_path, 'utf8');
    const newFormat = file
        .replace(/(\r\n|\n|\r)/gm, '')
        .replace(/}{/g, '},{');
    console.log(JSON.parse('[' + newFormat + ']'));
}
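Alternatively, since every line in myJson.txt is a standalone JSON object, you can skip the string surgery entirely and parse line by line; a minimal synchronous sketch:

const fs = require('fs');

function convert(input_file_path) {
    const file = fs.readFileSync(input_file_path, 'utf8');
    return file
        .split(/\r?\n/)                      // one JSON object per line
        .filter(line => line.trim() !== '')  // ignore blank lines
        .map(line => JSON.parse(line));
}

console.log(convert('myJson.txt'));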
What I'm doing in this script is reading the content of the text file line by line, converting each line to a JSON object, and storing it in an array. When we reach the last line, the array has all the data; we then write it to a new file with fs.writeFileSync() after converting it to a string with JSON.stringify().
Note: you have to install the line-reader package first, i.e. npm install line-reader
var lineReader = require('line-reader');
var fs = require('fs');

var obj = [];
var file = "fileName.json";
var num = 0;

lineRead();

function lineRead() {
    lineReader.eachLine('input.txt', function (line, last) {
        // to check which line we're on
        console.log(num);
        num++;
        convertJson(line);
        if (last) {
            // on the last line, convert the array to a string and save it to a new file
            var data = JSON.stringify(obj);
            fs.writeFileSync(file, data);
        }
    });
}

function convertJson(data) {
    // parse the line and store the resulting object in the array
    obj.push(JSON.parse(data));
}

Node.js - Array undefined but logging on console

What I'm trying to do is download a csv file, read it line by line, and add each line (split on ',') to tmparray.
This code works and prints all the element in the array.
var request = require('request');
var fs = require('fs');
readline = require('readline');

try {
    request('https://earthquake.usgs.gov/earthquakes/feed/v1.0/summary/2.5_day.csv').pipe(fs.createWriteStream("MyCsv.txt"));
} catch (e) {
    console.error(e);
}

var inputFile = 'MyCsv.csv';
var tmparray;

//read the file
var rd = readline.createInterface({
    input: fs.createReadStream('/home/nome/Node/MyCsv.csv')
});

try {
    //read line by line
    rd.on('line', (line) => {
        tmparray += line.split(",");
        //print the elements
        tmparray.forEach((element) => {
            console.log(element);
        }, this);
    });
} catch (e) {
    console.error(e);
}
What I want to do is print the array after I've assigned it. I've tried this:
var request = require('request');
var fs = require('fs');
readline = require('readline');

try {
    request('https://earthquake.usgs.gov/earthquakes/feed/v1.0/summary/2.5_day.csv').pipe(fs.createWriteStream("MyCsv.txt"));
} catch (e) {
    console.error(e);
}

var inputFile = 'MyCsv.csv';
var tmparray;

//read the file
var rd = readline.createInterface({
    input: fs.createReadStream('/home/nome/Node/MyCsv.csv')
});

try {
    //read line by line
    rd.on('line', (line) => {
        tmparray += line.split(",");
    });
} catch (e) {
    console.error(e);
} finally {
    console.log(tmparray); // undefined
    // or this: console.log(tmparray[0]) can't read the property '0' of undefined
}
but the array is printed as if it is undefined
The problem is that rd.on(...) is asynchronous.
That means that you are telling rd that when it reads a line, it should add it to tmparray — but that doesn't actually happen yet. It happens moments later, after you console.log(tmparray).
You should say rd.on('close', () => console.log(tmparray)) to tell Node "when you have finished reading rd, then log the data".
There are a couple of other issues in the code, but they should be easier to find once this is fixed. The main one: you're trying to build up an array with the += operator, which doesn't do what you want. Since tmparray starts out undefined, += coerces everything into one long string. Initialize it as an array (var tmparray = [];) and concat or push the split values instead. Also note that the request call writes to MyCsv.txt while the reader opens MyCsv.csv, and the download may not have finished by the time you start reading.
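Putting those fixes together, a corrected sketch (keeping the question's file path; tmparray starts as a real array and is only logged on 'close') might look like:

var fs = require('fs');
var readline = require('readline');

var tmparray = []; // a real array, not undefined

var rd = readline.createInterface({
    input: fs.createReadStream('/home/nome/Node/MyCsv.csv')
});

rd.on('line', (line) => {
    // concat keeps tmparray an array; += would coerce everything to a string
    tmparray = tmparray.concat(line.split(','));
});

rd.on('close', () => {
    // runs only after the whole file has been read
    console.log(tmparray);
});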
Why not use the csv package? It will give you the same result. Here is an example of transforming a csv file into an array:
const csv = require('csv'),
      request = require('request');

var url = 'https://earthquake.usgs.gov/earthquakes/feed/v1.0/summary/2.5_day.csv';

request(url, function (err, response, data) {
    if (err) throw err;
    csv.parse(data, function (err, data) {
        if (err) throw err;
        // here you get your array
        console.log(data);
    });
});

Writing to file only writes last item, not all items, why?

I'm trying to write a feed to a file using Node.js. The problem is, it doesn't write all the feed items, only the last one.
var fs = require('fs');
var feedParser = require('ortoo-feedparser');

var url = "http://iwnsvg.com/feed";

feedParser.parseUrl(url).on('article', function (article) {
    console.log('title; ', article.title);
    fs.writeFile("articles.json", JSON.stringify(article.title), function (err) {
        if (err) {
            console.log(err);
        }
    });
});
Why?
Just change fs.writeFile( to fs.appendFile( and you're fine.
fs.writeFile overwrites your file each time you call it whereas fs.appendFile adds to a file.
As #Robert says, you should use appendFile, but also note that that change won't write out valid JSON. I'm not sure what output you're trying to achieve; if you just want the titles, you could write out a txt file with a title on each line like so:
var fs = require('fs');
var feedParser = require('ortoo-feedparser');

var url = "http://iwnsvg.com/feed";

feedParser.parseUrl(url).on('article', function (article) {
    console.log('title; ', article.title);
    fs.appendFile("articles.txt", article.title + "\n", function (err) {
        if (err) {
            console.log(err);
        }
    });
});
To write out JSON you can do:
var fs = require('fs');
var feedParser = require('ortoo-feedparser');

var url = "http://iwnsvg.com/feed";

let titles = [];
feedParser.parseUrl(url)
    .on('article', function (article) {
        console.log('title; ', article.title);
        titles.push(article.title);
    })
    .on('end', function () {
        fs.writeFile('articles.json', JSON.stringify({ titles }), function (err) {
            if (err) {
                console.log(err);
            }
        });
    });
fs.writeFile accepts an options argument that includes flag. The default value of flag is 'w' (write), so your data is replaced by the new data on every call. Use 'a' instead:
{ flag: 'a' }
and you'll be fine.
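Applied to the code from the question, only the options argument changes (a sketch; this goes inside the same 'article' handler):

fs.writeFile("articles.json", JSON.stringify(article.title) + "\n", { flag: 'a' }, function (err) {
    if (err) {
        console.log(err);
    }
});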
But don't forget that fs.writeFile and fs.appendFile are higher-level helpers in the fs module, and they open and close the file each time you need to add data.
Preferably, use fs.createWriteStream, which returns a writable stream (a writable file handle, in other languages' terms); then use and reuse this stream whenever you need to write data to your file.
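For example, a sketch of the stream-based version, which opens the file once in append mode and reuses the handle for every article:

var fs = require('fs');
var feedParser = require('ortoo-feedparser');

var url = "http://iwnsvg.com/feed";
// open once; the 'a' flag appends instead of truncating
var out = fs.createWriteStream('articles.txt', { flags: 'a' });

feedParser.parseUrl(url).on('article', function (article) {
    out.write(article.title + "\n");
});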

How to read specific data from an uploaded txt file on node js

I need a client to upload a text file. Then I want to parse the text file so that only lines containing the word "object" are left in it. I have successfully coded the uploading part, but I need help parsing out the lines that don't contain "object".
You can use the ReadLine API that's part of Node Core to iterate through the file line-by-line. You can use string.includes() to determine if your line contains the phrase you're looking for.
var readline = require('readline');
var fs = require('fs');
var { Readable } = require('stream');

function filterFile(phrase, input) {
    return new Promise((resolve, reject) => {
        var lines = [];
        let rl = readline.createInterface({
            input: input
        });
        rl.on('line', (line) => {
            if (line.includes(phrase))
                lines.push(line);
        });
        rl.on('close', () => {
            // turn the kept lines back into a readable stream
            return resolve(Readable.from([lines.join('\n')]));
        });
        // errors are emitted by the underlying input stream, not by rl
        input.on('error', (err) => {
            return reject(err);
        });
    });
}
Edit for Filtered Output Write Stream Example
We can take the resulting stream returned by filterFile() and pipe its contents into a new file like so
var saveDest = './filteredLines.txt';

// inputStream is the uploaded file's read stream
filterFile('object', inputStream)
    .then((filteredStream) => {
        let ws = fs.createWriteStream(saveDest);
        filteredStream.once('error', (err) => {
            console.error(err);
        });
        ws.once('finish', () => {
            console.log(`Filtered File has been created at ${saveDest}`);
        });
        filteredStream.pipe(ws);
    });
Step 1:
Split the text into lines using '\n':
var x = 'i am object\ni m object';
var arr = x.split('\n');
Step 2:
Test each line against the object regexp and keep only the matching lines:
var reg = /object/;
arr.forEach(function (line) {
    if (reg.test(line)) {
        // write this line to the output
    }
    // else do nothing
});
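Combined into a runnable sketch ('upload.txt' and 'filtered.txt' are placeholder filenames for the uploaded file and the output):

var fs = require('fs');

fs.readFile('upload.txt', 'utf8', function (err, data) {
    if (err) return console.error(err);

    var kept = data
        .split('\n')    // step 1: divide into lines
        .filter(function (line) {
            return /object/.test(line);    // step 2: keep only matching lines
        });

    fs.writeFile('filtered.txt', kept.join('\n'), function (err) {
        if (err) console.error(err);
    });
});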

how to read a file, store data and then write it

I have a text file with a ton of values that I want to convert to meaningful JSON using the Node.js fs module.
I want to store the first value of every line in an array unless the value is already present.
7000111,-1.31349,36.699959,1004,
7000111,-1.311739,36.698589,1005,
8002311,-1.262245,36.765884,2020,
8002311,-1.261135,36.767544,2021,
So for this case, I'd like to write to a file:
[7000111, 8002311]
Here's what I have so far. It writes [] to the file.
var fs = require('fs');
var through = require('through');
var split = require('split');

var shape_ids = [];
var source = fs.createReadStream('data/shapes.txt');
var target = fs.createWriteStream('./output3.txt');
var tr = through(write, end);

source
    .pipe(split())
    .pipe(tr)

// Function definitions
function write(line) {
    var line = line.toString();
    var splitted = line.split(',');
    // if it's not in array
    if (shape_ids.indexOf(splitted[0]) > -1) {
        shape_ids.push(splitted[0]);
    }
}

function end() {
    shape_ids = JSON.stringify(shape_ids);
    target.write(shape_ids);
    console.log('data written');
}
The code uses the split and through modules.
How do I store values in the array and write the populated array to the file?
Update:
This is what I want to do, but it's in Ruby:
shape_ids = []
File.open("data/shapes.txt").readlines.each do |line|
data = line.split(',')
shape_id = data.first
if !shape_ids.include? shape_id
shape_ids.push(shape_id)
end
end
puts shape_ids # array of unique shape_ids
Can I do this in javascript?
Unless you are super comfortable with the new Stream API in node, use the event-stream module to accomplish this:
var fs = require('fs');
var es = require('event-stream');

function getIds(src, target, callback) {
    var uniqueIDs = [];
    es.pipeline(
        fs.createReadStream(src),
        es.split(),
        es.map(function (line, done) {
            var id = line.split(',').shift();
            // skip IDs we've already seen
            if (uniqueIDs.indexOf(id) > -1) return done();
            uniqueIDs.push(id);
            done(null);
        }),
        es.wait(function (err, text) {
            // Here we create our JSON; a bare array is valid JSON too,
            // but wrapping it in an object leaves room for more fields later
            var data = JSON.stringify({ ids: uniqueIDs });
            fs.writeFile(target, data, function (err) {
                if ('function' == typeof callback) callback(err);
            });
        })
    );
}

getIds('./values.txt', './output.json');
Unfortunately there is no "easy" way to keep this as a pure stream flow, so you have to "wait" until the data is done filtering before turning it into a JSON string. Hope that helps!
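For comparison, here is a sketch of the same logic using only core modules (readline), which stays close to the Ruby version above and to the question's paths:

var fs = require('fs');
var readline = require('readline');

var shape_ids = [];
var rd = readline.createInterface({
    input: fs.createReadStream('data/shapes.txt')
});

rd.on('line', function (line) {
    var shape_id = line.split(',')[0];
    // only collect IDs we haven't seen yet
    if (shape_ids.indexOf(shape_id) === -1) {
        shape_ids.push(shape_id);
    }
});

rd.on('close', function () {
    fs.writeFile('./output3.txt', JSON.stringify(shape_ids), function (err) {
        if (err) return console.error(err);
        console.log('data written'); // writes e.g. ["7000111","8002311"]
    });
});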
