I'm trying to create a tool that will allow me to quickly update several text strings in an HTML file. I'm doing this by searching for the contents of tags with specific ids and then replacing their text content. I've got much of it working, but I'm not sure how to write multiple replacements into the file. As it is, it's only applying the last of my replacements.
Here's what I have:
var fs = require('fs');

fs.readFile('sample.html', 'utf8', function (err, data) {
  if (err) {
    return console.log(err);
  }
  var title1 = "boogers";
  var text1 = "ronald mcdonald";
  var result = data.replace(/<div id="text1">(.*)<\/div>/g, '<div id="text1">' + text1 + '</div>');
  var result = data.replace(/<title>(.*)<\/title>/g, '<title>' + title1 + '</title>');
  fs.writeFile('sample.html', result, 'utf8', function (err) {
    if (err) return console.log(err);
  });
});
In the code above, it will replace the title's content but not the div tag's. I imagine this is a pretty basic fix, but I'm new to Node.js and JavaScript. Thanks for any help.
You are "creating" variable result twice, and attributing the replacement over data twice. So, in the second attribution you loose the replacement done before. Try this:
fs.readFile('sample.html', 'utf8', function (err, data) {
  if (err) {
    return console.log(err);
  }
  var title1 = "boogers";
  var text1 = "ronald mcdonald";
  var result = data.replace(/<div id="text1">(.*)<\/div>/g, '<div id="text1">' + text1 + '</div>');
  result = result.replace(/<title>(.*)<\/title>/g, '<title>' + title1 + '</title>');
  fs.writeFile('sample.html', result, 'utf8', function (err) {
    if (err) return console.log(err);
  });
});
Simply use the /g (global) flag:
var str = "aaabbcccasbbbb"; //main string
var repStr = "ss"; //replace string
var result = str.replace(/[ab]/g, repStr); //result
console.log(result);
Result will be: sssssssssscccsssssssssss
What I'm trying to do is download a CSV file, read it line by line, and add each line (split on ',') to tmparray.
This code works and prints all the elements in the array.
var request = require('request');
var fs = require('fs');
var readline = require('readline');

try {
  request('https://earthquake.usgs.gov/earthquakes/feed/v1.0/summary/2.5_day.csv').pipe(fs.createWriteStream("MyCsv.txt"));
} catch (e) {
  console.error(e);
}

var inputFile = 'MyCsv.csv';
var tmparray;

// read the file
var rd = readline.createInterface({
  input: fs.createReadStream('/home/nome/Node/MyCsv.csv')
});

try {
  // read line by line
  rd.on('line', (line) => {
    tmparray += line.split(",");
    // print the elements
    tmparray.forEach((element) => {
      console.log(element);
    }, this);
  });
} catch (e) {
  console.error(e);
}
What I want to do is print the array after it has been assigned.
I've tried this:
var request = require('request');
var fs = require('fs');
var readline = require('readline');

try {
  request('https://earthquake.usgs.gov/earthquakes/feed/v1.0/summary/2.5_day.csv').pipe(fs.createWriteStream("MyCsv.txt"));
} catch (e) {
  console.error(e);
}

var inputFile = 'MyCsv.csv';
var tmparray;

// read the file
var rd = readline.createInterface({
  input: fs.createReadStream('/home/nome/Node/MyCsv.csv')
});

try {
  // read line by line
  rd.on('line', (line) => {
    tmparray += line.split(",");
  });
} catch (e) {
  console.error(e);
} finally {
  console.log(tmparray); // undefined
  // or this: console.log(tmparray[0]) can't read the property '0' of undefined
}
but the array prints as undefined.
The problem is that rd.on(...) is asynchronous.
That means that you are telling rd that when it reads a line, it should add it to tmparray — but that doesn't actually happen yet. It happens moments later, after you console.log(tmparray).
You should say rd.on('close', () => console.log(tmparray)) to tell Node "when you have finished reading rd, then log the data".
There are a couple of other issues in the code, but they should be easier to find once this is fixed. Note that 'line' is the right event here, since rd is a readline interface rather than a raw readable stream. The bigger problem is that you're trying to build up an array using the += operator, which doesn't work: initialize tmparray as an empty array and push into it instead. As written, += converts everything to one long string, so it will still log something fairly reasonable for now.
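Putting those together, a minimal corrected sketch (assuming the same file path as in the question) might look like this:

var fs = require('fs');
var readline = require('readline');

var tmparray = []; // start with an empty array so we can push into it

var rd = readline.createInterface({
  input: fs.createReadStream('/home/nome/Node/MyCsv.csv')
});

rd.on('line', (line) => {
  // append the fields of this line to the array
  tmparray.push(...line.split(','));
});

rd.on('close', () => {
  // every line has been read at this point
  console.log(tmparray);
});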
Why not use the csv package? It will give you the same result. Here is an example of transforming a CSV file into an array:
const csv = require('csv');
const request = require('request');

const url = 'https://earthquake.usgs.gov/earthquakes/feed/v1.0/summary/2.5_day.csv';

request(url, function (err, response, data) {
  if (err) throw err;
  csv.parse(data, function (err, data) {
    if (err) throw err;
    // here you get your array
    console.log(data);
  });
});
I'm trying to write a feed to a file using Node.js. The problem is, it doesn't write all the feeds, only the last one.
var fs = require('fs');
var feedParser = require('ortoo-feedparser');

var url = "http://iwnsvg.com/feed";

feedParser.parseUrl(url).on('article', function (article) {
  console.log('title; ', article.title);
  fs.writeFile("articles.json", JSON.stringify(article.title), function (err) {
    if (err) {
      console.log(err);
    }
  });
});
Why?
Just change fs.writeFile( to fs.appendFile( and you're fine.
fs.writeFile overwrites your file each time you call it whereas fs.appendFile adds to a file.
As #Robert says, you should use appendFile, but also note that that change won't write out valid JSON. I'm not sure what output you're trying to achieve; if you just want the titles, you could write out a .txt file with a title on each line like so:
var fs = require('fs');
var feedParser = require('ortoo-feedparser');

var url = "http://iwnsvg.com/feed";

feedParser.parseUrl(url).on('article', function (article) {
  console.log('title; ', article.title);
  fs.appendFile("articles.txt", article.title + "\n", function (err) {
    if (err) {
      console.log(err);
    }
  });
});
To write out JSON you can do:
var fs = require('fs');
var feedParser = require('ortoo-feedparser');

var url = "http://iwnsvg.com/feed";

let titles = [];

feedParser.parseUrl(url)
  .on('article', function (article) {
    console.log('title; ', article.title);
    titles.push(article.title);
  })
  .on('end', function () {
    fs.writeFile('articles.json', JSON.stringify({ titles }), function (err) {
      if (err) {
        console.log(err);
      }
    });
  });
fs.writeFile comes with some options, like flag. The default value of flag is 'w' (write), so your data is replaced by the new data each time. Use 'a' (append) instead:
{ flag: 'a' }
and you'll be fine.
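For example, a minimal sketch of the original snippet with that option (assuming it runs inside the same 'article' handler as in the question):

fs.writeFile("articles.json", JSON.stringify(article.title) + "\n", { flag: 'a' }, function (err) {
  // the 'a' flag opens the file for appending instead of overwriting
  if (err) {
    console.log(err);
  }
});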
But don't forget that writeFile and appendFile are higher-level functions in the fs library that open and close the file each time you add data.
Preferably, use fs.createWriteStream, which returns a writable stream (a writable file handle, in other languages' terms). Then use and reuse this stream whenever you need to write data to your file.
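A minimal sketch of that approach, reusing one stream for the whole feed (the output file name here is an assumption):

var fs = require('fs');
var feedParser = require('ortoo-feedparser');

// open the file once and reuse the stream for every article
var out = fs.createWriteStream("articles.txt", { flags: 'a' });

feedParser.parseUrl("http://iwnsvg.com/feed").on('article', function (article) {
  // each write reuses the already-open file descriptor
  out.write(article.title + "\n");
});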
I am new to Node.js and JavaScript. I have a results.json file that I want to keep a running log of results from a script that pulls images from the web. However, my current script only overwrites the existing result. How do I build upon or add to the results.json so each subsequent result is logged in the results.json file? I would like it to be valid json.
Here is a general example:
var currentSearchResult = someWebSearchResult;
var fs = require('fs');
var json = JSON.stringify(['search result: ' + currentSearchResult], null, "\t");
fs.writeFile("results.json", json);
And the results.json:
[
"search result: currentSearchResult"
]
If you want the file to be valid JSON, you have to open your file, parse the JSON, append your new result to the array, transform it back into a string and save it again.
var fs = require('fs');
var currentSearchResult = 'example';

fs.readFile('results.json', 'utf8', function (err, data) {
  if (err) return console.log(err);
  var json = JSON.parse(data);
  json.push('search result: ' + currentSearchResult);
  fs.writeFile("results.json", JSON.stringify(json), function (err) {
    if (err) console.log(err);
  });
});
In general, if you want to append to a file you should use:
fs.appendFile("results.json", json , function (err) {
if (err) throw err;
console.log('The "data to append" was appended to file!');
});
appendFile creates the file if it does not exist.
But if you want to append JSON data, you should first read the existing data and then overwrite the file with the combined result:
fs.readFile('results.json', function (err, data) {
  var json = JSON.parse(data);
  json.push('search result: ' + currentSearchResult);
  fs.writeFile("results.json", JSON.stringify(json), function (err) {
    if (err) throw err;
    console.log('The "data to append" was appended to file!');
  });
});
Promise-based solution [JavaScript (ES6) + Node.js (v10 or above)]
const fsPromises = require('fs').promises;

fsPromises.readFile('myFile.json', 'utf8')
  .then(data => {
    let json = JSON.parse(data);
    json.myArr.push({ name: "Krishnan", salary: 5678 });
    fsPromises.writeFile('myFile.json', JSON.stringify(json))
      .then(() => { console.log('Append Success'); })
      .catch(err => { console.log("Append Failed: " + err); });
  })
  .catch(err => { console.log("Read Error: " + err); });
If your project supports JavaScript ES8, you could use async/await instead of native promises.
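For example, the same logic with async/await might look like this:

const fsPromises = require('fs').promises;

async function appendToMyFile() {
  try {
    const data = await fsPromises.readFile('myFile.json', 'utf8');
    const json = JSON.parse(data);
    json.myArr.push({ name: "Krishnan", salary: 5678 });
    await fsPromises.writeFile('myFile.json', JSON.stringify(json));
    console.log('Append Success');
  } catch (err) {
    console.log("Append Failed: " + err);
  }
}

appendToMyFile();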
I use the following code to read a specific file located in my C folder. My question is how to get specific content from inside this file. For example, I have a text file with the content:
name:test 1 test2 test 3
user: foo bar
When the file is read, I need to get "test 1 test2 test 3" as a string whenever I find the key name in the text file.
How can I do that?
var fs = require('fs');

fs.readFile('c//myfile.txt', 'utf8', function (err, data) {
  if (err) {
    return console.log(err);
  }
});
The other answer will have issues when your value contains ':'.
For example, when your line is 'name: bla:bli:blub', you would only get 'bla' as a result.
So this is my suggestion, which won't have that problem.
You could place this snippet inside your readFile callback:
var keyValueObject = {};

data.split("\n").forEach(function (element) {
  // split each line at the first ':' only, so values may contain ':'
  var keyValueSeparatorPosition = element.indexOf(':');
  var key = element.substr(0, keyValueSeparatorPosition);
  var value = element.substring(keyValueSeparatorPosition + 1);
  keyValueObject[key] = value;
});
You can then access your values using:
keyValueObject['name']
EDIT1:
I've made it a little more generic now: keyVals is an array of objects holding the key/value pairs from your file.
var fs = require('fs');
var keyVals = [];

fs.readFile('c//myfile.txt', 'utf8', function (err, data) {
  if (err) {
    return console.log(err);
  }
  var lines = data.split('\n');
  lines.forEach(function (line) {
    var pair = line.split(':');
    keyVals.push({ key: pair[0], val: pair[1] });
  });
});
Access it like this:
keyVals.forEach(function (element) {
  console.log(element.key, element.val);
});
I hope this helps
What is the simplest way to compare a hash of a file without storing it in a database?
For example:
var crypto = require('crypto');
var fs = require('fs');

var filename = __dirname + '/../public/index.html';
var shasum = crypto.createHash('sha1');

var s = fs.ReadStream(filename);
s.on('data', function (d) {
  shasum.update(d);
});
s.on('end', function () {
  var d = shasum.digest('hex');
  console.log(d + ' ' + filename);
  fs.writeFile(__dirname + "/../public/log.txt", d.toString() + '\n', function (err) {
    if (err) {
      console.log(err);
    } else {
      console.log("The file was saved!");
    }
  });
});
The above code returns the hash of the HTML file. If I edit the file how can I know if it has been changed? In other words, how can I know if the hash has been changed?
Any suggestions?
Edited
Now the hash is being saved in the log file. How can I retrieve the hash from the file and match it with the newly generated one? A code example would be awesome to give me a better understanding.
This is essentially the same as this question, but it still isn't clear to me how to implement it.
If you're looking for changes on a file, then you can use one of Node's filesystem functions, fs.watch. This is how it's used:
fs.watch(filename, function (event, filename) {
  // event is either 'rename' or 'change'
  // filename is the name of the file which triggered the event
});
The watch function is however not very consistent, so you can use fs.watchFile as an alternative. fs.watchFile uses stat polling, so it's quite a bit slower than fs.watch, which detects file changes instantly.
Watching a file will return an instance of fs.FSWatcher, which has the events change and error. Calling .close will stop watching for changes on the file.
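For instance, a minimal fs.watchFile sketch (the polling interval here is just an illustration):

fs.watchFile(filename, { interval: 1000 }, function (curr, prev) {
  // curr and prev are fs.Stats objects for the watched file
  if (curr.mtime.getTime() !== prev.mtime.getTime()) {
    console.log(filename + ' was modified at ' + curr.mtime);
  }
});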
Here's an example relating to your code:
var crypto = require('crypto');
var fs = require('fs');

var filename = __dirname + '/../public/index.html';
var shasum = crypto.createHash('sha1');
var oldhash = null;

var s = fs.ReadStream(filename);
s.on('data', function (d) {
  shasum.update(d);
});
s.on('end', function () {
  var d = shasum.digest('hex');
  console.log(d + ' ' + filename);
  oldhash = d.toString();
  fs.writeFile(__dirname + "/../public/log.txt", d.toString() + '\n', function (err) {
    if (err) {
      console.log(err);
    } else {
      console.log("The file was saved!");
    }
  });
});

// watch the log for changes
fs.watch(__dirname + "/../public/log.txt", function (event, filename) {
  // read the log contents as a string so it can be compared to the hash
  fs.readFile(__dirname + "/../public/log.txt", 'utf8', function (err, data) {
    // match the file contents with the old hash (trim the trailing newline)
    if (data.trim() === oldhash) {
      // do something
    }
  });
});
What's the difference between this question and the previous one you asked? If you're not wanting to store it in a database, then store it as a file. If you want to save the hash for multiple files, then maybe put them in a JSON object and write them out as a .json file so they're easy to read/write.
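For instance, a minimal sketch of that idea (the file names and hash values here are made up):

var fs = require('fs');

// map of file path -> last known hash
var hashes = {
  'index.html': 'e242ed3bffccdf271b7fbaf34ed72d089537b42f',
  'about.html': '6eae3a5b062c6d0d79f070c26e6d62486b40cb46'
};

// persist all hashes in one JSON file so they're easy to read/write
fs.writeFile('hashes.json', JSON.stringify(hashes, null, 2), function (err) {
  if (err) console.log(err);
});

// later, read them back and look one up
fs.readFile('hashes.json', 'utf8', function (err, data) {
  if (err) return console.log(err);
  var stored = JSON.parse(data);
  console.log(stored['index.html']);
});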
EDIT
Given what you added to your question, it should be pretty simple. You might write a function to check and re-write:
function updateHash(name, html, callback) {
  var sha = crypto.createHash('sha1');
  sha.update(html);
  var newHash = sha.digest('hex');

  var hashFileName = name + '.sha';
  fs.readFile(hashFileName, 'utf8', function (err, oldHash) {
    var changed = true;
    if (err)
      console.log(err); // probably indicates the file doesn't exist, but you should consider doing better error handling
    if (oldHash === newHash)
      changed = false;
    fs.writeFile(hashFileName, newHash, { encoding: 'utf8' }, function (err) {
      callback(err, changed);
    });
  });
}
updateHash('index.html', "<html><head><title>...", function (err, isChanged) {
  // do something with this information ?
  console.log(isChanged);
});