Say I have a file called test.txt which is filled with nonsense text.
But among this text is a specific string containing numbers in single quotes (''), like so:
blahblahblah:
Blahblahblah:
targetNumber: '30' <== target numbers
Blahblahblah
How would I go about replacing only these single-quoted numbers, without replacing the entire file contents, like so:
blahblahblah:
Blahblahblah:
targetNumber: '22' <== changed numbers
Blahblahblah
So far the regex functions I've constructed using numerous other questions on Stack Overflow don't seem to want to work, and when they do work, they don't work as expected: they always seem to replace the entire contents of the file rather than just that set of numbers.
My latest regex attempt is below, but it doesn't work either; it just replaces the entire file contents, so I don't know where I'm going wrong. I'm still learning regex, can anyone please help?
var folder = "just/a/test/folder";
fs.readFile(path.join(folder, 'test.txt'), 'utf8', (error, data) => {
if (error) {
console.log('Unable to Read File');
return;
};
var regex = /'\d{1,2}\'/
var str = data.substring(data.indexOf("targetNumber: ") + 14);
var m = str.match(regex)
var replace = str.replace(m[1], "22");
fs.writeFile(path.join(folder, 'test.txt'), replace, 'utf8', (error) => {
if (error) {
console.log('Modifying File failed')
return;
}
});
});
You should do the replacement over the full data. Then you can capture targetNumber: ' in a group and use that group again with $1 in the replacement, followed by your new number.
const folder = "just/a/test/folder";
const file = 'test.txt'
fs.readFile(path.join(folder, file), 'utf8', (error, data) => {
if (error) {
console.log('Unable to Read File');
return;
}
const regex = /\b(targetNumber:\s*')\d{1,2}'/g;
const replace = data.replace(regex, "$122'");
fs.writeFile(path.join(folder, file), replace, 'utf8', (error) => {
if (error) {
console.log('Modifying File failed')
}
});
});
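To sanity-check just the replacement in isolation, here is a quick sketch using the sample text from the question; the $1 backreference keeps the targetNumber: ' prefix intact:
const sample = "Blahblahblah:\ntargetNumber: '30'\nBlahblahblah";
const regex = /\b(targetNumber:\s*')\d{1,2}'/g;
console.log(sample.replace(regex, "$122'"));
// Blahblahblah:
// targetNumber: '22'
// Blahblahblah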
I am trying to append a string to a log file. However, writeFile erases the content each time before writing the string.
fs.writeFile('log.txt', 'Hello Node', function (err) {
if (err) throw err;
console.log('It\'s saved!');
}); // => log.txt erased, contains only 'Hello Node'
Any idea how to do this the easy way?
For occasional appends, you can use appendFile, which creates a new file handle each time it's called:
Asynchronously:
const fs = require('fs');
fs.appendFile('message.txt', 'data to append', function (err) {
if (err) throw err;
console.log('Saved!');
});
Synchronously:
const fs = require('fs');
fs.appendFileSync('message.txt', 'data to append');
But if you append repeatedly to the same file, it's much better to reuse the file handle.
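For example, a minimal sketch of reusing a single handle via an append-mode write stream (the stream-based answers below cover this in more depth):
const fs = require('fs');
const stream = fs.createWriteStream('message.txt', { flags: 'a' }); // one handle, opened once
stream.write('first entry\n');
stream.write('second entry\n');
stream.end(); // close the handle when you are done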
When you want to write to a log file, i.e. append data to the end of a file, never use appendFile. appendFile opens a file handle for each piece of data you add to your file; after a while you get a beautiful EMFILE error.
I can add that appendFile is not easier to use than a WriteStream.
Example with appendFile:
console.log(new Date().toISOString());
[...Array(10000)].forEach( function (item,index) {
fs.appendFile("append.txt", index+ "\n", function (err) {
if (err) console.log(err);
});
});
console.log(new Date().toISOString());
On my computer, appending works up to around 8000 iterations; beyond that you obtain this:
{ Error: EMFILE: too many open files, open 'C:\mypath\append.txt'
at Error (native)
errno: -4066,
code: 'EMFILE',
syscall: 'open',
path: 'C:\\mypath\\append.txt' }
Moreover, appendFile writes whenever the file becomes available, so your logs will not be written in timestamp order. You can test this with the example: set 1000 in place of 10000 and the order will be random, depending on access to the file.
If you want to append to a file, you must use a writable stream like this:
var stream = fs.createWriteStream("append.txt", {flags:'a'});
console.log(new Date().toISOString());
[...Array(10000)].forEach( function (item,index) {
stream.write(index + "\n");
});
console.log(new Date().toISOString());
stream.end();
You end it when you want. You are not even required to call stream.end(): the default option is autoClose: true, so your file will be closed when your process ends and you avoid opening too many files.
Code that calls createWriteStream for every write creates a file descriptor for every write. logStream.end is better because it asks Node to close the descriptor immediately after the write.
var fs = require('fs');
var logStream = fs.createWriteStream('log.txt', {flags: 'a'});
// use {flags: 'a'} to append and {flags: 'w'} to erase and write a new file
logStream.write('Initial line...');
logStream.end('this is the end line');
Besides appendFile, you can also pass a flag in writeFile to append data to an existing file.
fs.writeFile('log.txt', 'Hello Node', {'flag':'a'}, function(err) {
if (err) {
return console.error(err);
}
});
By passing flag 'a', data will be appended at the end of the file.
Use the a+ flag to append and to create the file if it doesn't exist:
fs.writeFile('log.txt', 'Hello Node', { flag: "a+" }, (err) => {
if (err) throw err;
console.log('The file is created if not existing!!');
});
Docs: https://nodejs.org/api/fs.html#fs_file_system_flags
You need to open it, then write to it.
var fs = require('fs'), str = 'string to append to file';
fs.open('filepath', 'a', 0o666, function( e, id ) {
fs.write( id, str, null, 'utf8', function(){
fs.close(id, function(){
console.log('file closed');
});
});
});
Here are a few links that will help explain the parameters:
open
write
close
EDIT: This answer is no longer valid, look into the new fs.appendFile method for appending.
My approach is rather special. I basically use the WriteStream solution, but without actually 'closing' the fd with stream.end(). Instead I use cork/uncork. This has the benefit of low RAM usage (if that matters to anyone) and I believe it's safer to use for logging/recording (my original use case).
Following is a pretty simple example. Notice I just added a pseudo loop as a showcase; in production code I am waiting for websocket messages.
var stream = fs.createWriteStream("log.txt", {flags:'a'});
while (true) {
stream.cork();
stream.write("some content to log");
process.nextTick(() => stream.uncork());
}
uncork will flush the data to the file in the next tick.
In my scenario there are peaks of up to ~200 writes per second in various sizes. During night time however only a handful writes per minute are needed. The code is working super reliable even during peak times.
Node.js 0.8 has fs.appendFile:
fs.appendFile('message.txt', 'data to append', (err) => {
if (err) throw err;
console.log('The "data to append" was appended to file!');
});
Documentation
Using fs.appendFile or fsPromises.appendFile is the fastest and most robust option when you need to append something to a file.
In contrast to what some of the answers suggest, if a file path is supplied to the appendFile function, it actually closes the file by itself. Only when you pass in a file handle that you get from something like fs.open() do you have to take care of closing it.
I tried it with over 50,000 lines in a file.
Examples:
(async () => {
// using appendFile.
const fsp = require('fs').promises;
await fsp.appendFile(
'/path/to/file', '\r\nHello world.'
);
// using apickfs; handles error and edge cases better.
const apickFileStorage = require('apickfs');
await apickFileStorage.writeLines(
'/path/to/directory/', 'filename', 'Hello world.'
);
})();
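For the file-handle case mentioned above, here is a minimal sketch (assuming fs.promises; '/path/to/file' is a placeholder, and here you do have to close the handle yourself):
const fsp = require('fs').promises;

(async () => {
  const handle = await fsp.open('/path/to/file', 'a'); // you opened it, so you close it
  try {
    await handle.appendFile('\r\nAppended via an explicit file handle.');
  } finally {
    await handle.close(); // appendFile will not close a handle that was passed in
  }
})();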
Ref: https://github.com/nodejs/node/issues/7560
If you want an easy and stress-free way to write logs line by line in a file, then I recommend fs-extra:
const os = require('os');
const fs = require('fs-extra');
const file = 'logfile.txt';
const options = {flag: 'a'};
async function writeToFile(text) {
await fs.outputFile(file, `${text}${os.EOL}`, options);
}
writeToFile('First line');
writeToFile('Second line');
writeToFile('Third line');
writeToFile('Fourth line');
writeToFile('Fifth line');
Tested with Node v8.9.4.
fd = fs.openSync(path.join(process.cwd(), 'log.txt'), 'a')
fs.writeSync(fd, 'contents to append')
fs.closeSync(fd)
I offer this suggestion only because control over open flags is sometimes useful. For example, you may want to truncate an existing file first and then append a series of writes to it, in which case use the 'w' flag when opening the file and don't close it until all the writes are done. Of course appendFile may be what you're after :-)
fs.open('log.txt', 'a', function(err, log) {
if (err) throw err;
fs.writeFile(log, 'Hello Node', function (err) {
if (err) throw err;
fs.close(log, function(err) {
if (err) throw err;
console.log('It\'s saved!');
});
});
});
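A minimal sketch of that truncate-then-append pattern, using the synchronous calls from above ('log.txt' is just a placeholder name):
const fd = fs.openSync('log.txt', 'w'); // 'w' truncates the file (or creates it)
fs.writeSync(fd, 'first line\n');
fs.writeSync(fd, 'second line\n');      // later writes keep appending to the open descriptor
fs.closeSync(fd);                       // close only once all the writes are done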
Using the jfile package:
myFile.text+='\nThis is new line to be appended'; //myFile=new JFile(path);
Try to use flags: 'a' to append data to a file
var stream = fs.createWriteStream("udp-stream.log", {'flags': 'a'});
stream.once('open', function(fd) {
stream.write(msg+"\r\n");
});
Here's a full script. Fill in your file names and run it and it should work!
var fs = require('fs');
function ReadAppend(file, appendFile){
fs.readFile(appendFile, function (err, data) {
if (err) throw err;
console.log('File was read');
fs.appendFile(file, data, function (err) {
if (err) throw err;
console.log('The "data to append" was appended to file!');
});
});
}
// edit this with your file names
var file = 'name_of_main_file.csv';
var appendFile = 'name_of_second_file_to_combine.csv';
ReadAppend(file, appendFile);
const inovioLogger = (logger = "") => {
    // append the line to a dated log file and echo it to stdout
    const log_file = fs.createWriteStream(__dirname + `/../../inoviopay-${new Date().toISOString().slice(0, 10)}.log`, { flags: 'a' });
    const log_stdout = process.stdout;
    log_file.write(logger + '\n');
    log_stdout.write(logger + '\n');
}
In addition to denysonique's answer, you sometimes want the asynchronous appendFile and other async fs methods in Node.js in a promise-returning form instead of the callback style. To get one, wrap the function with the promisify higher-order function, or import the async functions from the promises namespace:
const { appendFile } = require('fs').promises;
await appendFile('path/to/file/to/append', dataToAppend, optionalOptions);
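The util.promisify route looks roughly like this (a sketch; dataToAppend is a placeholder as above):
const { promisify } = require('util');
const fs = require('fs');
const appendFileAsync = promisify(fs.appendFile);

await appendFileAsync('path/to/file/to/append', dataToAppend);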
I hope it'll help 😉
I wrapped the async fs.appendFile into a Promise-based function. Hope it helps others to see how this would work.
append (path, name, data) {
  return new Promise((resolve, reject) => {
    // no async executor needed; fs.appendFile's callback resolves or rejects the promise
    fs.appendFile(path + name, data, (err) => {
      if (!err) {
        return resolve(path + name);
      } else {
        return reject(err);
      }
    });
  });
}
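Usage might then look like this (inside an async function; note that path and name are simply concatenated):
const savedPath = await append('./logs/', 'app.log', 'a new log line\n');
console.log(`Appended to ${savedPath}`);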
I'm using fs to save some data in a file. The file starts with 0 on the first line and 0 on the second line. Then I want to increment the second line by one. Sometimes, after some number of tries (random; sometimes 100 and sometimes 700), something happens that leaves the first line empty and NaN on the second one. The code:
const Discord = require("discord.js");
const fs = require('fs');
module.exports = {
name: "testkom",
aliases: [],
async execute(message, args, client) {
var data = fs.readFileSync('peakcommands.txt').toString().split("\n");
data[1]++;
var text = data.join("\n");
fs.writeFile('peakcommands.txt', text, function (err) {
if (err) return console.log(err);
});
message.channel.send (data[1]);
}
}
I think it could either be an encoding issue or possibly a data race.
To fix the possible race condition, switch fs.writeFile to fs.writeFileSync, which will ensure that the file write is synchronous.
To fix the encoding issue, you can specify the encoding, like this:
try {
  fs.writeFileSync('peakcommands.txt', text, 'utf8');
} catch (err) {
  console.log(err);
}
If nothing works, check the docs.
I have this script, which I run in a program called Discord Bot Maker. I'm trying to give the bot the ability to search for a word in a txt file, then remove that word and save the file:
let fs = require('fs');
let a = tempVars('a');
let b = tempVars('carte');
fs.readFile(`resources/${a}.txt`, { encoding: 'utf-8' }, (err, data) => {
if (err) throw err;
let dataArray = data.split('\n'); // convert file data in an array
const searchKeyword = `${b}`; // we are looking for a line in the file that contains the keyword
const key = dataArray.filter((arr) => arr.includes(searchKeyword));
const index = key.length >= 1 && dataArray.indexOf(key[0]);
if (index > -1) dataArray.splice(index, 1);
// update the file with the new data
// this will remove the line containing the search keyword from the file
const updatedData = dataArray.join('\n');
fs.writeFile(`resources/${a}.txt`, updatedData, (writeErr) => {
if (writeErr) throw writeErr;
console.log('Successfully updated the file data');
});
});
the tempVars("xx") variables are given by a program named discord bot maker, so no trouble with that.
My problem is that when the var "b" (who is the parameter of a command in discord) doesnt exist in the txt file, the script delete the first word in the file !
How can i add a condition to this script (If b dont exist in file, stop the script and return a message)
thank you very mutch guys ! have a good day
You can use the replace method without converting the file into an array.
let fs = require('fs');
let a = tempVars('a');
let b = tempVars('carte');
fs.readFile(`resources/${a}.txt`, { encoding: 'utf-8' }, (err, data) => {
if (err) throw err;
const updatedData = data.replace(b, '');
fs.writeFile(`resources/${a}.txt`, updatedData, (writeErr) => {
if (writeErr) throw err;
console.log('Successfully updated the file data');
});
});
This method will replace only the first matched word. If you want to replace all matched words, you can use a regular expression as the first parameter.
The g flag stands for global, and m for multi-line.
const regex = new RegExp(b, 'gm')
data.replace(regex, '');
In case you want to test whether the file contains the requested word, you can use the .includes function and an if statement.
if (!data.includes(b)) {
console.log("Requested word is not present in file")
// Your logic here
return
}
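Putting it together, the readFile callback body might look roughly like this (a sketch combining the snippets above; escaping b before building the RegExp is left out):
fs.readFile(`resources/${a}.txt`, { encoding: 'utf-8' }, (err, data) => {
  if (err) throw err;
  // stop early and report when the requested word is not present
  if (!data.includes(b)) {
    console.log('Requested word is not present in file');
    return;
  }
  const regex = new RegExp(b, 'gm'); // remove every occurrence of b
  const updatedData = data.replace(regex, '');
  fs.writeFile(`resources/${a}.txt`, updatedData, (writeErr) => {
    if (writeErr) throw writeErr;
    console.log('Successfully updated the file data');
  });
});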
I am trying to find and replace all text in between comments in my react component using regex.
This is an example of the react component with comments:
<Text>{/*text1*/}hey hey hey{/*text1*/}</Text>
I am able to replace the text if I change {/*text1*/} to {text1}. The issue is that whenever I try to add the /* */ part of the comment to my regex, it keeps adding an extra backslash.
Below is my current function that works if I remove the /* and */ from the comment in my component:
async function doIt(fileName, location, replacementText) {
let tempRegex = "(?<={" + location + "})(.*)(?={" + location + "})"
let regex = new RegExp(tempRegex)
await fs.readFile(path.join(__dirname, fileName), 'utf8', function (err, data) {
if (err) throw err
content = data
content = content.replace(regex, replacementText)
fs.writeFile(path.join(__dirname, fileName), content, 'utf8', function (err) {
if (err) throw err
console.log('FILE SAVED')
})
})
}
doIt('change.js', "text1", 'test test')
My idea was to change my tempRegex to:
"(?<={/*" + location + "*/})(.*)(?={/*" + location + "*/})"
but it keeps adding extra characters and looks like this:
/(?<={\/*textBox1*\/})(.*)(?={\/*textBox1*\/})/
Is there a way to add forward slashes without it adding an extra backslash?
The forward slash / is a special character in regular expression literals, because it denotes the beginning and the end of them. If you want to match a forward slash inside your RegExp literal, you have to escape it. That's what the backslashes do.
So this regular expression is invalid:
/{//
Whereas this one is valid:
/{\//
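Note, though, that when the pattern is built with new RegExp from a string (as in the question), the forward slash itself needs no escaping; the extra backslash is just how Node prints the compiled expression. A small sketch:
const r = new RegExp("(?<={/\\*text1\\*/})(.*)(?={/\\*text1\\*/})");
console.log(r); // /(?<={\/\*text1\*\/})(.*)(?={\/\*text1\*\/})/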
I just realized my issue was that the asterisk was not being escaped in the pattern string passed to new RegExp. This was my final solution; I had to add two backslashes in front of each asterisk for it to work.
export default function updateText(fileName, location, replacementText) {
let tempRegex = "(?<={/\\*" + location + "\\*/})(.*)(?={/\\*" + location + "\\*/})"
let regex = new RegExp(tempRegex)
fs.readFile(path.join(__dirname, fileName), 'utf8', function (err, data) {
if (err) throw err
let content = data
content = content.replace(regex, replacementText)
fs.writeFile(path.join(__dirname, fileName), content, 'utf8', function (err) {
if (err) throw err
console.log('FILE SAVED')
})
})
}