In the following block of code, I am trying to get user input and save it in a JSON file. If the inserted item already exists in the file, it gets rejected. The issue is that my program always executes the catch clause and generates an empty array, although in some situations the JSON file exists and is ready to be read.
const fs = require('fs');
// Load the saved notes from note-data.json.
// Returns the parsed notes array, or an empty array when the file is
// missing or holds invalid JSON.
// FIX: the original returned [JSON.parse(stringData)], which wraps the
// saved array inside another array -- the duplicate check in addNote then
// filtered over a one-element array-of-arrays and never saw the titles.
const loadFile = () => {
  try {
    const stringData = fs.readFileSync('note-data.json', 'utf8');
    return JSON.parse(stringData);
  } catch (e) {
    // File absent or unparseable: start with no notes.
    return [];
  }
};
// Serialize the notes array and persist it to note-data.json.
const writeFile = (notes) => {
  const serialized = JSON.stringify(notes);
  fs.writeFileSync('note-data.json', serialized);
};
// Add a note with the given title/body unless a note with the same title
// is already stored; duplicates are silently skipped.
const addNote = (title, body) => {
  const notes = loadFile();
  const newNote = {
    title,
    body
  };
  console.log(notes);
  // Existing notes whose title collides with the one being added.
  const duplicateArray = notes.filter((existing) => existing.title === title);
  console.log(duplicateArray);
  if (duplicateArray.length === 0) {
    notes.push(newNote);
    writeFile(notes);
  }
};
The command line input is as follow:
node app.js add --title=Greeting --body="hello"
node app.js add --title=Greeting2 --body="hello2"
The output is:
[{"title":"Greeting","body":"hello"}]
The output should be:
[{"title":"Greeting","body":"hello"}, {"title":"Greeting2","body":"hello2"}]
My question is where this error occurs?
Typo?
fs.writeFileSync('note-date.json', JSON.stringify(notes));
Isn't the file 'note-data.json'?
Related
I have two js scripts that I would like to merge into one, but I do not know how.
Script one, uploads all files inside specified folder into virustotal, scans them, and returns the result of the scan.
Script two, lists all files inside the specified folder and all of its subfolders.
I would like to make a script that uploads all files inside specified folder and all of its subfolders into virustotal, scans them, and returns the result of the scan.
How would I go about doing that?
Script one:
/*jshint esversion: 8 */
// Node core + third-party dependencies for the scanner.
const path = require('path');
const fsp = require('fs').promises;
const VirusTotalApi = require("virustotal-api");
// NOTE(review): substitute a real API key before running.
const virusTotal = new VirusTotalApi('<YOUR API KEY>');
// Directory whose files are uploaded for scanning (non-recursive).
const basePath = '/home/username/Desktop/TEST/';
// Promise-based sleep, used between uploads to respect the API rate limit.
const wait = (time) => new Promise((resolve) => setTimeout(resolve, time));
/**
 * Upload every file directly inside basePath to VirusTotal, append each
 * report line to Result.txt, and pause 30s between successful uploads.
 * Per-file errors are collected (not fatal); if any occurred, a single
 * aggregate Error carrying them in `allErrors` is thrown at the end.
 */
async function scan() {
  const entries = await fsp.readdir(basePath);
  const failures = [];
  for (const entry of entries) {
    const absolute = path.join(basePath, entry);
    console.log(entry);
    try {
      const contents = await fsp.readFile(absolute);
      const scanResponse = await virusTotal.fileScan(contents, entry);
      const report = await virusTotal.fileReport(scanResponse.resource);
      const resultLine = `${entry}: ${JSON.stringify(report, ["verbose_msg","total","positives"])}\n`;
      await fsp.appendFile('Result.txt', resultLine);
      console.log(`${entry}: Saved!`);
    } catch (e) {
      // Record the failure, log it, and move on to the next file
      // (skipping the rate-limit pause below).
      e.fullPath = absolute;
      failures.push(e);
      console.log(`Error processing ${absolute}`, e);
      continue;
    }
    // Wait for 30 seconds
    await wait(30000);
  }
  // Surface every per-file error in one aggregate throw.
  if (failures.length) {
    const aggregate = new Error("Problems scanning files");
    aggregate.allErrors = failures;
    throw aggregate;
  }
}
// Kick off the scan and report the overall outcome.
(async () => {
  try {
    await scan();
    console.log("all done scanning - no errors");
  } catch (err) {
    console.log(err);
  }
})();
Script two:
const { promisify } = require('util');
const { resolve } = require('path');
const fs = require('fs');
// Promise-returning wrappers around the callback-style fs APIs.
const readdir = promisify(fs.readdir);
const stat = promisify(fs.stat);
/**
 * Recursively collect the absolute paths of every file under `dir`.
 * Directories are descended into in parallel; the result is one flat array.
 */
async function getFiles(dir) {
  const entries = await readdir(dir);
  const nested = await Promise.all(
    entries.map(async (entry) => {
      const fullPath = resolve(dir, entry);
      const stats = await stat(fullPath);
      // Directories recurse (yielding an array); files yield their path.
      return stats.isDirectory() ? getFiles(fullPath) : fullPath;
    })
  );
  // Flatten one level per recursion depth.
  return nested.reduce((flat, item) => flat.concat(item), []);
}
// List everything under the test directory, logging errors to stderr.
(async () => {
  try {
    const files = await getFiles('/home/username/Desktop/TEST');
    console.log(files);
  } catch (e) {
    console.error(e);
  }
})();
You have quite a few options to get to a result here. The quick and dirty approach is to:
eliminate naming conflicts (make sure nothing is named the same between the two files)
Copy over the consts and the function in file B into file A.
Copy the getFiles call in right after the scan().then... call
There are other cleaner approaches. But this should get you to a proof of concept that it is possible to have both scripts function together in a single script.
I am getting strings from a text file in Javascript.
I just want to show a message whenever the user enters the wrong file name or invalid file name.
like this: console.log("Your input is invalid");
My code to read my text file and transfer the data to my variable is as follow:
const fs = require('fs');
// Read the whole file as one UTF-8 string (readFileSync throws if
// abc.txt does not exist -- that is the case the question asks about).
// FIX: dropped the pointless `[]` initializer that was immediately
// overwritten; note that readFileSync with 'utf8' returns a string,
// not an array of strings.
var strings = fs.readFileSync('abc.txt','utf8');
I don't know how to check the existence of the file and put it in an if-else statement.
You can use existsSync
const fs = require('fs');
const filePath = './file.txt';
// Report whether the expected file is present before trying to read it.
try {
  const fileIsPresent = fs.existsSync( filePath );
  if (!fileIsPresent) {
    // TASK TO PERFORM IF FILE DOESN'T EXISTS
    console.log("Your input is invalid");
  } else {
    // TASK TO PERFORM IF FILE EXISTS
    console.log("File exists.");
  }
} catch(err) {
  console.error(err);
}
You can use the below code to check for the existence of a file:
const fs = require('fs');
const filePath = 'abc.txt';
// existsSync returns a boolean; the try/catch guards unexpected failures.
try {
  const found = fs.existsSync(filePath);
  if (!found) {
    // not found
  } else {
    // exist
  }
} catch (error) {
  console.log(error);
}
I am reading a json file from within a zip file using jszip. I can open it and get the information that I want into the console from my function. I can't however get that information out of my javascript function. Maybe I am doing it wrong. Don't usually code using js.
const JSZip = require("jszip");
const fs = require("fs");
// Module-level holder for the extracted value; only assigned inside the
// async callbacks below, so it is not reliably set when callers look at it.
var myReturn;
// Opens the zip at bookPath+bookName, reads fileName out of it, parses the
// JSON, and logs the value stored under keyOption in the first array element.
// NOTE(review): the `return myReturn;` below returns from the innermost
// callback, not from readJsons itself, so readJsons always returns undefined.
function readJsons(bookPath,bookName,fileName,filePath,keyOption){
fs.readFile(bookPath + bookName, function(err, data) {
if (err) throw err;
JSZip.loadAsync(data).then(function (zip) {
// Read the contents of the '.txt' file
zip.file(filePath + fileName).async("string").then(function (data) {
var mydata = JSON.parse(data);
//gets the value of the key entered
myReturn = JSON.stringify(mydata[0][keyOption]); //value here should be "test book"
console.log(myReturn); //printed in console is "test book" works to here
return myReturn;
});
});
});
}
// Logs undefined: readJsons has no return value (see note above).
console.log(readJsons('simplelbook.zip','','frontMatter.json','','bookName'));
The problem is that you are returning inside the callback, so you aren't returning in the actual function. The solution would be using async/await instead:
const JSZip = require("jszip");
const fs = require("fs");
const util = require("util"); // require the util module
// Promisified fs.readFile, so the zip bytes can be awaited below.
const readFile = util.promisify(fs.readFile); // transform fs.readFile into a more compatible form
/**
 * Extract the value stored under `keyOption` in the first element of the
 * JSON file `filePath + fileName` inside the zip at `bookPath + bookName`.
 * Resolves with the JSON-stringified value, or undefined after logging
 * any error encountered along the way.
 */
async function readJsons(bookPath, bookName, fileName, filePath, keyOption) {
  try {
    const zipBytes = await readFile(bookPath + bookName);
    const archive = await JSZip.loadAsync(zipBytes);
    const rawText = await archive.file(filePath + fileName).async("string");
    const parsed = JSON.parse(rawText);
    return JSON.stringify(parsed[0][keyOption]);
  } catch (e) {
    // Swallow and log; the caller then receives undefined.
    console.error(e);
  }
}
// Self-executing async wrapper so the awaited result can be logged.
(async () => {
  const value = await readJsons("simplelbook.zip", "", "frontMatter.json", "", "bookName");
  console.log(value);
})()
Notice I have imported the util module to turn fs.readFile into a function that is more suited for async/await :)
I have an script that reads the file and compares the string by a pattern, if it returns false it will delete the line on the .txt file.
This is my code
const readline = require('readline');
const lineReplace = require('line-replace')
const fs = require('fs');
// File whose lines are validated (and blanked when invalid) below.
const inputFileName = './outputfinal.txt';
// Emits one 'line' event per line of the input file.
const readInterface = readline.createInterface({
input: fs.createReadStream(inputFileName),
});
// Accumulated { input, testResult } records, one per processed line.
let testResults = [];
// For each line: validate it, record the outcome, and blank the line
// in the file when validation fails.
readInterface.on('line', line => {
// NOTE(review): `testResult` is never declared, so this assignment creates
// an implicit global.
testResult = test(line);
console.log(`Test result (line #${testResults.length+1}): `, testResult);
testResults.push({ input: line, testResult } );
if (testResult == false){
console.log(`Line #${testResults.length} will get deleted from this list`);
// NOTE(review): lineReplace is asynchronous, so multiple replacements can
// run against the same file concurrently and clobber each other -- see the
// rewrite suggested in the answer.
lineReplace({
file: './outputfinal.txt',
line: testResults.length,
text: '',
addNewLine: false,
callback: onReplace
});
// Intentionally-empty completion callback for lineReplace.
function onReplace({file, line, text, replacedText}) {
};
};
});
// You can do whatever with the test results here.
//readInterface.on('close', () => {
// console.log("Test results:", testResults);
//});
/**
 * Validate a CSV-ish record "uuid,number,email".
 * Returns true only when: the string splits into exactly 3 comma-separated
 * fields, field 2 is a truthy number, field 1 looks like a UUID
 * (8-4-4-4-12 alphanumeric groups), and field 3 matches the email regex.
 *
 * FIX: the email regex separator was '#', a scraping artifact -- an email
 * address separator is '@' (the same mangling appears elsewhere in this
 * page, e.g. "#dandavis" for "@dandavis").
 */
function test(str){
  let regex = /^\w+([\.-]?\w+)*@\w+([\.-]?\w+)*(\.\w{2,3})+$/; // email regex
  str = str.split(",");
  // string should be of length 3 with str[1] number of length 7
  if(str && str.length === 3 && Number(str[1]) && str[1] ) {
    let temp = str[0].split("-");
    // check for 85aecb80-ac00-40e3-813c-5ad62ee93f42 separately.
    if(temp && temp.length === 5 && /[a-zA-Z\d]{8}/.test(temp[0]) && /[a-zA-Z\d]{4}/.test(temp[1]) && /[a-zA-Z\d]{4}/.test(temp[2]) && /[a-zA-Z\d]{4}/.test(temp[3]) && /[a-zA-Z\d]{12}/.test(temp[4])){
      // email regex
      if(regex.test(str[2])) {
        return true;
      } else {
        return false;
      }
    } else {
      return false
    }
  } else {
    return false;
  }
}
But it isn't working; it returns the error "no such file or directory". I don't think this is the correct way to write a line-remover script.
First, if the error is "no such file or directory" is because the file doesn't exist. Please check the file exists at first in the same root of your project.
Second, don't use the library "line-replace", if you check the code this create a tmp file and rewrite all the file in a tmp with the replacement. When finish the process the tmp file is renamed to the original.
Third, if you analyze the code, "lineReplace" is async. So it will sometimes try to open the file multiple times simultaneously and consequently write to it at the same time. This will create unexpected results.
The best recommendation is you must see how File works and Promises (async) in nodejs:
https://nodejs.org/api/fs.html
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise
https://itnext.io/javascript-promises-with-node-js-e8ca827e0ea3
If you see the next code, you will see the next steps:
Create the tmp Route
Create the tmp File
Create a promise:
Create a readline interface
Process each line with a try catch to reject in case of error
When finish the process replace the tmp file to original file with a try-catch to reject in case of error
Wait to finish the promise, in case of error delete the tmp file
const fs = require('fs');
const readline = require('readline');
/**
 * Rewrite `pathFile` in place, keeping only the lines for which
 * `conditional(line)` is truthy. The filtered content is streamed into a
 * temporary file that replaces the original when the rewrite completes.
 *
 * @param {string} pathFile - path of the file to filter
 * @param {(line: string) => boolean} conditional - keep-this-line predicate
 * @returns {Promise<boolean>} resolves true on success
 * @throws propagates predicate/filesystem errors after removing the temp file
 */
async function replaceLineWithConditional(pathFile, conditional) {
  // tmpFile name
  const tmpFilePath = `${pathFile}.tmp`;
  // Create write stream
  const tmpStream = fs.createWriteStream(tmpFilePath);
  // Process it
  const processFile = new Promise((resolve, reject) => {
    const rl = readline.createInterface({
      input: fs.createReadStream(pathFile),
    });
    // Process line
    rl.on("line", (input) => {
      try {
        if (conditional(input)) {
          tmpStream.write(input); // kept line
          tmpStream.write("\n"); // line terminator
        }
      } catch (err) {
        // Predicate threw: fail the whole operation.
        reject(err);
      }
    });
    // Finish
    rl.on("close", () => {
      // BUG FIX: the original called renameSync immediately after
      // tmpStream.close(), racing against the stream's buffered writes.
      // close(callback) runs the callback only after the data is flushed
      // and the descriptor is closed.
      tmpStream.close((closeErr) => {
        if (closeErr) {
          return reject(closeErr);
        }
        try {
          // Move the tmpFile over the original
          fs.renameSync(tmpFilePath, pathFile);
          resolve(true);
        } catch (err) {
          reject(err);
        }
      });
    });
  });
  try {
    // Await the promise
    return await processFile;
  } catch (err) {
    // Delete the tmp file and throw the error
    tmpStream.close();
    fs.unlinkSync(tmpFilePath);
    throw err;
  }
}
So you could call the function with your conditional function passed as a callback. For example, I want to keep all the lines that have a length of more than 3 and do not start with "a":
// async/await:
await replaceLineWithConditional("./test.txt", (line) => {
return line.length > 3 && /^[^a]/.test(line);
});
// then/catch:
replaceLineWithConditional("./test.txt", (line) => {
return line.length > 3 && /^[^a]/.test(line);
}).then(...).catch(...);
input:
Hi
Hello
abcdef
a
lalalal
output:
Hello
lalalal
If you don't want the file to end with a newline (take a note: Why should text files end with a newline?), adapting this can be a quiz problem to test knowledge of the fs library :)
Imagine you have many long text files, and you need to only extract data from the first line of each one (without reading any further content). What is the best way in Node JS to do it?
Thanks!
I ended up adopting this solution, which seems the most performant I've seen so far:
var fs = require('fs');
var Q = require('q');
/**
 * Resolve with the first line of the file at `path`, reading only as many
 * chunks as needed: the stream is closed as soon as a newline is seen.
 *
 * FIXES vs the original:
 *  - uses a native Promise instead of the third-party Q library
 *    (backward compatible: callers still get a thenable);
 *  - a file with no newline at all now resolves with its full content;
 *    the old `acc.slice(0, pos + index)` with index === -1 silently
 *    dropped the last character.
 *
 * @param {string} path - file to read
 * @returns {Promise<string>} first line, without the trailing newline
 */
function readFirstLine (path) {
  return new Promise(function (resolve, reject) {
    var rs = fs.createReadStream(path, {encoding: 'utf8'});
    var acc = '';            // content accumulated so far
    var pos = 0;             // length of all chunks before the newline chunk
    var index = -1;          // newline offset within the latest chunk
    rs
      .on('data', function (chunk) {
        index = chunk.indexOf('\n');
        acc += chunk;
        // Found the newline: stop reading. Otherwise keep counting.
        index !== -1 ? rs.close() : pos += chunk.length;
      })
      .on('close', function () {
        // No newline anywhere (or empty file): the whole content is line 1.
        resolve(index !== -1 ? acc.slice(0, pos + index) : acc);
      })
      .on('error', function (err) {
        reject(err);
      });
  });
}
I created a npm module for convenience, named "firstline".
Thanks to @dandavis for the suggestion to use String.prototype.slice()!
There's a built-in module almost for this case - readline. It avoids messing with chunks and so forth. The code would look like the following:
const fs = require('fs');
const readline = require('readline');
/**
 * Resolve with the first line of `pathToFile`, letting the readline module
 * handle chunk boundaries.
 * NOTE(review): for an empty file no 'line' event fires, so the promise
 * never settles -- same behavior as the original; confirm callers never
 * pass empty files.
 */
async function getFirstLine(pathToFile) {
  const stream = fs.createReadStream(pathToFile);
  const rl = readline.createInterface({ input: stream });
  const firstLine = await new Promise((resolve) => {
    rl.on('line', (text) => {
      rl.close();       // stop consuming after the first line
      resolve(text);
    });
  });
  stream.close();       // release the underlying file handle
  return firstLine;
}
I know this doesn't exactly answer the question but for those who are looking for a READABLE and simple way to do so:
const fs = require('fs').promises;
/**
 * Return the first line of a text file; an empty file yields ''.
 * Reads the whole file, then captures everything before the first newline.
 */
async function getFirstLine(filePath) {
  const content = await fs.readFile(filePath, 'utf-8');
  const match = content.match(/(^.*)/);
  return (match || [])[1] || '';
}
NOTE:
naturally, this will only work with text files, which I assumed you used from your description
this will work with empty files and will return an empty string
this regexp is very performant since it is simple (no OR conditions or complex matches) and only reads the first line
Please try this:
https://github.com/yinrong/node-line-stream-util#get-head-lines
It unpipe the upstream once got the head lines.
Node.js >= 16
In all current versions of Node.js, readline.createInterface can be used as an async iterable, to read a file line by line - or just for the first line. This is also safe to use with empty files.
Unfortunately, the error handling logic is broken in versions of Node.js before 16, where certain file system errors may go uncaught even if the code is wrapped in a try-catch block because of the way asynchronous errors are propagated in streams. So I recommend using this method only in Node.js >= 16.
import { createReadStream } from "fs";
import { createInterface } from "readline";
/**
 * Resolve with the first line of the file at `path` ('' for an empty file),
 * using readline's async-iterable interface (Node.js >= 16).
 */
async function readFirstLine(path) {
  const inputStream = createReadStream(path);
  try {
    // The interface yields lines lazily; taking the first is enough.
    for await (const text of createInterface(inputStream)) {
      return text;
    }
    return ''; // No lines were yielded: the file is empty.
  } finally {
    inputStream.destroy(); // Always release the underlying file handle.
  }
}
const firstLine = await readFirstLine("path/to/file");
//Here you go;
var lineReader = require('line-reader');
var async = require('async');
exports.readManyFiles = function(files) {
async.map(files,
function(file, callback))
lineReader.open(file, function(reader) {
if (reader.hasNextLine()) {
reader.nextLine(function(line) {
callback(null,line);
});
}
});
},
function(err, allLines) {
//do whatever you want to with the lines
})
}