My current code can only read a text file. How can I open an image (base64) file with the Photos application on Windows? Is there any way to do that? If it's impossible, please let me know!
const fs = require('fs')

fs.readFile('./Test/a.txt', 'utf8', (err, data) => {
  if (err) {
    console.error(err)
    return
  }
  console.log(data)
})
One possible solution is to spawn a program that opens images:
const cp = require('child_process');

const imageFilePath = '/aaa/bbb/ccc';
// Note: spawn passes arguments verbatim (no shell), so don't add quotes around the path.
const c = cp.spawn('a_program_that_opens_images', [imageFilePath]);

c.stdout.pipe(process.stdout);
c.stderr.pipe(process.stderr);
c.once('exit', exitCode => {
  // child process has exited
});
Alternatively, do something like this:
const cp = require('child_process');

const c = cp.spawn('bash'); // 1
const imageFilePath = '/aaa/bbb/ccc';

c.stdin.end(`
  program_that_opens_images "${imageFilePath}"
`); // 2

c.stdout.pipe(process.stdout); // 3
c.stderr.pipe(process.stderr);

c.once('exit', exitCode => { // 4
  // child process has exited
});
What it does:
1. spawns a bash child process (use sh or zsh instead if you want)
2. writes the command to run to bash's stdin
3. pipes the stdio from the child to the parent
4. captures the exit code from the child
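Since the question targets Windows, where bash usually isn't available, here is a minimal sketch of the same idea using cmd's start command, which opens a file with its default associated application (the Photos app for most image formats). The file paths are hypothetical, and it assumes the source file contains base64 text that has to be decoded to binary image data first:

const cp = require('child_process');
const fs = require('fs');

// Hypothetical paths -- adjust to your own files.
const base64Text = fs.readFileSync('./Test/image.b64', 'utf8');
const imageFilePath = './Test/image.png';

// Decode the base64 text into binary image data.
fs.writeFileSync(imageFilePath, Buffer.from(base64Text, 'base64'));

// exec runs the command line through cmd.exe on Windows;
// start "" <file> opens the file with whatever app is associated with it.
cp.exec(`start "" "${imageFilePath}"`, (err) => {
  if (err) console.error(err);
});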
I have a large CSV file of postcode data (~1.1 GB). I am trying to filter out the data I need and then write an array of values to a JS file.
The issue is that I'm always using too much memory and receiving this error:
Ineffective mark-compacts near heap limit Allocation failed - JavaScript heap out of memory
I have tried increasing the memory with node --max-old-space-size=4096 fileName.js, but I still hit my memory limit; it just takes longer!
Here is my code to write to the JS file:
const csvFilePath = "./data/postcodes.csv";
const csv = require("csvtojson");
const fs = require("fs");

csv()
  .fromFile(csvFilePath)
  .then((jsonArray) => {
    const inUsePostcodes = jsonArray
      .filter((x) => x["In Use?"] === "Yes")
      .map((y) => y.Postcode);
    fs.writeFileSync("postcodes.js", inUsePostcodes);
  });
Here is a sample of postcodes.csv:
Postcode,In Use?,Latitude,Longitude,Easting,Northing,Grid Ref,County,District,Ward,District Code,Ward Code,Country,County Code,Constituency,Introduced,Terminated,Parish,National Park,Population,Households,Built up area,Built up sub-division,Lower layer super output area,Rural/urban,Region,Altitude,London zone,LSOA Code,Local authority,MSOA Code,Middle layer super output area,Parish Code,Census output area,Constituency Code,Index of Multiple Deprivation,Quality,User Type,Last updated,Nearest station,Distance to station,Postcode area,Postcode district,Police force,Water company,Plus Code,Average Income
AB1 0AA,No,57.101474,-2.242851,385386,801193,NJ853011,"","Aberdeen City","Lower Deeside",S12000033,S13002843,Scotland,S99999999,"Aberdeen South",1980-01-01,1996-06-01,"","",,,"","","Cults, Bieldside and Milltimber West - 02","Accessible small town",,46,,S01006514,,S02001237,"Cults, Bieldside and Milltimber West",,S00090303,S14000002,6808,1,0,2020-02-19,"Portlethen",8.31408,AB,AB1,"Scotland","Scottish Water",9C9V4Q24+HV,
AB1 0AB,No,57.102554,-2.246308,385177,801314,NJ851013,"","Aberdeen City","Lower Deeside",S12000033,S13002843,Scotland,S99999999,"Aberdeen South",1980-01-01,1996-06-01,"","",,,"","","Cults, Bieldside and Milltimber West - 02","Accessible small town",,61,,S01006514,,S02001237,"Cults, Bieldside and Milltimber West",,S00090303,S14000002,6808,1,0,2020-02-19,"Portlethen",8.55457,AB,AB1,"Scotland","Scottish Water",9C9V4Q33+2F,
AB1 0AD,No,57.100556,-2.248342,385053,801092,NJ850010,"","Aberdeen City","Lower Deeside",S12000033,S13002843,Scotland,S99999999,"Aberdeen South",1980-01-01,1996-06-01,"","",,,"","","Cults, Bieldside and Milltimber West - 02","Accessible small town",,45,,S01006514,,S02001237,"Cults, Bieldside and Milltimber West",,S00090399,S14000002,6808,1,0,2020-02-19,"Portlethen",8.54352,AB,AB1,"Scotland","Scottish Water",9C9V4Q22+6M,
How can I write to the JS file from this CSV without hitting my memory limit?
You need a streaming CSV parser that parses the input and produces output one line at a time, so you can stream the result to a file.
Here's one way to do it using the csv-reader module:
const fs = require('fs');
const csvReader = require('csv-reader');
const { Transform } = require('stream');

// Turns each parsed row object into one element of a JSON array.
const myTransform = new Transform({
  readableObjectMode: true,
  writableObjectMode: true,
  transform(obj, encoding, callback) {
    let data = JSON.stringify(obj);
    if (this.tFirst) {
      // beginning of transformed data
      this.push("[");
      this.tFirst = false;
    } else {
      data = "," + data; // add comma separator if not first object
    }
    this.push(data);
    callback();
  },
  flush(callback) {
    // end of transformed data
    this.push("]");
    callback();
  }
});
myTransform.tFirst = true;

// All of these arguments are optional.
const options = {
  skipEmptyLines: true,
  asObject: true,      // convert each row to an object keyed by header
  parseNumbers: true,
  parseBooleans: true,
  trim: true
};

const csvStream = new csvReader(options);
const readStream = fs.createReadStream('example.csv', 'utf8');
const writeStream = fs.createWriteStream('example.json', { autoClose: false });

readStream.on('error', err => {
  console.log(err);
  csvStream.destroy(err);
}).pipe(csvStream)
  .pipe(myTransform)
  .pipe(writeStream)
  .on('error', err => {
    console.error(err);
  })
  .on('finish', () => {
    console.log('done');
  });
The issue is that the csvtojson node module tries to store this massive JSON array in memory!
I found a different solution using the csv-parser node module, which parses one row at a time instead of the whole CSV!
Here is my solution:
const csv = require('csv-parser');
const fs = require('fs');

const stream = fs.createWriteStream("postcodes.js", { flags: 'a' });
let first = false;

fs.createReadStream('./data/postcodes.csv')
  .pipe(csv())
  .on('data', (row) => {
    if (row["In Use?"] === "Yes") {
      if (!first) {
        first = true;
        stream.write(`const postcodes = ["${row.Postcode}"`);
      } else {
        stream.write(`,\n"${row.Postcode}"`);
      }
    }
  })
  .on('end', () => {
    stream.write("];");
    console.log('CSV file successfully processed');
  });
It's not very pretty writing string fragments like const postcodes = to produce JavaScript, but it performs the desired function.
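As a variation on the same row-at-a-time idea, you could emit valid JSON instead of a JS source file and load it later with require or JSON.parse. Here is a minimal sketch under the same csv-parser setup, writing to a hypothetical postcodes.json:

const csv = require('csv-parser');
const fs = require('fs');

const stream = fs.createWriteStream("postcodes.json");
let first = true;

fs.createReadStream('./data/postcodes.csv')
  .pipe(csv())
  .on('data', (row) => {
    if (row["In Use?"] === "Yes") {
      // JSON.stringify handles quoting/escaping of the postcode value
      stream.write((first ? "[" : ",\n") + JSON.stringify(row.Postcode));
      first = false;
    }
  })
  .on('end', () => {
    stream.write(first ? "[]" : "]"); // handle the no-matches case too
    stream.end();
    console.log('JSON file successfully written');
  });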
I am running a script which looks into a directory and lists files, then checks the file type to process; if the extension matches, the file is read and each line of the file (.col, which is just a renamed txt file) is inserted into an array.
Now, after the file is read and the array is populated, I would like to use the array and do some further processing, e.g. create a db record. I am missing something really basic here, because on each console.log I do as below, I always get the full contents of all files in my array.
So to make it a bit simpler:
the array is empty.
Then a file is read and processed, and the array now has
array[0] = line 0 of the file
array[1] = line 1 of the file, etc.
const fs = require('fs');
const readline = require('readline');
const path = require('path');

var files = fs.readdirSync('/home/proj/data');
var model = [];
var lineReader = [];

for (var i = 0; i < files.length; i++) {
  if (path.extname(files[i]) === ".col") {
    lineReader[i] = readline.createInterface({
      input: fs.createReadStream(files[i])
    });
    lineReader[i].on('line', function (line) {
      model.push(line);
    }).on('close', async function () {
      console.log(model);
    });
  }
}
Instead, when the script runs, the array holds all lines of all files that match the extension.
Your help is greatly appreciated, and anyone is welcome to scorch my JS, as I am pretty sure I am missing something basic here.
So, you want to read the files in parallel (because that's what your program does) and put the results in an array of arrays?
You can turn the file-reading mechanism into a promise and consume it with Promise.all. Here is an example to get you started.
const fs = require('fs');
const readline = require('readline');
const path = require('path');

var files = fs.readdirSync('./');

function readFile(fileName) {
  return new Promise(resolve => {
    const array = [];
    const lineReader = readline.createInterface({
      input: fs.createReadStream(fileName) // read the argument, not files[i]
    });
    lineReader.on('line', function (line) {
      array.push(line);
    }).on('close', function () {
      // do some per-file processing here
      console.log(array);
      resolve(array);
    });
  });
}

const readFilePromises = [];
for (var i = 0; i < files.length; i++) {
  if (path.extname(files[i]) === ".js") {
    readFilePromises.push(readFile(files[i]));
  }
}

Promise.all(readFilePromises) // or await Promise.all([...]) inside an async function
  .then(data => {
    console.log(data); // will be an array of arrays, one per file
  });
If you want a single array, you can always flatten the result using data.flat().
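For instance, a small sketch of the flattened variant, assuming the same readFilePromises as above:

Promise.all(readFilePromises)
  .then(data => {
    const allLines = data.flat(); // merge the per-file arrays into one array of lines
    console.log(allLines);
  });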
If your files are not very big and sync methods are OK, you can simplify the code this way:
'use strict';

const fs = require('fs');
const path = require('path');

const dir = '/home/proj/data';
const model = [];

fs.readdirSync(dir)
  .filter(name => path.extname(name) === '.col')
  .forEach((name) => {
    // readdirSync returns bare names, so join them with the directory
    model.push(...fs.readFileSync(path.join(dir, name), 'utf8').split('\n'));
  });

console.log(model);