UPDATE
I have continued to work through this and have the following code, which is still failing:
async generatejobs() {
  const fs = require("fs");
  const path = require("path");
  const directoryPath = path.join(__dirname, "../data/pjo/in");
  fs.readdir(directoryPath, function (err, files) {
    if (err) {
      console.log("Error getting directory information." + err);
    } else {
      files.forEach(function (file) {
        console.log(file);
        fs.readFile(file, (err, data) => {
          console.log(file); // this works, if I stop here
          // if (err) throw err;
          // let newJson = fs.readFileSync(data);
          // console.log(newJson);
        });
        // let data = fs.readFileSync(file);
        // let obj = JSON.parse(data);
        // let isoUtc = new Date();
        // let isoLocal = toISOLocal(isoUtc);
        // obj.printingStart = isoLocal;
        // obj.printingEnd = isoLocal;
        // let updatedFile = JSON.stringify(obj);
        // let write = fs.createWriteStream(
        //   path.join(__dirname, "../data/pjo/out", updatedFile)
        // );
        // read.pipe(write);
      });
    }
  });
}
As soon as I uncomment the line shown below, it fails:
let newJson = fs.readFileSync(data);
The error I am getting is this.
Uncaught ENOENT: no such file or directory, open 'C:\projects\codeceptJs\ipt\80-012345.json'
The error is accurate: the file is not at that location. The correct path is:
'C:\projects\codeceptJs\ipt\src\data\pjo\in\80-012345.json'
I do not understand why it is looking for the file there, given that the path is set earlier in the code and seems to work correctly for finding the file:
const directoryPath = path.join(__dirname, "../data/pjo/in");
The remainder of the code, which is currently commented out, is where I am attempting to do the following:
Grab each file from the source directory
Parse it into a JSON object
Update two date entries in the object
Save it as a new JSON file in a new location in my project
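For context on the ENOENT: fs.readdir yields bare file names (e.g. 80-012345.json), not full paths, so fs.readFile(file) and fs.readFileSync(file) resolve the name against the process's current working directory rather than directoryPath. (Also, inside the readFile callback, data is already the file contents, so fs.readFileSync(data) is not needed.) A minimal sketch of the usual fix, joining each name back onto the directory:

const fs = require("fs");
const path = require("path");
const directoryPath = path.join(__dirname, "../data/pjo/in");

fs.readdir(directoryPath, (err, files) => {
  if (err) return console.log("Error getting directory information." + err);
  files.forEach((file) => {
    // file is a bare name like "80-012345.json"; join it back onto
    // the directory so the read is not relative to process.cwd()
    const fullPath = path.join(directoryPath, file);
    const data = fs.readFileSync(fullPath);
    console.log(file, data.length);
  });
});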
Original Post
I have a codeceptjs test project and would like to include a set of existing json files in my project (src/data/jsondata/in) and then update the date attribute within each and write them to an output location in my project (src/data/jsondata/out). I need to change the date and then get it back into a very specific string format, which I have done and then insert this back into the new json being created. I got this about 80% of the way there and then ran into issues when trying to get the files from one folder within my project to another.
I broke this up into two parts.
A function to take a date and convert it to the date string I need
A function to grab the source JSON, update the date, and create a new JSON file at a new folder location
Number 1 is working as it should. Number 2 is not.
If there is a better way to accomplish this, I am very much open to that.
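The toISOLocal helper mentioned in part 1 is not shown in the post. Purely for context, a sketch of what such a helper might look like (an assumption, not the actual implementation):

// Hypothetical toISOLocal: format a Date as an ISO-8601 string in
// local time with a numeric UTC offset, e.g. "2020-02-19T14:05:09.123-05:00"
function toISOLocal(date) {
  const pad = (n, width = 2) => String(n).padStart(width, "0");
  const offsetMin = -date.getTimezoneOffset();
  const sign = offsetMin >= 0 ? "+" : "-";
  const abs = Math.abs(offsetMin);
  return date.getFullYear() +
    "-" + pad(date.getMonth() + 1) +
    "-" + pad(date.getDate()) +
    "T" + pad(date.getHours()) +
    ":" + pad(date.getMinutes()) +
    ":" + pad(date.getSeconds()) +
    "." + pad(date.getMilliseconds(), 3) +
    sign + pad(Math.floor(abs / 60)) + ":" + pad(abs % 60);
}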
Here is the code where I'm trying to update the JSON. The main issue is that I'm not understanding and/or handling the path joining correctly.
generatePressJobs() {
  //requiring path and fs modules
  const path = require('path');
  const fs = require('fs');
  //joining path of directory
  const directoryPath = path.join(__dirname, '../', 'data/pjo/in/');
  //passing directoryPath and callback function
  fs.readdir(directoryPath, function (err, files) {
    //handling error
    if (err) {
      I.say('unable to scan directory: ' + err);
      return console.log('Unable to scan directory: ' + err);
    }
    //listing all files using forEach
    files.forEach(function (file) {
      // Update each file with new print dates
      let data = fs.readFileSync(file);
      let obj = JSON.parse(data);
      let isoUtc = new Date();
      let isoLocal = toISOLocal(isoUtc);
      obj.printingStart = isoLocal;
      obj.printingEnd = isoLocal;
      let updatedFile = JSON.stringify(obj);
      fs.writeFile(`C:\\projects\\csPptr\\ipt\\src\\data\\pjo\\out\\${file}`, updatedFile, (err) => {
        if (err) {
          throw err;
        }
      });
    });
  });
},
Error received
Uncaught ENOENT: no such file or directory, open '80-003599.json'
at Object.openSync (fs.js:462:3)
at Object.readFileSync (fs.js:364:35)
at C:\projects\codeceptJs\ipt\src\pages\Base.js:86:23
at Array.forEach (<anonymous>)
at C:\projects\codeceptJs\ipt\src\pages\Base.js:84:13
at FSReqCallback.oncomplete (fs.js:156:23)
The function to generate the json is located in src/pages/basePage.js
The folder structure I've built for the json files is:
src/data/jsondata/in --> for original source files
src/data/jsondata/out --> for resulting json after change
Any insight or suggestions would be hugely appreciated.
Thank you,
Bob
My approach / resolution
Passing along the final approach I took in case it is helpful to anyone else. The data handling in the middle was specific to my requirements, but I left it in to show the full process.
async generatePressjobs(count) {
  const fs = require("fs");
  const path = require("path");
  const sourceDirectoryPath = path.join(__dirname, "../data/pjo/in/");
  const destDirectoryPath = path.join(__dirname, "../data/pjo/out/");
  for (let i = 0; i < count; i++) {
    // read file and make object
    let content = JSON.parse(
      fs.readFileSync(sourceDirectoryPath + "source.json")
    );
    // Get current date and convert to required format for json file
    let isoUtc = new Date();
    let isoLocal = await this.toISOLocal(isoUtc);
    let fileNameTimeStamp = await this.getFileNameDate(isoUtc);
    // Get current hour and minute for DPI time stamp
    let dpiDate = new Date();
    let hour = dpiDate.getHours();
    let minute = dpiDate.getMinutes();
    let dpiStamp = hour + '' + minute;
    // update attributes in the json obj
    content.batchid = `80-0000${i}`;
    content.id = `80-0000${i}-10035-tcard-${dpiStamp}-0101010000_.pdf`;
    content.name = `80-0000${i}-8.5x11CALJEF-CalBody-${dpiStamp}-01010100${i}_.pdf`;
    content.printingStart = isoLocal;
    content.printingEnd = isoLocal;
    // write the file
    fs.writeFileSync(
      destDirectoryPath + `80-0000${i}-SOME-JOB-NAME-${dpiStamp}.pdf_Press Job printing end_${fileNameTimeStamp}.json`,
      JSON.stringify(content)
    );
  }
},
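One small caveat with the dpiStamp concatenation above (a suggested tweak, not part of the original approach): hour + '' + minute is not zero-padded, so 9:05 AM produces "95" rather than "0905". If a fixed-width stamp matters, a padded variant would be:

// Zero-padded variant of the dpiStamp calculation (suggestion only):
// 9:05 becomes "0905" instead of "95"
let dpiDate = new Date();
let dpiStamp =
  String(dpiDate.getHours()).padStart(2, "0") +
  String(dpiDate.getMinutes()).padStart(2, "0");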
I have a large CSV file of postcode data (~1.1GB). I am trying to filter out the data I need and then write an array of values to a JS file.
The issue is that I'm always using too much memory and receiving this error:
Ineffective mark-compacts near heap limit Allocation failed - JavaScript heap out of memory
I have tried increasing the memory using this command: node --max-old-space-size=4096 fileName.js, but I still hit my memory limit; it just takes longer!
Here is my code that writes to the JS file:
const csvFilePath = "./data/postcodes.csv";
const csv = require("csvtojson");
const fs = require("fs");
csv()
  .fromFile(csvFilePath)
  .then((jsonArray) => {
    const inUsePostcodes = jsonArray.filter((x) => x["In Use?"] === "Yes").map((y) => y.Postcode);
    fs.writeFileSync("postcodes.js", inUsePostcodes);
  });
Here is a sample of postcodes.csv:
Postcode,In Use?,Latitude,Longitude,Easting,Northing,Grid Ref,County,District,Ward,District Code,Ward Code,Country,County Code,Constituency,Introduced,Terminated,Parish,National Park,Population,Households,Built up area,Built up sub-division,Lower layer super output area,Rural/urban,Region,Altitude,London zone,LSOA Code,Local authority,MSOA Code,Middle layer super output area,Parish Code,Census output area,Constituency Code,Index of Multiple Deprivation,Quality,User Type,Last updated,Nearest station,Distance to station,Postcode area,Postcode district,Police force,Water company,Plus Code,Average Income
AB1 0AA,No,57.101474,-2.242851,385386,801193,NJ853011,"","Aberdeen City","Lower Deeside",S12000033,S13002843,Scotland,S99999999,"Aberdeen South",1980-01-01,1996-06-01,"","",,,"","","Cults, Bieldside and Milltimber West - 02","Accessible small town",,46,,S01006514,,S02001237,"Cults, Bieldside and Milltimber West",,S00090303,S14000002,6808,1,0,2020-02-19,"Portlethen",8.31408,AB,AB1,"Scotland","Scottish Water",9C9V4Q24+HV,
AB1 0AB,No,57.102554,-2.246308,385177,801314,NJ851013,"","Aberdeen City","Lower Deeside",S12000033,S13002843,Scotland,S99999999,"Aberdeen South",1980-01-01,1996-06-01,"","",,,"","","Cults, Bieldside and Milltimber West - 02","Accessible small town",,61,,S01006514,,S02001237,"Cults, Bieldside and Milltimber West",,S00090303,S14000002,6808,1,0,2020-02-19,"Portlethen",8.55457,AB,AB1,"Scotland","Scottish Water",9C9V4Q33+2F,
AB1 0AD,No,57.100556,-2.248342,385053,801092,NJ850010,"","Aberdeen City","Lower Deeside",S12000033,S13002843,Scotland,S99999999,"Aberdeen South",1980-01-01,1996-06-01,"","",,,"","","Cults, Bieldside and Milltimber West - 02","Accessible small town",,45,,S01006514,,S02001237,"Cults, Bieldside and Milltimber West",,S00090399,S14000002,6808,1,0,2020-02-19,"Portlethen",8.54352,AB,AB1,"Scotland","Scottish Water",9C9V4Q22+6M,
How can I write to the JS file from this CSV, without hitting my memory limit?
You need a streaming CSV parser that parses the data and emits output one line at a time, letting you stream it to a file.
Here's one way to do it using the csv-reader module:
const fs = require('fs');
const csvReader = require('csv-reader');
const { Transform } = require('stream');
const myTransform = new Transform({
  readableObjectMode: true,
  writableObjectMode: true,
  transform(obj, encoding, callback) {
    let data = JSON.stringify(obj);
    if (this.tFirst) {
      // beginning of transformed data
      this.push("[");
      this.tFirst = false;
    } else {
      data = "," + data; // add comma separator if not first object
    }
    this.push(data);
    callback();
  }
});

myTransform.tFirst = true;
myTransform._flush = function(callback) {
  // end of transformed data
  this.push("]");
  callback();
};

// All of these arguments are optional.
const options = {
  skipEmptyLines: true,
  asObject: true, // convert data to object
  parseNumbers: true,
  parseBooleans: true,
  trim: true
};

const csvStream = new csvReader(options);
const readStream = fs.createReadStream('example.csv', 'utf8');
const writeStream = fs.createWriteStream('example.json', {autoClose: false});

readStream.on('error', err => {
  console.log(err);
  csvStream.destroy(err);
}).pipe(csvStream)
  .pipe(myTransform)
  .pipe(writeStream)
  .on('error', err => {
    console.error(err);
  }).on('finish', () => {
    console.log('done');
  });
The issue is that the csvtojson node module tries to store the massive JSON array in memory!
I found a different solution using the csv-parser node module, which parses one row at a time instead of the whole CSV.
Here is my solution:
const csv = require('csv-parser');
const fs = require('fs');
const stream = fs.createWriteStream("postcodes.js", {flags: 'a'});
let first = false;

fs.createReadStream('./data/postcodes.csv')
  .pipe(csv())
  .on('data', (row) => {
    if (row["In Use?"] === "Yes") {
      if (!first) {
        first = true;
        // open the array literal with the first postcode
        stream.write(`const postcodes = ["${row.Postcode}",\n`);
      } else {
        stream.write(`"${row.Postcode}",\n`);
      }
    }
  })
  .on('end', () => {
    stream.write("]");
    console.log('CSV file successfully processed');
  });
It's not very pretty writing string fragments like const postcodes = [ by hand to produce JavaScript, but it performs the desired function.
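As an alternative sketch (a suggestion, not from the original answer), the same row-at-a-time approach can emit a plain JSON file instead, which can then be loaded with require or JSON.parse:

const csv = require('csv-parser');
const fs = require('fs');

const out = fs.createWriteStream('postcodes.json');
let first = true;

fs.createReadStream('./data/postcodes.csv')
  .pipe(csv())
  .on('data', (row) => {
    if (row['In Use?'] === 'Yes') {
      // emit "[" before the first value, "," before every later one
      out.write((first ? '[' : ',\n') + JSON.stringify(row.Postcode));
      first = false;
    }
  })
  .on('end', () => {
    out.end(first ? '[]' : ']');
    // later: const postcodes = require('./postcodes.json');
  });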
I am trying to write a file uploader for the Meteor framework.
The principle is to split the file on the client, from an ArrayBuffer, into small chunks of 4096 bytes that are sent to the server through a Meteor.method.
The simplified code below is the part of the client that sends a chunk to the server; it is repeated until offset reaches data.byteLength:
// data is an ArrayBuffer
var total = data.byteLength;
var offset = 0;

var upload = function() {
  var length = 4096; // chunk size
  // adjust the last chunk size
  if (offset + length > total) {
    length = total - offset;
  }
  // I am using Uint8Array to create the chunk
  // because it can be passed to the Meteor.method natively
  var chunk = new Uint8Array(data, offset, length);
  if (offset < total) {
    // Send the chunk to the server and tell it what file to append to
    Meteor.call('uploadFileData', fileId, chunk, function (err, length) {
      if (!err) {
        offset += length;
        upload();
      }
    });
  }
};
upload(); // start uploading
The simplified code below is the part on the server that receives the chunk and writes it to the file system:
var fs = Npm.require('fs');
var Future = Npm.require('fibers/future');

Meteor.methods({
  uploadFileData: function(fileId, chunk) {
    var fut = new Future();
    var path = '/uploads/' + fileId;
    // I tried that with no success
    chunk = String.fromCharCode.apply(null, chunk);
    // how to write the chunk (a Uint8Array) to the disk?
    fs.appendFile(path, chunk, 'binary', function (err) {
      if (err) {
        fut.throw(err);
      } else {
        fut.return(chunk.length);
      }
    });
    return fut.wait();
  }
});
I failed to write a valid file to disk. The file is saved, but I cannot open it: when I view the content in a text editor, it is similar to the original file (a jpg, for example), but some characters are different. I think it could be an encoding problem, as the file size is not the same, but I don't know how to fix that...
Saving the file was as easy as creating a new Buffer from the Uint8Array object:
// chunk is the Uint8Array object
fs.appendFile(path, Buffer.from(chunk), function (err) {
  if (err) {
    fut.throw(err);
  } else {
    fut.return(chunk.length);
  }
});
Building on Karl.S's answer, this worked for me, outside of any framework:
fs.appendFileSync(outfile, Buffer.from(arrayBuffer));
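A self-contained illustration of that pattern (file names here are hypothetical):

const fs = require('fs');

// Append one binary chunk (ArrayBuffer or Uint8Array) to a file.
// Buffer.from accepts either, so no manual conversion is needed.
function appendChunk(outfile, chunk) {
  fs.appendFileSync(outfile, Buffer.from(chunk));
}

appendChunk('upload.bin', new Uint8Array([0xff, 0xd8, 0xff, 0xe0])); // e.g. JPEG header bytes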
Just wanted to add that in newer Meteor you can avoid some callback hell with async/await; await will also rethrow, pushing the error up to the client. Note that this requires the promise-based fs API (fs/promises): the callback version of fs.appendFile does not return a promise and cannot be awaited.
const fs = require('fs/promises');

Meteor.methods({
  uploadFileData: async function(file_id, chunk) {
    const path = 'somepath/' + file_id; // be careful with this, make sure to sanitize file_id
    await fs.appendFile(path, Buffer.from(chunk)); // Buffer.from replaces the deprecated new Buffer()
    return chunk.length;
  }
});