Create new JSON from source JSON file in test project - JavaScript

UPDATE
I have continued to work through this and have the following code.
async generatejobs() {
  const fs = require("fs");
  const path = require("path");
  const directoryPath = path.join(__dirname, "../data/pjo/in");
  fs.readdir(directoryPath, function (err, files) {
    if (err) {
      console.log("Error getting directory information." + err);
    } else {
      files.forEach(function (file) {
        console.log(file);
        fs.readFile(file, (err, data) => {
          console.log(file); // this works, if I stop here
          // if (err) throw err;
          // let newJson = fs.readFileSync(data);
          // console.log(newJson);
        })
        // let data = fs.readFileSync(file);
        // let obj = JSON.parse(data);
        // let isoUtc = new Date();
        // let isoLocal = toISOLocal(isoUtc);
        // obj.printingStart = isoLocal;
        // obj.printingEnd = isoLocal;
        // let updatedFile = JSON.stringify(obj);
        // let write = fs.createWriteStream(
        //   path.join(__dirname, "../data/pjo/out", updatedFile)
        // );
        // read.pipe(write);
      });
    }
  });
}
As soon as I try to uncomment the line shown below, it fails.
let newJson = fs.readFileSync(data);
The error I am getting is this.
Uncaught ENOENT: no such file or directory, open 'C:\projects\codeceptJs\ipt\80-012345.json'
This is a true statement, as the path should actually be as follows.
'C:\projects\codeceptJs\ipt\src\data\pjo\in\80-012345.json'
I do not understand why it is looking for the file there, given that earlier in the code the path is set, and it seems to work correctly for finding the file via this:
const directoryPath = path.join(__dirname, "../data/pjo/in");
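As it turns out, fs.readdir() returns bare file names (e.g. "80-012345.json"), not full paths, so fs.readFile(file, ...) resolves the name against the process working directory (C:\projects\codeceptJs\ipt) rather than against directoryPath. Note also that data handed to the fs.readFile callback is already the file's contents, so passing it on to fs.readFileSync() is not needed. A minimal sketch of the fix, in line with the resolution further down (the path.join is the key part):

const fs = require("fs");
const path = require("path");

const directoryPath = path.join(__dirname, "../data/pjo/in");
fs.readdir(directoryPath, (err, files) => {
  if (err) return console.log("Error getting directory information." + err);
  files.forEach((file) => {
    // readdir yields names like "80-012345.json"; join them back onto the directory
    const fullPath = path.join(directoryPath, file);
    const obj = JSON.parse(fs.readFileSync(fullPath));
    console.log(obj);
  });
});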
The remainder of the code, which is currently commented out, is where I am attempting to do the following:
Grab each file from the source dir
Put it into a JSON object
Update the JSON object to change two date entries
Save it to a new JSON file / new location in my project
Original Post
I have a codeceptjs test project and would like to include a set of existing json files in my project (src/data/jsondata/in), update the date attribute within each, and write them to an output location in my project (src/data/jsondata/out). I need to change the date, get it back into a very specific string format (which I have done), and then insert it back into the new json being created. I got this about 80% of the way there and then ran into issues when trying to get the files from one folder within my project to another.
I broke this up into two parts:
A function to take a date and convert it to the date string I need
A function to grab the source json, update the date, and make a new json at a new folder location
Number 1 is working as it should. Number 2 is not.
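For reference, a toISOLocal along these lines would cover part 1 (a sketch only; the author's actual implementation is not shown):

// Hypothetical sketch: shift the date by the timezone offset, then reuse toISOString()
function toISOLocal(date) {
  const tzOffsetMs = date.getTimezoneOffset() * 60000; // offset in milliseconds
  return new Date(date.getTime() - tzOffsetMs).toISOString().slice(0, -1); // drop the trailing "Z"
}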
If there is a better way to accomplish this, I am very much open to that.
Here is the code where I'm trying to update the json. The main issue is that I'm not understanding and/or handling the path-joining correctly.
generatePressJobs() {
  //requiring path and fs modules
  const path = require('path');
  const fs = require('fs');
  //joining path of directory
  const directoryPath = path.join(__dirname, '../', 'data/pjo/in/');
  //passing directoryPath and callback function
  fs.readdir(directoryPath, function (err, files) {
    //handling error
    if (err) {
      I.say('unable to scan directory: ' + err);
      return console.log('Unable to scan directory: ' + err);
    }
    //listing all files using forEach
    files.forEach(function (file) {
      // Update each file with new print dates
      let data = fs.readFileSync(file);
      let obj = JSON.parse(data);
      let isoUtc = new Date();
      let isoLocal = toISOLocal(isoUtc);
      obj.printingStart = isoLocal;
      obj.printingEnd = isoLocal;
      let updatedFile = JSON.stringify(obj);
      fs.writeFile(`C:\\projects\\csPptr\\ipt\\src\\data\\pjo\\out\\${file}`, updatedFile, (err) => {
        if (err) {
          throw err;
        }
      });
    });
  });
},
Error received
Uncaught ENOENT: no such file or directory, open '80-003599.json'
at Object.openSync (fs.js:462:3)
at Object.readFileSync (fs.js:364:35)
at C:\projects\codeceptJs\ipt\src\pages\Base.js:86:23
at Array.forEach (<anonymous>)
at C:\projects\codeceptJs\ipt\src\pages\Base.js:84:13
at FSReqCallback.oncomplete (fs.js:156:23)
The function to generate the json is located in src/pages/basePage.js
The folder structure I've built for the json files is:
src/data/jsondata/in --> for original source files
src/data/jsondata/out --> for resulting json after change
Any insight or suggestions would be hugely appreciated.
Thank you,
Bob

My approach / resolution
Passing along the final approach I took in the event it is helpful to anyone else. The data in the middle was specific to my requirements, but it is left in to show the process I took to do what I needed to do.
async generatePressjobs(count) {
  const fs = require("fs");
  const path = require("path");
  const sourceDirectoryPath = path.join(__dirname, "../data/pjo/in/");
  const destDirectoryPath = path.join(__dirname, "../data/pjo/out/");
  for (let i = 0; i < count; i++) {
    // read file and make object
    let content = JSON.parse(
      fs.readFileSync(sourceDirectoryPath + "source.json")
    );
    // Get current date and convert to required format for json file
    let isoUtc = new Date();
    let isoLocal = await this.toISOLocal(isoUtc);
    let fileNameTimeStamp = await this.getFileNameDate(isoUtc);
    // Get current hour and minute for DPI time stamp
    let dpiDate = new Date();
    let hour = dpiDate.getHours();
    let minute = dpiDate.getMinutes();
    let dpiStamp = hour + '' + minute;
    // update attributes in the json obj
    content.batchid = `80-0000${i}`;
    content.id = `80-0000${i}-10035-tcard-${dpiStamp}-0101010000_.pdf`;
    content.name = `80-0000${i}-8.5x11CALJEF-CalBody-${dpiStamp}-01010100${i}_.pdf`;
    content.printingStart = isoLocal;
    content.printingEnd = isoLocal;
    // write the file
    fs.writeFileSync(
      destDirectoryPath + `80-0000${i}-SOME-JOB-NAME-${dpiStamp}.pdf_Press Job printing end_${fileNameTimeStamp}.json`,
      JSON.stringify(content)
    );
  }
},
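For completeness, a hypothetical call site (the require path and export shape are assumptions, not shown in the original):

// Hypothetical usage; adjust the path to wherever the page object lives
const basePage = require("./src/pages/basePage");

(async () => {
  await basePage.generatePressjobs(3); // writes three press job files to data/pjo/out
})();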

Related

How could I duplicate/copy a file in an automated way with JavaScript?

I have a gif file that is stored in a directory called assets on my computer. I would like to create X amount of duplicates, and they should be stored in the same directory, each with a different file name.
Example:
In the assets directory is the gif file called 0.gif. I would like to duplicate this gif file 10 times, and the duplicates should be called 1.gif, 2.gif, 3.gif and so on.
The simplest option is to use fs and its copyFile function:
const fs = require("fs");
const path = require("path");

let copyMultiple = (src, count) => {
  let initCount = 0;
  while (initCount < count) {
    initCount++; // you can put this at the bottom too, according to your needs
    const newFileName = `${initCount}_${initCount}${path.extname(src)}`;
    console.log(newFileName, "is new file name");
    fs.copyFile(src, newFileName, (error) => {
      // if an error comes up
      if (error) {
        console.log(error);
      }
    });
  }
};
copyMultiple("1.gif", 3);
Another elegant way of doing this is:
const util = require("util");
const fs = require("fs");
const path = require("path");

const copyFilePromise = util.promisify(fs.copyFile);

function copyFiles(srcFile, destDir, destFileNames) {
  return Promise.all(
    destFileNames.map((file) => {
      return copyFilePromise(srcFile, path.join(destDir, file));
    })
  );
}

const myDestinationFileNames = ["second.gif", "third.gif"];
const sourceFileName = "1.gif";
copyFiles(sourceFileName, "", myDestinationFileNames)
  .then(() => {
    console.log("Copying is Done");
  })
  .catch((err) => {
    console.log("Got an Error", err);
  });
Using this approach will also give you the upper hand of knowing when the copying is done.
You can read docs here
const fs = require("fs");

const src = "0.gif"; // source file
const parts = src.split("."); // split the filename, e.g. "0.gif", into name ("0") and extension ("gif")
const times = 10; // number of times to duplicate
for (let i = 1; i < times; i++) {
  const newFilename = `${parseInt(parts[0]) + i}.${parts[1]}`; // new filename, e.g. "0.gif" to "1.gif"
  fs.copyFileSync(src, newFilename);
}
Use writeFile and readFile from the fs module and a simple for loop.
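A minimal sketch of that suggestion, reusing the 0.gif example from the question:

const fs = require("fs");

const buf = fs.readFileSync("0.gif"); // read the source gif into memory once
for (let i = 1; i <= 10; i++) {
  fs.writeFileSync(`${i}.gif`, buf); // write each duplicate
}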
Not sure which framework you're on, but fs.copyFile() is the standard way for Node.js: https://nodejs.org/api/fs.html#fscopyfilesrc-dest-mode-callback

Node.js JavaScript fetching file from directory returning same file

I'm trying to read data files from a directory called 'myfiles' using a script 'app.js'.
The file to be read depends on the configurations passed to 'app.js' by another process which generates the files 'log.txt' and 'config.json'.
'app.js' then reads the files 'log.txt' and 'config.json' and fetches a data file from 'myfiles' based on the file name received.
The files in 'myfiles' are labeled my-files1.txt, my-files2.txt, my-files3.txt, ... etc.
The problem is, I keep fetching the same file 'my-files1.txt', even though 'log.txt' and 'config.json' provide new names for files to fetch.
Could you please help me spot where the problem is?
Thanks for your help!
app.js:
const fs = require('fs');
const { resolve } = require('path');

function readLog() {
  try {
    return fs.readFileSync(__dirname + '/' + 'log.txt', 'utf8');
  } catch (err) {
    return '';
  }
}

function readConfig() {
  try {
    return fs.readFileSync(__dirname + '/' + 'config.json', 'utf8');
  } catch (err) {
    return '';
  }
}

let config = readConfig();
let log = readLog();
let cfg = JSON.parse(config);
let lcg = JSON.parse(log);

var currentPage = lcg.tPartition; // Current page to fetch
const numberOfPages = 10;
if ((cfg.running != true) && (Number(currentPage) > Number(numberOfPages))) {
  currentPage = 1;
}

function readDataset() {
  try {
    return fs.readFileSync(resolve(`./app/assets/myfiles/my-files${currentPage}.txt`), 'utf8');
  } catch (err) {
    return err;
  }
}

let dataset = readDataset();

const data = {
  data1: dataset // set dataset as value of data1
};

module.exports = { data };
You have written
let lcg = JSON.parse(log);
but your log is a .txt file, so how can you parse it as JSON? Is this the problem? Let me know if this worked, by any chance.
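Worth noting: JSON.parse() only looks at the string's content, not the file extension, so a .txt file holding valid JSON parses fine. A quick illustration:

const fs = require("fs");

fs.writeFileSync("log.txt", JSON.stringify({ tPartition: 3 }));
const lcg = JSON.parse(fs.readFileSync("log.txt", "utf8"));
console.log(lcg.tPartition); // 3

The bigger risk in the posted code is that readLog() returns '' when the file is missing, and JSON.parse('') throws, so the try/catch there does not actually protect the parse.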

How to write the file names in a folder to a CSV file using Node.js

Has anybody written the file names in a folder to a CSV file using JavaScript?
My folder structure is:
Data
  +IMG
    +test
      -1.png
      -2.png
    +train
      -3.png
      -4.png
An output CSV file would look like this:
Data/IMG/test/1.png Data/IMG/train/3.png
Data/IMG/test/2.png Data/IMG/train/4.png
You just need to loop through all folders and find all files.
You can refer to this answer to do this.
While you are finding all the files' paths, you can write these paths into a string for the CSV file:
const fs = require('fs');
const path = require('path');

let csvStr = "";

async function loop(startPath) {
  // Our starting point
  try {
    // Get the files as an array
    const files = await fs.promises.readdir(startPath);
    // Loop them all with the new for...of
    for (const file of files) {
      // Get the full paths
      const currentPath = path.join(startPath, file);
      // Stat the file to see if we have a file or dir
      const stat = await fs.promises.stat(currentPath);
      if (stat.isFile()) {
        console.log("'%s' is a file.", currentPath);
        // put the file into the csv string
        csvStr += currentPath + ", ";
      } else if (stat.isDirectory()) {
        console.log("'%s' is a directory.", currentPath);
        // enter the directory and loop
        await loop(currentPath);
      }
    } // End for...of
  } catch (e) {
    // Catch anything bad that happens
    console.error("We've thrown! Whoops!", e);
  }
}

// Make an async function that gets executed immediately
(async () => {
  // start the loop from the path where you run node
  await loop("./");
  fs.writeFileSync("your.csv", csvStr);
})();

Can't read file after writing it with fs

I want to read an image, write it to a folder, and read it again to get its base64.
I get the following error:
Error: ENOENT: no such file or directory, access 'C:\Workspace\Project\upload_storage\image.jpg'
at Object.accessSync (fs.js:192:3)
My code:
const FS = require("fs");
var multiparty = require('multiparty');
var path = require('path');

function readAndWriteFile(file, newPath) {
  FS.readFileSync(file.path, (err, data) => {
    FS.writeFileSync(newPath, data, (err) => {
    });
  });
}

function base64Encode(path, filemime) {
  FS.readFileSync(path, { encoding: 'base64' }, (err, data) => {
    if (err) {
      throw err;
    }
    return `data:${filemime};base64,${data}`;
  });
}
...
var form = new multiparty.Form();
// retrieve files using multiparty form
form.parse(req, function (err, fields, files) {
  var document;
  const documents = files.file;
  for (i = 0; i < documents.length; i++) {
    document = documents[i];
    const contentType = String(document.headers["content-type"]);
    filePath = path.join(process.cwd(), '/upload_storage/', document.originalFilename);
    readAndWriteFile(document, filePath);
    // // convert image to base64 encoded string
    const base64str = base64Encode(filePath, contentType);
    console.log(base64str);
  }
});
If I comment out the base64Encode function call, the files get created.
What am I doing wrong?
Don't use callbacks with the ...FileSync functions. But it looks like you want copyFileSync followed by unlinkSync anyway:
const fs = require('fs');

function readAndWriteFile(file, newPath) {
  fs.copyFileSync(file.path, newPath);
  fs.unlinkSync(file.path);
}
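For the same reason, the question's base64Encode should drop its callback and use the synchronous return value (a minimal sketch):

function base64Encode(path, filemime) {
  // readFileSync returns the data directly; there is no callback
  const data = fs.readFileSync(path, { encoding: 'base64' });
  return `data:${filemime};base64,${data}`;
}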
Did you try reading the documentation for fs?
More reading and examples in this question.

Node.js - Getting empty files when unzipping and uploading to GCS

I am trying to create a service that gets a zip file, unpacks it, and uploads its contents to a Google Cloud Storage bucket.
The unzipping part seems to work well, but in my GCS bucket all the files seem to be empty.
I'm using the following code:
app.post('/fileupload', function (req, res) {
  var form = new formidable.IncomingForm();
  form.parse(req, function (err, fields, files) {
    const uuid = uuidv4();
    console.log(files.filetoupload.path); // temporary path to zip
    fs.createReadStream(files.filetoupload.path)
      .pipe(unzip.Parse())
      .on('entry', function (entry) {
        var fileName = entry.path;
        var type = entry.type; // 'Directory' or 'File'
        var size = entry.size;
        const gcsname = uuid + '/' + fileName;
        const blob = bucket.file(gcsname);
        const blobStream = blob.createWriteStream(entry.path);
        blobStream.on('error', (err) => {
          console.log(err);
        });
        blobStream.on('finish', () => {
          const publicUrl = format(`https://storage.googleapis.com/${bucket.name}/${blob.name}`);
          console.log(publicUrl); // file on GCS
        });
        blobStream.end(entry.buffer);
      });
  });
});
I'm quite new to Node.js so I'm probably overlooking something - I've spent some time on documentation but I don't quite know what to do.
Could someone advise on what might be the problem?
fs.createWriteStream() takes a file path as its argument, but GCS's createWriteStream() takes an options object.
As per the example in this documentation, the recommended way would be:
const stream = file.createWriteStream({
  metadata: {
    contentType: req.file.mimetype
  },
  resumable: false
});
instead of:
const blobStream = blob.createWriteStream(entry.path).
Check whether your buffer is undefined or not. It may be that the buffer remains undefined due to unspecified disk/memory storage.
