I have an fs.readdir() call that I want to run every few hours. The reason for this is that a bunch of files are sent to my box every so often, and I have to run a script for each file to upload it to a DB.
fs.readdir('some/location/', (err, files) => {
  if (files.indexOf('someFile') > -1) {
    console.log('file found')
    function1()
  } else {
    console.log('file not found')
  }
})
Now I don't want to have to do a separate readdir check for all 13 files that get dropped. Is there a way to create a list like
someFile = function1()
someFile2 = function2()
so that it runs each file's dedicated function for each file present in the readdir array?
var function1 = function........
var function2 = function... do.something.different
You can:
Create an object {} called fileFunctions that maps each file name to the function to apply.
Iterate over the files of the directory and, when a file name matches an entry in fileFunctions, apply the matching function:
const fileFunctions = {
  'file1': function() {/*function to apply to file1*/},
  'file2': function() {/*function to apply to file2*/},
  //etc..
};

fs.readdir('some/location/', (err, files) => {
  files.forEach((file) => {
    if (file in fileFunctions) {
      fileFunctions[file](file);
    }
  });
});
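Since the question mentions running this every few hours: one simple option is to wrap the whole dispatch in setInterval (a sketch; the interval value and error handling are illustrative):

const EVERY_THREE_HOURS = 3 * 60 * 60 * 1000; // illustrative interval

setInterval(() => {
  fs.readdir('some/location/', (err, files) => {
    if (err) return console.error(err);
    files.forEach((file) => {
      if (file in fileFunctions) fileFunctions[file](file);
    });
  });
}, EVERY_THREE_HOURS);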
Related
I have a project with functions that read files and extract their hash codes. After these hash codes are extracted, subfiles are built one by one. Finally, I want to collect all these hash codes into an array and create a JSON file. I need to do this after the iterateFolders() function has run and finished inside the readDirectory function. But the console.log on the line below it runs without waiting for that function. Please help.
My functions are as follows:
//Calculate build time
function getBuildTime(start,end) {
let time = (end - start);
let buildTime = `${new Date().toLocaleDateString()} ${new Date().toLocaleTimeString()} Build time: ${time} ms \n`
fs.writeFile('build-time.log', buildTime,function (err) { //output log file
if (err) return console.log(err);
});
}
//async metaHash calculation from folder path
async function computeMetaHash(folder, inputHash = null) {
const hash = inputHash ? inputHash : createHash('sha256');
const info = await fsp.readdir(folder, { withFileTypes: true });
//construct a string from the modification date, the filename and the filesize
for (let item of info) {
const fullPath = path.join(folder, item.name)
if (item.isFile()) {
const statInfo = await fsp.stat(fullPath); // stat returns all information about the file
// compute hash string name:size:mtime
const fileInfo = `${fullPath}:${statInfo.size}:${statInfo.mtimeMs}`;
hash.update(fileInfo);
} else if (item.isDirectory()) {
// recursively walk sub-folders
await computeMetaHash(fullPath, hash);
}
}
// if not being called recursively, get the digest and return it as the hash result
if (!inputHash) {
return hash.digest('base64');
}
}
async function iterateFolders(folderPath) {
folderPath.forEach(function (files) {
//function takes a folder path as input
computeMetaHash(files).then(result => { //call create hash function
console.log({"path":files,"hashCode":result});
}).then(()=>{ //build fragments
//folderPath is an array; each `files` entry is one folder name we can build in
console.log("%s build...", files);
execSync(`cd ${files} && npm run build`, { encoding: 'utf-8' });
}).then(()=>{// Finish timing
end = new Date().getTime();
getBuildTime(start,end);
}).catch(err => {
console.log(err);
});
});
}
async function readDirectory() {
let files = await readdir(p)
const folderPath = files.map(function (file) {
//return file or folder path
return path.join(p, file);
}).filter(function (file) {
//synchronous check: keep the path only if it is a directory
return fs.statSync(file).isDirectory();
})
//check whether hash.json exists
if (fs.existsSync(hashFile)) {
// path exists
console.log("File exists: ", hashFile);
}
else
{
//This is the first pipeline: all fragments will build, then hash.json will be created.
console.log(hashFile, " does NOT exist, build will start and hash.json will be created:");
// Start timing
start = new Date().getTime();
iterateFolders(folderPath,files);
console.log("IT WILL BE LAST ONE ")
}
}
readDirectory();
Well if you want to wait for its execution, then you have to use await :) Currently it's just iterateFolders(folderPath,files);, so you run it, but you don't wait for it.
await iterateFolders(folderPath,files);
That's your first issue. Then this method runs a loop and calls some other methods. But first, for the await to be meaningful, the inner calls need to return promises that you actually wait on (which you do not do). And second, await doesn't work inside forEach, as stated in the comments above. Read Using async/await with a forEach loop for more details.
Fix those three issues and you'll make it.
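To see the forEach point in isolation, here is a minimal sketch (the delay helper is hypothetical):

const delay = (ms) => new Promise((resolve) => setTimeout(resolve, ms));

async function demo() {
  [1, 2, 3].forEach(async (n) => {
    await delay(100);
    console.log('item', n); // these log AFTER 'done'
  });
  console.log('done'); // runs immediately: forEach ignores the promises returned by the callback
}

demo();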
In the iterateFolders function, you need to await computeMetaHash calls. To do so you can either use a for loop instead of calling forEach on folderPath or change forEach to map and use Promise.all.
Using the for loop method (processes the folders sequentially):
async function iterateFolders(folderPath) {
for (let files of folderPath) {
//function takes a folder path as input
await computeMetaHash(files).then(result => { //call create hash function
console.log({"path":files,"hashCode":result});
}).then(()=>{ //build fragments
//folderPath is an array; each `files` entry is one folder name we can build in
console.log("%s build...", files);
execSync(`cd ${files} && npm run build`, { encoding: 'utf-8' });
}).then(()=>{// Finish timing
end = new Date().getTime();
getBuildTime(start,end);
}).catch(err => {
console.log(err);
});
}
}
Using the Promise.all method (processes the folders concurrently):
async function iterateFolders(folderPath) {
return Promise.all(folderPath.map(function (files) {
//function takes a folder path as input
return computeMetaHash(files).then(result => { //call create hash function
console.log({"path":files,"hashCode":result});
}).then(()=>{ //build fragments
//folderPath is an array; each `files` entry is one folder name we can build in
console.log("%s build...", files);
execSync(`cd ${files} && npm run build`, { encoding: 'utf-8' });
}).then(()=>{// Finish timing
end = new Date().getTime();
getBuildTime(start,end);
}).catch(err => {
console.log(err);
});
}));
}
If you prefer, using async/await also allows you to get rid of the then and catch in both methods, which I believe makes it a little easier to read and understand.
Here's an example using the Promise.all method:
async function iterateFolders(folderPath) {
return Promise.all(folderPath.map(async (files) => {
try {
const result = await computeMetaHash(files);
console.log({ path: files, hashCode: result });
// build fragments
//folderPath is an array; each `files` entry is one folder name we can build in
console.log('%s build...', files);
execSync(`cd ${files} && npm run build`, { encoding: 'utf-8' });
// Finish timing
const end = Date.now();
getBuildTime(start, end);
} catch(err) {
console.log(err);
}
}));
}
You might also want to check out for await...of.
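For example, a sketch of the same loop using for await...of (map starts every computeMetaHash immediately, and the loop then consumes the results in array order):

async function iterateFolders(folderPath) {
  for await (const result of folderPath.map((files) => computeMetaHash(files))) {
    console.log(result);
  }
}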
Note: you also need to await iterateFolders when it's called in readDirectory.
So I have a mind-boggler. My app grabs a git repo (simple-git), then runs an npm i on the files inside (shelljs), and then zips them using archiver. Of course it needs to be async; the first and second parts work, but at the point of archiving it fails (the next step is for axios to await the zip being done). Also, before this, when I ran the zip code together with the repo-grabbing code, it would create the zip file in the correct root directory, but now it creates it in the folder directory instead (repo/folder), and the zip is now empty instead of containing the other contents. Please assist if you can.
The code:
//Make call
function makeCall() {
return new Promise((resolve) => {
resolve(
git()
.silent(true)
.clone(remote, ["-b", branch])
.then(() => console.log("finished"))
.catch((err) => console.error("failed: ", err))
);
});
}
//Make NPM Modules
async function makeModules() {
await makeCall();
const pathOfFolder = path.join(__dirname, "folder/sub");
shell.cd(pathOfFolder)
return new Promise((resolve) => {
resolve(
shell.exec("npm i"));
});
}
async function makeZip() {
await makeModules();
const output = fs.createWriteStream(`${branch}.zip`);
const archive = archiver("zip");
output.on("close", function () {
console.log(archive.pointer() + " total bytes");
console.log(
"archiver has been finalized and the output file descriptor has closed."
);
});
archive.pipe(output);
// append files from a sub-directory, putting its contents at the root of archive
archive.directory("folder", false);
// append files from a sub-directory and naming it `new-subdir` within the archive
archive.directory("subdir/", "new-subdir");
archive.finalize();
}
makeZip();
Resolved it: I moved the code into other files and set the paths correctly.
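For anyone who lands here with the same symptom: archive.finalize() returns before the zip is fully written, so one common pattern is to resolve a promise on the output stream's close event and await it. A sketch based on the question's code (using an absolute output path is an assumption, to counter the shell.cd in makeModules):

async function makeZip() {
  await makeModules();
  // absolute path, so the shell.cd() in makeModules can't change where the zip lands
  const output = fs.createWriteStream(path.join(__dirname, `${branch}.zip`));
  const archive = archiver("zip");
  const done = new Promise((resolve, reject) => {
    output.on("close", resolve); // fires once the output file descriptor has closed
    archive.on("error", reject);
  });
  archive.pipe(output);
  archive.directory(path.join(__dirname, "folder"), false);
  archive.finalize();
  await done; // only now is the zip safe to hand to axios
}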
I am getting a zipped file of images and CSV files from a user. I need to unzip those files and upload them to Firestore. When I upload the images, I also want to save the download path of the image in an image object, which I push later. I want to collect the IDs of all the image objects that I push and save them in a term object. I want to wait until all my files are uploaded before calling some other functions.
My function, though, is being called immediately, before my termObj is populated. I tried to save promises and wait until all of them resolve, but the Promise.all is executed immediately (it does not wait for the promises to be populated in the for loop).
How do I wait for all of my files to be uploaded and have a populated termObj before I call this.printDone()?
async fileChanged(event) {
const file = event.target.files[0];
const self = this;
let promises= [];
let termObj = {
all_images: [],
};
this.zipService.getEntries(file).subscribe( (next) => {
for (const ent of next) {
let filename : string = ent.filename;
const fileType = filename.slice(filename.indexOf("."));
this.zipService.getData(ent).data.subscribe(async function(val) {
let blobFile = new File([val], filename);
self.task = self.storage.upload(self.authService.getUser() + "/" +filename, blobFile);
if( fileType === '.jpg' || fileType === '.jpeg' || fileType === '.png'){
let pathToFile = self.authService.getUser() + '/' + filename;
// URL
await firebase.storage().ref().child( pathToFile ).getDownloadURL().then(function (url) {
let imageObj = {
downloadURL: url,
};
const imagePromise = self.db.collection('images').add(imageObj).then(function(ref){
termObj.all_images.push(ref.id);
console.log(termObj);
});
promises.push(imagePromise);
});
}
}); // end of unzip service that gets data from zipped entry
} // end of for loop looping through files
}); //gets entries from zipped file
await Promise.all(promises).then(()=>{
this.printDone();
console.log(termObj.all_images);
});
} // end of method
========== Edit ==========
Moving the await Promise.all statement to right after the end of the for loop and then logging termObj.all_images gives me the same result.
(screenshot: output of console.log(termObj.all_images))
Move this:
await Promise.all(promises).then(() => {
  this.printDone();
  console.log(termObj.all_images);
});
just after the closing of the for...of loop.
Your problem is caused by the subscribe being asynchronous (you already know that). But that implies that the promises array is still empty when this code is reached.
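One way to guarantee the array is populated before you await it is to build the promises synchronously from the observables themselves. A sketch (assuming RxJS 7's firstValueFrom is available and that getEntries emits an array of entries; the upload details from the question are elided):

// import { firstValueFrom } from 'rxjs';
const entries = await firstValueFrom(this.zipService.getEntries(file));
const promises = entries.map((ent) =>
  firstValueFrom(this.zipService.getData(ent).data).then(async (val) => {
    // ...upload the blob, get the download URL, push ref.id into termObj.all_images...
  })
);
await Promise.all(promises); // the array was fully populated before this line ran
this.printDone();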
I am parsing multiple large JSON files into my MongoDB database. At the moment I am using the stream-json npm package. After I load one file, I change the filename that I am loading and relaunch the script to load the next file. This is unnecessarily time-consuming. So how can I iterate through all the files automatically? At the moment my code looks like this:
const StreamArray = require('stream-json/utils/StreamArray');
const path = require('path');
const fs = require('fs');
const filename = path.join(__dirname, './data/xa0.json'); //The next file is named xa1.json and so on.
const stream = StreamArray.make();
stream.output.on('data', function (object) {
// my function block
});
stream.output.on('end', function () {
console.log('File Complete');
});
fs.createReadStream(filename).pipe(stream.input);
I tried iterating through the file names by adding a loop that would add +1 to the filename (i.e. xa0 to xa1) at the point where the script logs 'File Complete', but this didn't work. Any ideas how I might be able to achieve this or something similar?
Just scan your JSON files directory using fs.readdir. It will return a list of file names that you can then iterate, something like this:
fs.readdir("./jsonfiles", async (err, files) => {
for( file in files ){
await saveToMongo("./jsonfiles/" + file)
}
})
So you just launch your script once and wait until full completion.
Of course, in order for it to be awaited, you need to promisify the saveToMongo function, something like:
const saveToMongo = (fileName) => {
  return new Promise((resolve, reject) => {
    const stream = StreamArray.make();
    stream.output.on('data', function (object) {
      // ... logic here
    });
    stream.output.on('end', function () {
      console.log('File Complete');
      resolve() // Will trigger the next await
    });
    fs.createReadStream(fileName)
      .on('error', reject)
      .pipe(stream.input);
  })
}
I am new to react-native development and am using RNFetchBlob to work with files in internal storage. I want to create a gallery application, and for that I need to fetch all the images present on the phone.
What I was able to do is fetch all files from a particular directory and search for images in it.
RNFetchBlob.fs.ls(RNFetchBlob.fs.dirs.DCIMDir).then((files) => {
//Returns all files present in the directory
console.log(files);
//Returns files with .png or .jpg extension.
console.log(files.filter((element) => {return element.match(/\.(png|jpg|jpeg)$/i)}));
}).catch((err) =>{
console.log(err);
});
But with this approach I need to search in every directory by iterating into them with recursion, I wanted to know if there is some way fetching all the image files just with one command.
We can use a recursive function, something like the one below. I assume there must be some more optimized way, but this is what I ended up doing. As a further optimization, we could skip traversing a directory whose lastModified has not changed.
async function findAllFilesByExtension(path = INTERNAL_STORAGE_PATH) {
let files = [];
const lsList = await RNFetchBlob.fs.lstat(path);
if (!!lsList && Array.isArray(lsList)) {
let dirs = [];
lsList.forEach(item => {
if (item.type === 'directory') {
dirs.push(item);
return;
}
if (item.type === 'file' && item.filename.match(/\.(png|jpg|jpeg)$/i)) {
files.push(item)
return;
}
});
const response = await Promise.all(dirs.map(item => findAllFilesByExtension(item.path)));
response.forEach(nestedFiles => {
files = [...files, ...nestedFiles];
});
}
return files;
}
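Usage would then look something like this (starting from the DCIM directory used in the question):

findAllFilesByExtension(RNFetchBlob.fs.dirs.DCIMDir)
  .then((images) => console.log(images.map((item) => item.path)))
  .catch((err) => console.log(err));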