NodeJS concatenate all files in a directory - javascript

Is there a faster or more succinct way to concatenate all of the files located in a directory using NodeJS?
In bash I could do something like this:
for file in $1
do
cat "$file"
echo
done > $2;
Here is what I'm doing now:
var fs = require('fs');
var Promise = require('bluebird');
module.exports = function(directory, destination) {
return new Promise((resolve, reject) => {
fs.readdir(directory, (err, files) => {
if (err) {
return reject(err);
}
(function next() {
var file = files.shift();
if (!file) {
return resolve();
}
fs.readFile(directory + '/' + file, (err, content) => {
if (err) {
return reject(err);
}
fs.appendFile(destination, '\n' + content, (err) => {
if (err) {
return reject(err);
}
return next();
});
});
})();
});
});
};

Something like that?
require('child_process').execSync('cat *').toString('UTF-8')
:D
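For completeness, a slightly more usable sketch of the same trick; this assumes a POSIX shell with cat on the PATH and unexotic file names (anything trickier and you are back to the fs-based answers):
const { execSync } = require('child_process');
const fs = require('fs');

function concatWithCat(directory, destination) {
  // Capture cat's stdout as a Buffer, then write it out from Node,
  // which avoids having to shell-quote the destination path.
  const joined = execSync('cat ./*', { cwd: directory });
  fs.writeFileSync(destination, joined);
}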

Using the async library you can easily read files in parallel and then join the results.
const fs = require("fs");
const async = require("async");
const path = require("path");
module.exports = function(directory, destination) {
return new Promise((resolve, reject) => {
fs.readdir(directory, (err, files) => {
if (err)
return reject(err);
files = files.map(file => path.join(directory,file));
//Read all files in parallel
async.map(files, fs.readFile, (err, results) => {
if (err)
return reject(err);
//results[0] contents of file #1
//results[1] contents of file #2
//results[n] ...
//Write the joined results to destination
fs.writeFile(destination, results.join("\n"), (err) => {
if (err)
return reject(err);
resolve();
});
});
});
});
}

If you're going to use bluebird then you get the benefit of promisification. You can use promisifyAll() to convert every error-first, callback-accepting async function in the fs module into one that returns a promise. You can read more about it in bluebird's promisification docs.
The code below reads in all of the files as strings, then reduces their contents into a single string and writes that string to the destination.
It's probably best not to catch() any returned errors here. Rather, the caller should attach a catch() to handle returned errors as they need.
const Promise = require('bluebird')
const fs = Promise.promisifyAll(require('fs'))
const path = require('path')
module.exports = (directory, destination) => {
return fs.readdirAsync(directory)
.map(file => fs.readFileAsync(path.join(directory, file), 'utf8'))
.then(contents => fs.writeFileAsync(destination, contents.join('\n')))
}
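As noted above, error handling is left to the caller; a minimal usage sketch (assuming the module is saved as concat.js, a hypothetical file name):
const concatFiles = require('./concat');

concatFiles('./logs', './combined.log')
  .then(() => console.log('done'))
  .catch(err => console.error('concat failed:', err));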

All in one line:
fs.readdirSync('./').forEach((file) => { if(fs.lstatSync(file).isFile()) fs.appendFileSync('./output.file', fs.readFileSync(file).toString()) })
Replace './' with target directory and './output.file' with target destination.
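One caveat, hedged: if output.file lives inside the directory being scanned, a second run will append the output file to itself. A variant that skips it explicitly:
const fs = require('fs');
fs.readdirSync('./')
  .filter(f => f !== 'output.file' && fs.lstatSync(f).isFile())
  .forEach(f => fs.appendFileSync('./output.file', fs.readFileSync(f)));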

You can do the same thing without bluebird, since Node 8.x includes the util package to promisify the fs API.
This example shows how I use it in my project to concatenate minified files (so remove the filter if necessary).
const fs = require("fs");
const path = require("path");
const {promisify} = require("util"); // requires Node 8.x
const readdir = promisify(fs.readdir);
const readFile = promisify(fs.readFile);
const appendFile = promisify(fs.appendFile);
// Append all minified and obfuscated files in the source directory.
// The resulting file is generated in destination.
// (Renamed from the original `appendFile`, which would collide with the
// promisified fs.appendFile declared above.)
function concatMinified(directory, destination) {
  readdir(directory)
    .then((files) => {
      console.log('FILES CONTENT:', files);
      files.filter(file => {
        console.log('FILTER > ' + file);
        return (file.indexOf('-min.js') !== -1 && file.indexOf('-min.js.map') === -1);
      })
      .map(file => {
        console.log('MAP (' + destination + ') > ' + path.join(directory, file));
        readFile(path.join(directory, file), 'utf8')
          .then(data => {
            appendFile(destination, data + '\n')
              .then(() => console.log('append done'))
              .catch(err => console.error(err));
          });
      });
    })
    .catch((err) => {
      console.error('ERROR:', err);
    });
}
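One caveat with the snippet above: the reads run in parallel and each file is appended as soon as its read finishes, so the order of chunks in destination is non-deterministic. A sequential async/await sketch (same Node 8+ assumption; concatMinifiedOrdered is a hypothetical name) that preserves directory order:
const fs = require('fs');
const path = require('path');
const { promisify } = require('util');
const readdir = promisify(fs.readdir);
const readFile = promisify(fs.readFile);
const appendFile = promisify(fs.appendFile);

async function concatMinifiedOrdered(directory, destination) {
  // endsWith('-min.js') keeps minified files and naturally excludes '-min.js.map'
  const files = (await readdir(directory)).filter(f => f.endsWith('-min.js'));
  for (const file of files) {
    const data = await readFile(path.join(directory, file), 'utf8');
    await appendFile(destination, data + '\n'); // appends stay in readdir order
  }
}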

Related

Get all files within a folder containing string, push filenames to array and return array using Node FS

As the title describes, I would like to read all files from a specific directory and return the names of all the files that have a specific string in their contents.
Here is how my code looks so far:
const dirname = path.resolve("./results/");
async function readFiles(dirname) {
const allResults = []
fs.readdir(dirname, function(err, filenames) {
if (err) {
console.log(err);
return;
}
filenames.forEach(async function(filename) {
fs.readFile(dirname + "/" + filename, 'utf-8', function(err, content) {
if (err) {
console.log(err);
return;
}
if (content.includes('content string')) {
allResults.push(filename);
}
});
});
});
return allResults;
}
readFiles(dirname).then((res) => {
console.log(res);
})
The result I'm getting is []
so I understand it's an issue with promises and async functions, however, this is not a concept I fully grasp yet, and despite trying several combinations of possibilities (new Promise(), or .then, await, or readdirSync and readFileSync) I had no success.
What am I missing so that it returns the allResults array only once all files have been read?
You should ditch callback syntax and use the fs.promises API. It looks much cleaner:
const fs = require("fs").promises;
const path = require("path");
const dirname = path.resolve("./results/");
async function readDir(dirname) {
const allResults = [];
try {
const files = await fs.readdir(dirname);
for (const fileName of files) {
try {
const content = await fs.readFile(`${dirname}/${fileName}`, {
encoding: "utf-8"
});
if (content.includes("content string")) {
allResults.push(fileName);
}
} catch (error) {
console.log(error.message);
}
}
return allResults;
} catch (error) {
console.log(error);
}
}
readDir(dirname).then(data => {
console.log(data);
});
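The loop above reads the files one at a time. If memory allows, a parallel variant (a sketch, not part of the original answer) can issue all the reads at once with Promise.all:
const fs = require("fs").promises;
const path = require("path");

async function readDirParallel(dirname) {
  const files = await fs.readdir(dirname);
  const results = await Promise.all(
    files.map(async fileName => {
      const content = await fs.readFile(path.join(dirname, fileName), "utf-8");
      // keep the name only when the file contains the search string
      return content.includes("content string") ? fileName : null;
    })
  );
  return results.filter(Boolean);
}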

How do I get an array of folder names in a certain directory? [duplicate]

I was hoping this would be a simple thing, but I cannot find anything out there to do so.
I just want to get all folders/directories within a given folder/directory.
So for example:
<MyFolder>
|- SomeFolder
|- SomeOtherFolder
|- SomeFile.txt
|- SomeOtherFile.txt
|- x-directory
I would expect to get an array of:
["SomeFolder", "SomeOtherFolder", "x-directory"]
Or the above with the path if that was how it was served...
So does anything already exist to do the above?
Promise
import { readdir } from 'fs/promises'
const getDirectories = async source =>
(await readdir(source, { withFileTypes: true }))
.filter(dirent => dirent.isDirectory())
.map(dirent => dirent.name)
Callback
import { readdir } from 'fs'
const getDirectories = (source, callback) =>
readdir(source, { withFileTypes: true }, (err, files) => {
if (err) {
callback(err)
} else {
callback(
files
.filter(dirent => dirent.isDirectory())
.map(dirent => dirent.name)
)
}
})
Synchronous
import { readdirSync } from 'fs'
const getDirectories = source =>
readdirSync(source, { withFileTypes: true })
.filter(dirent => dirent.isDirectory())
.map(dirent => dirent.name)
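Usage is the same shape for all three variants; for example, with the promise version (inside an async function):
const dirs = await getDirectories('MyFolder');
console.log(dirs); // ["SomeFolder", "SomeOtherFolder", "x-directory"]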
List directories using a path.
function getDirectories(path) {
return fs.readdirSync(path).filter(function (file) {
return fs.statSync(path+'/'+file).isDirectory();
});
}
Recursive solution
I came here in search of a way to get all of the subdirectories, and all of their subdirectories, etc. Building on the accepted answer, I wrote this:
const fs = require('fs');
const path = require('path');
function flatten(lists) {
return lists.reduce((a, b) => a.concat(b), []);
}
function getDirectories(srcpath) {
return fs.readdirSync(srcpath)
.map(file => path.join(srcpath, file))
.filter(path => fs.statSync(path).isDirectory());
}
function getDirectoriesRecursive(srcpath) {
return [srcpath, ...flatten(getDirectories(srcpath).map(getDirectoriesRecursive))];
}
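For example, run against the sample tree from the question, it returns the root plus every nested directory (paths joined with the platform separator):
getDirectoriesRecursive('MyFolder');
// => ['MyFolder', 'MyFolder/SomeFolder', 'MyFolder/SomeOtherFolder', 'MyFolder/x-directory']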
This should do it:
CoffeeScript (sync)
fs = require 'fs'
getDirs = (rootDir) ->
files = fs.readdirSync(rootDir)
dirs = []
for file in files
if file[0] != '.'
filePath = "#{rootDir}/#{file}"
stat = fs.statSync(filePath)
if stat.isDirectory()
dirs.push(file)
return dirs
CoffeeScript (async)
fs = require 'fs'
getDirs = (rootDir, cb) ->
fs.readdir rootDir, (err, files) ->
dirs = []
for file, index in files
if file[0] != '.'
filePath = "#{rootDir}/#{file}"
fs.stat filePath, (err, stat) ->
if stat.isDirectory()
dirs.push(file)
if files.length == (index + 1)
cb(dirs)
JavaScript (async)
var fs = require('fs');
var getDirs = function(rootDir, cb) {
fs.readdir(rootDir, function(err, files) {
var dirs = [];
for (var index = 0; index < files.length; ++index) {
var file = files[index];
if (file[0] !== '.') {
var filePath = rootDir + '/' + file;
fs.stat(filePath, function(err, stat) {
if (stat.isDirectory()) {
dirs.push(this.file);
}
if (files.length === (this.index + 1)) {
return cb(dirs);
}
}.bind({index: index, file: file}));
}
}
});
}
Alternatively, if you are able to use external libraries, you can use filehound. It supports callbacks, promises and sync calls.
Using promises:
const Filehound = require('filehound');
Filehound.create()
.path("MyFolder")
.directory() // only search for directories
.find()
.then((subdirectories) => {
console.log(subdirectories);
});
Using callbacks:
const Filehound = require('filehound');
Filehound.create()
.path("MyFolder")
.directory()
.find((err, subdirectories) => {
if (err) return console.error(err);
console.log(subdirectories);
});
Sync call:
const Filehound = require('filehound');
const subdirectories = Filehound.create()
.path("MyFolder")
.directory()
.findSync();
console.log(subdirectories);
For further information (and examples), check out the docs: https://github.com/nspragg/filehound
Disclaimer: I'm the author.
With node.js version >= v10.13.0, fs.readdirSync will return an array of fs.Dirent objects if withFileTypes option is set to true.
So you can use,
const fs = require('fs')
const directories = source => fs.readdirSync(source, {
withFileTypes: true
}).reduce((a, c) => {
c.isDirectory() && a.push(c.name)
return a
}, [])
var getDirectories = (rootdir, cb) => {
  fs.readdir(rootdir, (err, files) => {
    if (err) throw err;
    var dirs = files.map(filename => path.join(rootdir, filename)).filter(pathname => fs.statSync(pathname).isDirectory());
    return cb(dirs);
  });
};
getDirectories('.', myDirectories => console.log(myDirectories));
Using fs-extra, which promisifies the async fs calls, and the new async/await syntax:
const fs = require("fs-extra");
async function getDirectories(path){
let filesAndDirectories = await fs.readdir(path);
let directories = [];
await Promise.all(
filesAndDirectories.map(name =>{
return fs.stat(path + "/" + name) // join with a separator; the original relied on `path` ending in a trailing slash
.then(stat =>{
if(stat.isDirectory()) directories.push(name)
})
})
);
return directories;
}
let directories = await getDirectories("/") // run inside an async function (or with top-level await)
This answer does not use blocking functions like readdirSync or statSync. It does not use external dependencies nor find itself in the depths of callback hell.
Instead we use modern JavaScript conveniences like Promises and async-await syntax. And asynchronous results are processed in parallel; not sequentially -
const { readdir, stat } =
require ("fs") .promises
const { join } =
require ("path")
const dirs = async (path = ".") =>
(await stat (path)) .isDirectory ()
? Promise
.all
( (await readdir (path))
.map (p => dirs (join (path, p)))
)
.then
( results =>
[] .concat (path, ...results)
)
: []
I'll install an example package, and then test our function -
$ npm install ramda
$ node
Let's see it work -
> dirs (".") .then (console.log, console.error)
[ '.'
, 'node_modules'
, 'node_modules/ramda'
, 'node_modules/ramda/dist'
, 'node_modules/ramda/es'
, 'node_modules/ramda/es/internal'
, 'node_modules/ramda/src'
, 'node_modules/ramda/src/internal'
]
Using a generalised module, Parallel, we can simplify the definition of dirs -
const Parallel =
require ("./Parallel")
const dirs = async (path = ".") =>
(await stat (path)) .isDirectory ()
? Parallel (readdir (path))
.flatMap (f => dirs (join (path, f)))
.then (results => [ path, ...results ])
: []
The Parallel module used above was a pattern that was extracted from a set of functions designed to solve a similar problem. For more explanation, see this related Q&A.
And an async version of getDirectories; you need the async module for this:
var fs = require('fs');
var path = require('path');
var async = require('async'); // https://github.com/caolan/async
// Original function
function getDirsSync(srcpath) {
return fs.readdirSync(srcpath).filter(function(file) {
return fs.statSync(path.join(srcpath, file)).isDirectory();
});
}
function getDirs(srcpath, cb) {
fs.readdir(srcpath, function (err, files) {
if(err) {
console.error(err);
return cb([]);
}
var iterator = function (file, cb) {
fs.stat(path.join(srcpath, file), function (err, stats) {
if(err) {
console.error(err);
return cb(false);
}
cb(stats.isDirectory());
})
}
async.filter(files, iterator, cb);
});
}
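Note that the iterator above passes a bare boolean to its callback, which matches the async 1.x API; from async 2.x on, filter expects error-first callbacks (cb(null, stats.isDirectory())). Callback-style usage looks like:
getDirs('.', function (dirs) {
  console.log(dirs); // names of directories in the current folder
});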
Fully async version with ES6, only native packages, fs.promises and async/await, does file operations in parallel:
const fs = require('fs');
const path = require('path');
async function listDirectories(rootPath) {
const fileNames = await fs.promises.readdir(rootPath);
const filePaths = fileNames.map(fileName => path.join(rootPath, fileName));
const filePathsAndIsDirectoryFlagsPromises = filePaths.map(async filePath => ({path: filePath, isDirectory: (await fs.promises.stat(filePath)).isDirectory()}))
const filePathsAndIsDirectoryFlags = await Promise.all(filePathsAndIsDirectoryFlagsPromises);
return filePathsAndIsDirectoryFlags.filter(filePathAndIsDirectoryFlag => filePathAndIsDirectoryFlag.isDirectory)
.map(filePathAndIsDirectoryFlag => filePathAndIsDirectoryFlag.path);
}
Tested, it works nicely.
You can use graph-fs
const {Node} = require("graph-fs");
const directory = new Node("/path/to/directory");
const subDirectories = directory.children.filter(child => child.is.directory);
Using the glob package, just add a trailing slash to find directories only:
import {promise as glob} from "glob-promise"
const firstLevelFolders = await glob("MyFolder/*/")
const recursiveFolders = await glob("MyFolder/**/")
CoffeeScript version of this answer, with proper error handling:
fs = require "fs"
{join} = require "path"
async = require "async"
get_subdirs = (root, callback)->
fs.readdir root, (err, files)->
return callback err if err
subdirs = []
async.each files,
(file, callback)->
fs.stat join(root, file), (err, stats)->
return callback err if err
subdirs.push file if stats.isDirectory()
callback null
(err)->
return callback err if err
callback null, subdirs
Depends on async
Alternatively, use a module for this!
(There are modules for everything. [citation needed])
If you need a fully async version, you can use something like this.
Record the number of directory entries and use it as a counter to tell whether all the async stat tasks have finished.
When the counter reaches the total, every file's stat has been checked, so call the callback.
This only works because Node.js runs JavaScript on a single thread, so no two callbacks can increment the counter at the same time.
'use strict';
var fs = require("fs");
var path = require("path");
var basePath = "./";
function result_callback(results) {
results.forEach((obj) => {
console.log("isFile: " + obj.fileName);
console.log("fileName: " + obj.isFile);
});
};
fs.readdir(basePath, (err, files) => {
var results = [];
var total = files.length;
var finished = 0;
files.forEach((fileName) => {
// console.log(fileName);
var fullPath = path.join(basePath, fileName);
fs.stat(fullPath, (err, stat) => {
// this will work because Node.js is single thread
// therefore, the counter will not increment at the same time by two callback
finished++;
if (stat.isFile()) {
results.push({
fileName: fileName,
isFile: stat.isFile()
});
}
if (finished == total) {
result_callback(results);
}
});
});
});
As you can see, this is a "depth first" approach, and it could result in callback hell; it is not quite "functional". People try to solve this problem with Promises, by wrapping the async task in a Promise object.
'use strict';
var fs = require("fs");
var path = require("path");
var basePath = "./";
function result_callback(results) {
results.forEach((obj) => {
console.log("isFile: " + obj.fileName);
console.log("fileName: " + obj.isFile);
});
};
fs.readdir(basePath, (err, files) => {
var results = [];
var total = files.length;
var finished = 0;
var promises = files.map((fileName) => {
// console.log(fileName);
var fullPath = path.join(basePath, fileName);
return new Promise((resolve, reject) => {
// try to replace fullPath with "aaa"; it will reject
fs.stat(fullPath, (err, stat) => {
if (err) {
reject(err);
return;
}
var obj = {
fileName: fileName,
isFile: stat.isFile()
};
resolve(obj);
});
});
});
Promise.all(promises).then((values) => {
console.log("All the promise resolved");
console.log(values);
console.log("Filter out folder: ");
values
.filter((obj) => obj.isFile)
.forEach((obj) => {
console.log(obj.fileName);
});
}, (reason) => {
console.log("Not all the promise resolved");
console.log(reason);
});
});
Using the fs and path modules you can get the folders. If you want files instead, change isDirectory() to isFile() (see fs.Stats in the Node.js fs docs). Finally, the path module can give you the file's name, extension, and so on (see the Node.js path docs).
var fs = require("fs"),
path = require("path");
//your <MyFolder> path
var p = "MyFolder"
fs.readdir(p, function (err, files) {
if (err) {
throw err;
}
//this gets every folder and file directly under <MyFolder>
files.map(function (file) {
//return the file or folder path, such as MyFolder/SomeFile.txt
return path.join(p, file);
}).filter(function (file) {
//synchronous check: keep the entry only if it is a directory
return fs.statSync(file).isDirectory();
}).forEach(function (files) {
//each `files` value here is one folder path; handle the folder as needed
console.log("%s", files);
});
});
Just in case anyone else ends up here from a web search, and has Grunt already in their dependency list, the answer to this becomes trivial. Here's my solution:
/**
* Return all the subfolders of this path
* @param {String} parentFolderPath - valid folder path
* @param {String} glob ['/*'] - optional glob so you can do recursive if you want
* @returns {String[]} subfolder paths
*/
getSubfolders = (parentFolderPath, glob = '/*') => {
return grunt.file.expand({filter: 'isDirectory'}, parentFolderPath + glob);
}
Another recursive approach
Thanks to Mayur for telling me about withFileTypes. I wrote the following code for getting the files of a particular folder recursively. It can be easily modified to get only directories.
const { readdirSync } = require('fs');
const path = require('path');

const getFiles = (dir, base = '') => readdirSync(dir, { withFileTypes: true }).reduce((files, file) => {
  const filePath = path.join(dir, file.name)
  const relativePath = path.join(base, file.name)
  if (file.isDirectory()) {
    return files.concat(getFiles(filePath, relativePath))
  } else if (file.isFile()) {
    file.__fullPath = filePath
    file.__relativePath = relativePath
    return files.concat(file)
  }
  return files // e.g. symlinks: neither file nor directory, keep the accumulator intact
}, [])
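Each element of the result is a Dirent with the two extra path properties attached; a quick usage sketch:
const files = getFiles('./src');
files.forEach(f => console.log(f.name, f.__fullPath, f.__relativePath));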
functional programming
const fs = require('fs')
const path = require('path')
const R = require('ramda')
const getDirectories = pathName => {
const isDirectory = pathName => fs.lstatSync(pathName).isDirectory()
const mapDirectories = pathName => R.map(name => path.join(pathName, name), fs.readdirSync(pathName))
const filterDirectories = listPaths => R.filter(isDirectory, listPaths)
return {
paths:R.pipe(mapDirectories)(pathName),
pathsFiltered: R.pipe(mapDirectories, filterDirectories)(pathName)
}
}
You could use dree, if adding a module is acceptable:
const dree = require('dree');
const options = {
depth: 1
};
const fileCallback = function() {};
const directories = [];
const dirCallback = function(dir) {
directories.push(dir.name);
};
dree.scan('./dir', options, fileCallback, dirCallback); // pass the options and callbacks declared above
console.log(directories);
The directories which are direct children of the specified path ("./dir") will be printed.
If you do not set the option depth: 1, you would obtain all the directories recursively, not only the direct children of the specified path.

fs.readdir not reading files extracted to a local directory (function not even running)

I have extracted zip files to a local directory within my lambda function however fs.readdir function is not working and is also not returning an error. I think maybe I have a syntax error with the way I set up the function to run asynchronously.
// Make Directories for Zipped and Unzipped files
try {
const zipDirFolder = await mkdirp(localZippedFolder, { recursive: true });
const unZipDirFolder = await mkdirp(localUnzippedFolder, { recursive: true });
console.log(unZipDirFolder);
// Download files from s3 Bucket
let newFolder = await s3.getObject(params).promise()
.then(data => {
console.log(data);
return data;
});
const newZipFile = newFolder.Body;
// Extract files from zipped folder and store them in a local directory
let filezFolder = await fs.createReadStream(params.Key)
.pipe(unzipper.Extract({path: unZipDirFolder}));
console.log(unZipDirFolder);
// Read Contents of that local directory
await fs.readdir(unZipDirFolder, function (err, files) {
if (err) {
console.log('THERE WAS AN ERROR:' + err);
} else {
console.log(files);
}
});
}
Problem: fs.readdir is not running nor is it returning an error message.
There is a problem is in this statement:
let filezFolder = await fs.createReadStream(params.Key)
.pipe(unzipper.Extract({path: unZipDirFolder}));
fs.createReadStream().pipe() does not return a promise. So using await on it doesn't do anything useful. So, you have not waited until the .pipe() is done. Thus, you are calling fs.readdir() before the unzip operation is done.
At the moment, streams are not particularly promise-friendly. You will probably need to register for some sort of event on the resulting stream that indicates when your unzip operation is done and then do the fs.readdir() from within that event handler. I don't know your unzip library myself to know which event or how to best monitor it for completion.
Since .pipe() returns a writable stream, you can probably do this:
fs.createReadStream(params.Key)
.pipe(unzipper.Extract({path: unZipDirFolder}))
.on('finish', () => {
fs.readdir(...)
}).on('error', (err) => {
// error handling here
console.log(err);
});
Here's a simple little stand-alone program that shows the concept I'm talking about:
const unzipper = require('unzipper');
const fs = require('fs');
const fsp = fs.promises;
const path = require('path');
const inputZip = path.join(__dirname, "zip", "photos.zip");
const outputZipDir = path.join(__dirname, "zip-output")
function run() {
fs.createReadStream(inputZip)
.pipe(unzipper.Extract({ path: outputZipDir }))
.on('finish', async () => {
let files = await fsp.readdir(outputZipDir);
console.log(files);
// use the files here
}).on('error', err => {
console.log(err);
});
}
run();
The setup for this program is to put photos.zip into a zip subdirectory from where this program is run and to create a zip-output subdirectory from where this program is run for the unzipped files to go.
And, here's a promisified version that uses the same setup:
const unzipper = require('unzipper');
const fs = require('fs');
const fsp = fs.promises;
const path = require('path');
const inputZip = path.join(__dirname, "zip", "photos.zip");
const outputZipDir = path.join(__dirname, "zip-output")
// returns a promise that resolves/rejects when the unzip operation is done
function unzip(inputZip, outputDir) {
return new Promise((resolve, reject) => {
fs.createReadStream(inputZip)
.pipe(unzipper.Extract({ path: outputDir }))
.on('finish', resolve)
.on('error', reject);
});
}
async function run2() {
await unzip(inputZip, outputZipDir);
let files = await fsp.readdir(outputZipDir);
console.log(files);
}
run2().catch(err => {
console.log(err);
});
This is probably because you're trying to await fs.readdir and at the same time passing a callback. You shouldn't do both. Either remove the await and keep the callback, or make your lambda function async and remove the callback.
//Using await (don't forget to place this in an async function; needs the promise API)
const res = await fs.promises.readdir(yourFilePath);
//Using Callbacks
fs.readdir(yourFilePath, (err, files) => {
if (err) {
console.log('THERE WAS AN ERROR:' + err);
} else {
console.log(files);
}
});
Or you could use fs.readdirSync and do
const res = fs.readdirSync(yourFilePath)

Rename files asynchronously in Node.js if destination files don't exist

I am trying to rename files asynchronously in Node.js only if destination files don't exist.
I made a quick test like follows:
const fs = require('fs')
const files = [ 'file1', 'file2', 'file3' ]
const new_name = 'new-name' // same destination name for all
fs.exists() - DEPRECATED
for (let file of files)
fs.exists(new_name, (exists) => {
if (!exists) fs.rename(file, new_name, (err) => {})
})
fs.access() - RECOMMENDED
for (let file of files)
fs.access(new_name, fs.constants.F_OK, (err) => {
if (err) fs.rename(file, new_name, (err) => {})
})
fs.move() - from fs-extra
const fs_extra = require('fs-extra')
for (let file of files)
fs_extra.move(file, new_name, { overwrite: false }, (err) => {})
Each time, all 3 files were overwritten and renamed to one file.
I believe this happens because all the exists checks fire sooner than any rename happens.
I know how to accomplish this task synchronously, but want to be sure that there is no proper async way to do so.
You can create a Promise which resolves when the file is renamed
fs.rename(file, new_name, (err) => {
resolve(); <------
});
or when renaming is skipped
fs.access(new_name, fs.constants.F_OK, (err) => {
if (err) {
return fs.rename(file, new_name, (err) => {
resolve();
});
}
resolve(); <------
});
Full code
(async () => {
for (let file of files) {
await new Promise((resolve) => {
fs.access(new_name, fs.constants.F_OK, (err) => {
if (err) {
return fs.rename(file, new_name, (err) => {
resolve();
});
}
resolve();
});
});
}
})();
and if you don't want to mix async/await with Promise
(async () => {
function rename(file, new_name) {
return new Promise((resolve) => {
fs.access(new_name, fs.constants.F_OK, (err) => {
if (err) {
return fs.rename(file, new_name, (err) => {
resolve();
});
}
resolve();
});
});
}
for (let file of files) {
await rename(file, new_name);
}
})();
@ponury-kostek's solution works brilliantly and is marked as the accepted answer.
I ended up with the following code since it's a bit shorter:
async function rename_files() {
for (let file of files)
await fs.move(file, new_name)
}
rename_files()
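Note that fs.move here comes from fs-extra, and with its default { overwrite: false } it rejects once new_name exists, so the loop above throws on the second file. A hedged variant that treats that rejection as "skip":
const fs = require('fs-extra');

async function rename_files() {
  for (let file of files) {
    try {
      await fs.move(file, new_name); // rejects if new_name already exists
    } catch (err) {
      console.error(`skipped ${file}: ${err.message}`);
    }
  }
}
rename_files();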
Instead of wrapping the fs library in promises, I like to import the promise implementation of the fs library and then call the fs methods with await.
import {promises as fs_promises} from 'fs'; // the promise implementation of the fs library
async function renameFile() {
  const fileFullPath = '1234.txt';
  const newFileFullPath = '5678.txt';
  try {
    // The promise API takes no callback; the original passed one that never ran.
    await fs_promises.rename(fileFullPath, newFileFullPath);
    console.log("\nFile Renamed\n");
  } catch (error) {
    console.log(error);
  }
}
await renameFile(); // Call the async method.

How to export module asynchronously in nodejs

I cannot export a module that I wrote myself in an asynchronous way.
const glob = require('glob');
var confFiles;
glob("conf/**/*.conf", function (er, files) {
confFiles = files;
});
module.exports = new Promise(function(resolve, reject) {
resolve(confFiles);
});
This is the module itself. I want to access confFiles in other files, but the point is that glob is asynchronous and I'm having trouble finding a way to solve this.
Resolve when the callback calls back:
module.exports = new Promise(function(resolve, reject) {
  glob("conf/**/*.conf", function (err, files) {
    if (err) reject(err);
    else resolve(files);
  });
});
Or a bit shorter:
const glob = require("glob");
const { promisify } = require("util");
module.exports = promisify(glob)("conf/**/*.conf");
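In userland the exported promise can then be chained or awaited (a sketch, assuming the module is saved as conf.js):
// index.js
const confFiles = require('./conf.js');
confFiles.then(files => console.log(files)).catch(console.error);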
I'd export a load method instead:
// conf.js
const glob = require('glob')
module.exports.load = () => new Promise((resolve, reject) => {
glob('conf/**/*.conf', function (err, files) {
if (err) return reject(err)
resolve(files)
})
})
And then in userland:
// index.js
const conf = require('./conf.js')
conf.load()
.then(files => {
console.log(files)
})
Or, you can just use glob.sync instead and avoid dealing with async code entirely:
// conf.js
const glob = require('glob')
module.exports = glob.sync('conf/**/*.conf')
And then in userland:
// index.js
const files = require('./conf.js')
console.log(files)
Just keep in mind that glob.sync is a blocking operation.
