How to test a recursive function in Jest.js - javascript

I have a script that loops over directories and matches files of a specific type. Unfortunately, Jest passes this test before it finishes. I know why, but I don't know how to make the script wait for the looping to end.
import fs from 'fs'
const path = require('path');

describe('something', () => {
  it('should something', () => {
    const traverseDir = (dir, callback) => {
      fs.readdirSync(dir).forEach(file => {
        let fullPath = path.join(dir, file);
        if (fs.lstatSync(fullPath).isDirectory()) {
          callback(fullPath)
          traverseDir(fullPath, callback);
        } else {
          callback(fullPath)
        }
      });
    }

    traverseDir('src/', (fullPath) => {
      const splitted = fullPath.split('/')
      const filename = splitted[splitted.length - 1]
      if (filename.match(/.*.foo/)) {
        fs.readFile(fullPath, 'utf8', (err, data) => {
          expect(err).toBe(null)
          // some assertion
        })
      }
    })
  })
})

You could take done as the test function's parameter and call it when the test ends.
You can read more about async testing here.
import fs from "fs";
const path = require("path");

describe("something", () => {
  it("should something", done => {
    const traverseDir = (dir, callback) => {
      fs.readdirSync(dir).forEach(file => {
        let fullPath = path.join(dir, file);
        if (fs.lstatSync(fullPath).isDirectory()) {
          callback(fullPath);
          traverseDir(fullPath, callback);
        } else {
          callback(fullPath);
        }
      });
      done(); // Call done to tell Jest that the test has finished.
    };

    traverseDir("src/", fullPath => {
      const splitted = fullPath.split("/");
      const filename = splitted[splitted.length - 1];
      if (filename.match(/.*.foo/)) {
        fs.readFile(fullPath, "utf8", (err, data) => {
          expect(err).toBe(null);
        });
      }
    });
  });
});
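One caveat worth noting: done() here runs once per recursive traverseDir call, and it fires before the fs.readFile callbacks (and their assertions) have run, so a failure can surface after the test has already been reported as passed. A minimal sketch of a stricter variant, assuming fs.promises is available: collect the matching paths first, then await every read inside an async test (the /\.foo$/ pattern is a tightened stand-in for the question's match):

const fs = require('fs').promises;
const path = require('path');

// recursively collect every file path under dir
const collectFiles = async (dir, results = []) => {
  for (const file of await fs.readdir(dir)) {
    const fullPath = path.join(dir, file);
    if ((await fs.stat(fullPath)).isDirectory()) {
      await collectFiles(fullPath, results);
    } else {
      results.push(fullPath);
    }
  }
  return results;
};

describe('something', () => {
  it('should something', async () => {
    const files = (await collectFiles('src/')).filter(f => /\.foo$/.test(f));
    for (const file of files) {
      const data = await fs.readFile(file, 'utf8'); // rejects on error, failing the test
      // some assertion on data
    }
  });
});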

You should use fs.promises functions to list the contents of your directory recursively to obtain a single unified file list.
Unit test this function separately from any code that actually reads the file. (e.g.: your filename.match and readFile code should be tested separately from the traverseDir code.)
Example of walking directories asynchronously to get a unified file list:
This asynchronous allFilesIn function gets all files within a directory recursively and returns the list as a single array with full (relative) paths.
const fs = require('fs').promises;
const path = require('path');

const allFilesIn = async (dir, results = []) => {
  const files = await fs.readdir(dir);
  for (const file of files) {
    const fullPath = path.join(dir, file);
    const stat = await fs.stat(fullPath);
    if (stat.isDirectory()) {
      await allFilesIn(fullPath, results);
    } else {
      results.push(fullPath);
    }
  }
  return results;
}

// Example call:
allFilesIn('src/').then(files => {
  console.log(files); // e.g.: [ 'src\\foo.cpp', 'src\\bar.cpp', 'src\\include\\foo.h' ]
});
Once you have a single array of all the files it should be easy to use a single forEach to do something for all the files in the unified list.
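For instance, a short sketch of the original assertion loop over the unified list, reusing allFilesIn and the promise-based fs from above (the .foo check mirrors the question's pattern):

it('should check all .foo files', async () => {
  const files = await allFilesIn('src/');
  for (const fullPath of files.filter(f => f.endsWith('.foo'))) {
    const data = await fs.readFile(fullPath, 'utf8');
    // some assertion on data
  }
});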

Related

How do I get an array of folder names in a certain directory? [duplicate]

I was hoping this would be a simple thing, but I cannot find anything out there to do so.
I just want to get all folders/directories within a given folder/directory.
So for example:
<MyFolder>
|- SomeFolder
|- SomeOtherFolder
|- SomeFile.txt
|- SomeOtherFile.txt
|- x-directory
I would expect to get an array of:
["SomeFolder", "SomeOtherFolder", "x-directory"]
Or the above with the path if that was how it was served...
So does anything already exist to do the above?
Promise
import { readdir } from 'fs/promises'

const getDirectories = async source =>
  (await readdir(source, { withFileTypes: true }))
    .filter(dirent => dirent.isDirectory())
    .map(dirent => dirent.name)

Callback
import { readdir } from 'fs'

const getDirectories = (source, callback) =>
  readdir(source, { withFileTypes: true }, (err, files) => {
    if (err) {
      callback(err)
    } else {
      callback(
        files
          .filter(dirent => dirent.isDirectory())
          .map(dirent => dirent.name)
      )
    }
  })

Synchronous
import { readdirSync } from 'fs'

const getDirectories = source =>
  readdirSync(source, { withFileTypes: true })
    .filter(dirent => dirent.isDirectory())
    .map(dirent => dirent.name)
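A hypothetical call of the promise variant, using the MyFolder tree from the question:

// inside an async function (or a module with top-level await)
const dirs = await getDirectories('MyFolder')
console.log(dirs) // ["SomeFolder", "SomeOtherFolder", "x-directory"]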
List directories using a path.
function getDirectories(path) {
  return fs.readdirSync(path).filter(function (file) {
    return fs.statSync(path + '/' + file).isDirectory();
  });
}
Recursive solution
I came here in search of a way to get all of the subdirectories, and all of their subdirectories, etc. Building on the accepted answer, I wrote this:
const fs = require('fs');
const path = require('path');

function flatten(lists) {
  return lists.reduce((a, b) => a.concat(b), []);
}

function getDirectories(srcpath) {
  return fs.readdirSync(srcpath)
    .map(file => path.join(srcpath, file))
    .filter(path => fs.statSync(path).isDirectory());
}

function getDirectoriesRecursive(srcpath) {
  return [srcpath, ...flatten(getDirectories(srcpath).map(getDirectoriesRecursive))];
}
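A quick usage sketch against the question's MyFolder tree (separators will follow the platform, since path.join is used):

getDirectoriesRecursive('MyFolder')
// e.g. [ 'MyFolder', 'MyFolder/SomeFolder', 'MyFolder/SomeOtherFolder', 'MyFolder/x-directory' ]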
This should do it:
CoffeeScript (sync)
fs = require 'fs'

getDirs = (rootDir) ->
  files = fs.readdirSync(rootDir)
  dirs = []
  for file in files
    if file[0] != '.'
      filePath = "#{rootDir}/#{file}"
      stat = fs.statSync(filePath)
      if stat.isDirectory()
        dirs.push(file)
  return dirs
CoffeeScript (async)
fs = require 'fs'

getDirs = (rootDir, cb) ->
  fs.readdir rootDir, (err, files) ->
    dirs = []
    for file, index in files
      if file[0] != '.'
        filePath = "#{rootDir}/#{file}"
        fs.stat filePath, (err, stat) ->
          if stat.isDirectory()
            dirs.push(file)
          if files.length == (index + 1)
            cb(dirs)
JavaScript (async)
var fs = require('fs');

var getDirs = function(rootDir, cb) {
  fs.readdir(rootDir, function(err, files) {
    var dirs = [];
    for (var index = 0; index < files.length; ++index) {
      var file = files[index];
      if (file[0] !== '.') {
        var filePath = rootDir + '/' + file;
        fs.stat(filePath, function(err, stat) {
          if (stat.isDirectory()) {
            dirs.push(this.file);
          }
          if (files.length === (this.index + 1)) {
            return cb(dirs);
          }
        }.bind({index: index, file: file}));
      }
    }
  });
}
Alternatively, if you are able to use external libraries, you can use filehound. It supports callbacks, promises and sync calls.
Using promises:
const Filehound = require('filehound');

Filehound.create()
  .path("MyFolder")
  .directory() // only search for directories
  .find()
  .then((subdirectories) => {
    console.log(subdirectories);
  });
Using callbacks:
const Filehound = require('filehound');

Filehound.create()
  .path("MyFolder")
  .directory()
  .find((err, subdirectories) => {
    if (err) return console.error(err);
    console.log(subdirectories);
  });
Sync call:
const Filehound = require('filehound');

const subdirectories = Filehound.create()
  .path("MyFolder")
  .directory()
  .findSync();

console.log(subdirectories);
For further information (and examples), check out the docs: https://github.com/nspragg/filehound
Disclaimer: I'm the author.
With Node.js >= v10.13.0, fs.readdirSync will return an array of fs.Dirent objects if the withFileTypes option is set to true.
So you can use:
const fs = require('fs')

const directories = source => fs.readdirSync(source, {
  withFileTypes: true
}).reduce((a, c) => {
  c.isDirectory() && a.push(c.name)
  return a
}, [])
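Hypothetical usage against the question's example tree:

console.log(directories('MyFolder')) // ["SomeFolder", "SomeOtherFolder", "x-directory"]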
var fs = require('fs');
var path = require('path');

var getDirectories = (rootdir, cb) => {
  fs.readdir(rootdir, (err, files) => {
    if (err) throw err;
    var dirs = files
      .map(filename => path.join(rootdir, filename))
      .filter(pathname => fs.statSync(pathname).isDirectory());
    return cb(dirs);
  })
}

// note: the directory argument ('MyFolder' here) is illustrative
getDirectories('MyFolder', myDirectories => console.log(myDirectories));
Using fs-extra, which promisifies the async fs calls, and async/await syntax:
const fs = require("fs-extra");

async function getDirectories(path) {
  let filesAndDirectories = await fs.readdir(path);
  let directories = [];
  await Promise.all(
    filesAndDirectories.map(name => {
      // note: plain string concatenation, so `path` should end with a slash
      return fs.stat(path + name)
        .then(stat => {
          if (stat.isDirectory()) directories.push(name)
        })
    })
  );
  return directories;
}

// inside an async function (or a module with top-level await):
let directories = await getDirectories("/")
This answer does not use blocking functions like readdirSync or statSync. It does not use external dependencies nor find itself in the depths of callback hell.
Instead we use modern JavaScript conveniences like Promises and async/await syntax. And asynchronous results are processed in parallel, not sequentially -
const { readdir, stat } =
  require ("fs") .promises

const { join } =
  require ("path")

const dirs = async (path = ".") =>
  (await stat (path)) .isDirectory ()
    ? Promise
        .all
          ( (await readdir (path))
              .map (p => dirs (join (path, p)))
          )
        .then
          ( results =>
              [] .concat (path, ...results)
          )
    : []
I'll install an example package, and then test our function -
$ npm install ramda
$ node
Let's see it work -
> dirs (".") .then (console.log, console.error)
[ '.'
, 'node_modules'
, 'node_modules/ramda'
, 'node_modules/ramda/dist'
, 'node_modules/ramda/es'
, 'node_modules/ramda/es/internal'
, 'node_modules/ramda/src'
, 'node_modules/ramda/src/internal'
]
Using a generalised module, Parallel, we can simplify the definition of dirs -
const Parallel =
  require ("./Parallel")

const dirs = async (path = ".") =>
  (await stat (path)) .isDirectory ()
    ? Parallel (readdir (path))
        .flatMap (f => dirs (join (path, f)))
        .then (results => [ path, ...results ])
    : []
The Parallel module used above was a pattern that was extracted from a set of functions designed to solve a similar problem. For more explanation, see this related Q&A.
And an async version of getDirectories; you need the async module for this:
var fs = require('fs');
var path = require('path');
var async = require('async'); // https://github.com/caolan/async

// Original function
function getDirsSync(srcpath) {
  return fs.readdirSync(srcpath).filter(function(file) {
    return fs.statSync(path.join(srcpath, file)).isDirectory();
  });
}

function getDirs(srcpath, cb) {
  fs.readdir(srcpath, function (err, files) {
    if (err) {
      console.error(err);
      return cb([]);
    }
    var iterator = function (file, cb) {
      fs.stat(path.join(srcpath, file), function (err, stats) {
        if (err) {
          console.error(err);
          return cb(false);
        }
        cb(stats.isDirectory());
      })
    }
    async.filter(files, iterator, cb);
  });
}
A fully async version with ES6, only native packages (fs.promises and async/await), which does the file operations in parallel:
const fs = require('fs');
const path = require('path');

async function listDirectories(rootPath) {
  const fileNames = await fs.promises.readdir(rootPath);
  const filePaths = fileNames.map(fileName => path.join(rootPath, fileName));
  const filePathsAndIsDirectoryFlagsPromises = filePaths.map(async filePath => ({
    path: filePath,
    isDirectory: (await fs.promises.stat(filePath)).isDirectory()
  }))
  const filePathsAndIsDirectoryFlags = await Promise.all(filePathsAndIsDirectoryFlagsPromises);
  return filePathsAndIsDirectoryFlags
    .filter(filePathAndIsDirectoryFlag => filePathAndIsDirectoryFlag.isDirectory)
    .map(filePathAndIsDirectoryFlag => filePathAndIsDirectoryFlag.path);
}
Tested, it works nicely.
You can use graph-fs
const {Node} = require("graph-fs");

const directory = new Node("/path/to/directory");
const subDirectories = directory.children.filter(child => child.is.directory);
Using the glob package, just add a trailing slash to find directories only:
import {promise as glob} from "glob-promise"
const firstLevelFolders = await glob("MyFolder/*/")
const recursiveFolders = await glob("MyFolder/**/")
CoffeeScript version of this answer, with proper error handling:
fs = require "fs"
{join} = require "path"
async = require "async"

get_subdirs = (root, callback) ->
  fs.readdir root, (err, files) ->
    return callback err if err
    subdirs = []
    async.each files,
      (file, callback) ->
        fs.stat join(root, file), (err, stats) ->
          return callback err if err
          subdirs.push file if stats.isDirectory()
          callback null
      (err) ->
        return callback err if err
        callback null, subdirs
Depends on async
Alternatively, use a module for this!
(There are modules for everything. [citation needed])
If you need a fully async version, you can do something like this:
Record the directory length and use it as an indicator of whether all the async stat tasks have finished.
When the async stat tasks have finished, every file's stat has been checked, so call the callback.
This only works as long as Node.js is single-threaded, because it assumes no two async tasks will increment the counter at the same time.
'use strict';

var fs = require("fs");
var path = require("path");

var basePath = "./";

function result_callback(results) {
  results.forEach((obj) => {
    console.log("fileName: " + obj.fileName);
    console.log("isFile: " + obj.isFile);
  });
};

fs.readdir(basePath, (err, files) => {
  var results = [];
  var total = files.length;
  var finished = 0;
  files.forEach((fileName) => {
    // console.log(fileName);
    var fullPath = path.join(basePath, fileName);
    fs.stat(fullPath, (err, stat) => {
      // this works because Node.js is single-threaded,
      // so the counter will not be incremented by two callbacks at the same time
      finished++;
      if (stat.isFile()) {
        results.push({
          fileName: fileName,
          isFile: stat.isFile()
        });
      }
      if (finished == total) {
        result_callback(results);
      }
    });
  });
});
As you can see, this is a "depth first" approach, it could result in callback hell, and it is not quite "functional". People try to solve this problem with Promises, by wrapping the async task in a Promise object.
'use strict';

var fs = require("fs");
var path = require("path");

var basePath = "./";

function result_callback(results) {
  results.forEach((obj) => {
    console.log("fileName: " + obj.fileName);
    console.log("isFile: " + obj.isFile);
  });
};

fs.readdir(basePath, (err, files) => {
  var results = [];
  var total = files.length;
  var finished = 0;
  var promises = files.map((fileName) => {
    // console.log(fileName);
    var fullPath = path.join(basePath, fileName);
    return new Promise((resolve, reject) => {
      // try to replace fullPath with "aaa", it will reject
      fs.stat(fullPath, (err, stat) => {
        if (err) {
          reject(err);
          return;
        }
        var obj = {
          fileName: fileName,
          isFile: stat.isFile()
        };
        resolve(obj);
      });
    });
  });
  Promise.all(promises).then((values) => {
    console.log("All the promises resolved");
    console.log(values);
    console.log("Filter out folders: ");
    values
      .filter((obj) => obj.isFile)
      .forEach((obj) => {
        console.log(obj.fileName);
      });
  }, (reason) => {
    console.log("Not all the promises resolved");
    console.log(reason);
  });
});
Using the fs and path modules you can get the folders. If you want files instead, change isDirectory() to isFile() (see the Node.js fs.Stats docs). Finally, you can get the file's name, extension, and so on from the Node.js path module.
var fs = require("fs"),
    path = require("path");

// your <MyFolder> path
var p = "MyFolder";

fs.readdir(p, function (err, files) {
  if (err) {
    throw err;
  }
  // this gets every folder and file directly under <MyFolder>
  files.map(function (file) {
    // return the file or folder path, such as MyFolder/SomeFile.txt
    return path.join(p, file);
  }).filter(function (file) {
    // synchronous check: keep the entry only if it is a directory
    return fs.statSync(file).isDirectory();
  }).forEach(function (files) {
    // each remaining entry is a folder name, so you can handle the folder here
    console.log("%s", files);
  });
});
Just in case anyone else ends up here from a web search, and has Grunt already in their dependency list, the answer to this becomes trivial. Here's my solution:
/**
 * Return all the subfolders of this path
 * @param {String} parentFolderPath - valid folder path
 * @param {String} glob ['/*'] - optional glob so you can do recursive if you want
 * @returns {String[]} subfolder paths
 */
getSubfolders = (parentFolderPath, glob = '/*') => {
  return grunt.file.expand({filter: 'isDirectory'}, parentFolderPath + glob);
}
Another recursive approach
Thanks to Mayur for telling me about withFileTypes. I wrote the following code to get the files of a particular folder recursively. It can easily be modified to get only directories.
const { readdirSync } = require('fs')
const path = require('path')

const getFiles = (dir, base = '') => readdirSync(dir, {withFileTypes: true}).reduce((files, file) => {
  const filePath = path.join(dir, file.name)
  const relativePath = path.join(base, file.name)
  if (file.isDirectory()) {
    return files.concat(getFiles(filePath, relativePath))
  } else if (file.isFile()) {
    file.__fullPath = filePath
    file.__relativePath = relativePath
    return files.concat(file)
  }
  return files // skip entries that are neither plain files nor directories
}, [])
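A hypothetical call (the 'src' directory is just an example); each returned Dirent carries the __fullPath and __relativePath annotations added above:

const files = getFiles('src')
files.forEach(file => console.log(file.name, '->', file.__relativePath))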
functional programming
const fs = require('fs')
const path = require('path')
const R = require('ramda')

const getDirectories = pathName => {
  const isDirectory = pathName => fs.lstatSync(pathName).isDirectory()
  const mapDirectories = pathName => R.map(name => path.join(pathName, name), fs.readdirSync(pathName))
  const filterDirectories = listPaths => R.filter(isDirectory, listPaths)
  return {
    paths: R.pipe(mapDirectories)(pathName),
    pathsFiltered: R.pipe(mapDirectories, filterDirectories)(pathName)
  }
}
You could use dree, if using a module is an option:
const dree = require('dree');

const options = {
  depth: 1
};

const fileCallback = function() {};

const directories = [];
const dirCallback = function(dir) {
  directories.push(dir.name);
};

dree.scan('./dir', options, fileCallback, dirCallback);
console.log(directories);
The directories which are direct children of the specified path ("./dir") will be printed.
If you do not set the option depth: 1, you will also obtain all the nested directories recursively, not only the direct children of the specified path.

getting fs.readFile to work with async await and promisify

I am trying to create a node script that looks into my inbox of recently created markdown notes, finds the metadata at the top of that note, finds the category within that metadata and then moves the file into its relevant folder. I keep getting the error message:
(node:82025) UnhandledPromiseRejectionWarning: ReferenceError: paths is not defined
I believe this is because my code is not waiting for the readFile function to finish, despite my having labeled it async/await and having wrapped it in promisify at the top of the file. Could someone please point me in the right direction as to where I'm going wrong? Complete code below (other dependencies are for another script):
const fs = require('fs');
const path = require('path');
const firstline = require('firstline');
const moment = require('moment');
const { promisify } = require('util');
const readFile = promisify(fs.readFile);

const pathToResources =
  '/Users/Rob/Library/Mobile Documents/9CR7T2DMDG~com~ngocluu~onewriter/Documents/Test/Resources/';

function markdownNotesOnly(files) {
  return (notes = files.filter((file) => path.extname(file) === '.md'));
}

async function getNewPath(note) {
  let oldPath = path.join(__dirname, note);
  let paths = {};
  let newPath = '';
  readFile(oldPath, (err, data) => {
    if (err) throw err;
    let metadata = data
      .toString()
      .match(/-{3}\n.*\n-{3}/gms)
      .toString();
    let category = metadata.match(/#§-\d{3}-.*/);
    if (!category) {
      paths = { oldPath, newPath };
      return paths;
    }
    category = category.toString().substring(3);
    newPath = path.join(pathToResources, category);
    paths = { oldPath, newPath };
    return paths;
  });
}

function sortFiles(startPath) {
  fs.readdir(startPath, (err, files) => {
    if (err) throw err;
    markdownNotesOnly(files); // returns array called notes
    notes.map(async (note) => {
      await getNewPath(note);
      let { oldPath, newPath } = paths;
      fs.rename(oldPath, path.join(newPath, note), (err) => {
        if (err) throw err;
        console.log('File moved successfully');
      });
    });
  });
}
Update:
Followed the advice of Mestre San below and got it working this morning. Here is my finished code if it is of use to anybody.
import { readdir, rename } from 'fs';
import { promises as fsPromises } from 'fs';
import { resolve, extname, join } from 'path';

const __dirname = resolve();
const pathToResources =
  '/Users/Rob/Library/Mobile Documents/9CR7T2DMDG~com~ngocluu~onewriter/Documents/Test/Resources/';

function markdownNotesOnly(files) {
  let notes = files.filter((file) => extname(file) === '.md');
  return notes;
}

async function getPaths(note) {
  const metadataRegex = /-{3}\n.*\n-{3}/gms;
  const categoryRegex = /#§-\d{3}-.*/;
  let oldPath = join(__dirname, note);
  let newPath = '';
  const file = await fsPromises.readFile(oldPath);
  if (file instanceof Error) throw file;
  if (!file.toString().match(metadataRegex)) {
    let paths = { oldPath, newPath };
    return paths;
  }
  let metadata = file.toString().match(metadataRegex).toString();
  let category = metadata.match(categoryRegex);
  if (!category) {
    let paths = { oldPath, newPath };
    return paths;
  }
  category = category.toString().substring(3);
  newPath = join(pathToResources, category);
  let paths = { oldPath, newPath };
  return paths;
}

export function sortFiles(startPath) {
  readdir(startPath, (err, files) => {
    if (err) throw err;
    let notes = markdownNotesOnly(files);
    notes.map(async (note) => {
      let paths = await getPaths(note);
      let { oldPath, newPath } = paths;
      rename(oldPath, join(newPath, note), (err) => {
        if (err) throw err;
        console.log('File moved successfully');
      });
    });
  });
}
The snippet you posted is definitely incomplete; since we can only give advice based on what we see, here go my considerations:
You are creating a promisified function but you are using it with a callback. You should pick one style. The way the code is now, your callback will never be called. You have to either use fs.readFile there or readFile(oldPath).then(data => ...). Even better would be to use fs.promises.readFile if it is available in the Node.js version you are using.
I can see that the paths variable is indeed not defined inside the sortFiles function. The code declares that variable inside the function getNewPath, making it unavailable inside the sortFiles function.
The variable notes is not defined either. The code creates the notes variable inside the function markdownNotesOnly. Although sharing these values across functions using the global scope is not encouraged, if you choose to do that you should definitely declare it outside that function with let notes.
sortFiles is not being called here, and since it uses an async function and you are not passing any callback to it as an argument, it will pretty much work as a fire-and-forget kind of thing.
The sortFiles function has the word sort in its name but a rename call in its body; that can be confusing, even for yourself in the future.
But, the way to make your code stop throwing that very same error is by doing something like this:
'use strict'

const fs = require('fs')
const path = require('path')
// const firstline = require('firstline')
// const moment = require('moment')
const { promisify } = require('util')
const readFile = promisify(fs.readFile)

const pathToResources =
  '/Users/Rob/Library/Mobile Documents/9CR7T2DMDG~com~ngocluu~onewriter/Documents/Test/Resources/'

let notes, paths

function markdownNotesOnly (files) {
  'use strict'
  return (notes = files.filter((file) => path.extname(file) === '.md'))
}

async function getNewPath (note) {
  const oldPath = path.join(__dirname, note)
  // let paths = {}
  let newPath = ''
  return readFile(oldPath).then(data => {
    const metadata = data
      .toString()
      .match(/-{3}\n.*\n-{3}/gms)
      .toString()
    let category = metadata.match(/#§-\d{3}-.*/)
    if (!category) {
      paths = { oldPath, newPath }
      return paths
    }
    category = category.toString().substring(3)
    newPath = path.join(pathToResources, category)
    paths = { oldPath, newPath }
    return paths
  })
}

function sortFiles (startPath) {
  fs.readdir(startPath, (err, files) => {
    if (err) throw err
    markdownNotesOnly(files) // returns array called notes
    notes.map(async (note) => {
      await getNewPath(note)
      const { oldPath, newPath } = paths
      fs.rename(oldPath, path.join(newPath, note), (err) => {
        if (err) throw err
        console.log('File moved successfully')
      })
    })
  })
}

fs.readdir not reading files extracted to a local directory (function not even running)

I have extracted zip files to a local directory within my lambda function, however the fs.readdir function is not working and is also not returning an error. I think maybe I have a syntax error in the way I set up the function to run asynchronously.
// Make Directories for Zipped and Unzipped files
try {
  const zipDirFolder = await mkdirp(localZippedFolder, { recursive: true });
  const unZipDirFolder = await mkdirp(localUnzippedFolder, { recursive: true });
  console.log(unZipDirFolder);
  // Download files from s3 Bucket
  let newFolder = await s3.getObject(params).promise()
    .then(data => {
      console.log(data);
      return data;
    });
  const newZipFile = newFolder.Body;
  // Extract files from zipped folder and store them in a local directory
  let filezFolder = await fs.createReadStream(params.Key)
    .pipe(unzipper.Extract({path: unZipDirFolder}));
  console.log(unZipDirFolder);
  // Read contents of that local directory
  await fs.readdir(unZipDirFolder, function (err, files) {
    if (err) {
      console.log('THERE WAS AN ERROR:' + err);
    } else {
      console.log(files);
    }
  });
}
Problem: fs.readdir is not running nor is it returning an error message.
The problem is in this statement:
let filezFolder = await fs.createReadStream(params.Key)
  .pipe(unzipper.Extract({path: unZipDirFolder}));
fs.createReadStream().pipe() does not return a promise. So using await on it doesn't do anything useful. So, you have not waited until the .pipe() is done. Thus, you are calling fs.readdir() before the unzip operation is done.
At the moment, streams are not particularly promise-friendly. You will probably need to register for some sort of event on the resulting stream that indicates when your unzip operation is done and then do the fs.readdir() from within that event handler. I don't know your unzip library myself to know which event or how to best monitor it for completion.
Since .pipe() returns a writable stream, you can probably do this:
fs.createReadStream(params.Key)
  .pipe(unzipper.Extract({path: unZipDirFolder}))
  .on('finish', () => {
    fs.readdir(...)
  }).on('error', (err) => {
    // error handling here
    console.log(err);
  });
Here's a simple little stand-alone program that shows the concept I'm talking about:
const unzipper = require('unzipper');
const fs = require('fs');
const fsp = fs.promises;
const path = require('path');

const inputZip = path.join(__dirname, "zip", "photos.zip");
const outputZipDir = path.join(__dirname, "zip-output")

function run() {
  fs.createReadStream(inputZip)
    .pipe(unzipper.Extract({ path: outputZipDir }))
    .on('finish', async () => {
      let files = await fsp.readdir(outputZipDir);
      console.log(files);
      // use the files here
    }).on('error', err => {   // note: the stream event name is 'error'
      console.log(err);
    });
}

run();
The setup for this program is to put photos.zip into a zip subdirectory from where this program is run and to create a zip-output subdirectory from where this program is run for the unzipped files to go.
And, here's a promisified version that uses the same setup:
const unzipper = require('unzipper');
const fs = require('fs');
const fsp = fs.promises;
const path = require('path');

const inputZip = path.join(__dirname, "zip", "photos.zip");
const outputZipDir = path.join(__dirname, "zip-output")

// returns a promise that resolves/rejects when the unzip operation is done
function unzip(inputZip, outputDir) {
  return new Promise((resolve, reject) => {
    fs.createReadStream(inputZip)
      .pipe(unzipper.Extract({ path: outputDir }))
      .on('finish', resolve)
      .on('error', reject);   // note: the stream event name is 'error'
  });
}

async function run2() {
  await unzip(inputZip, outputZipDir);
  let files = await fsp.readdir(outputZipDir);
  console.log(files);
}

run2().catch(err => {
  console.log(err);
});
This is probably because you're trying to await fs.readdir and at the same time passing a callback. You shouldn't do both. Either remove the await and keep the callback, or make your lambda function async, drop the callback, and await the promise-based API.
// Using await (don't forget to place this in an async function;
// the promise-based API lives under fs.promises)
const res = await fs.promises.readdir(yourFilePath);

// Using callbacks
fs.readdir(yourFilePath, (err, files) => {
  if (err) {
    console.log('THERE WAS AN ERROR:' + err);
  } else {
    console.log(files);
  }
});
Or you could use fs.readdirSync and do
const res = fs.readdirSync(yourFilePath)

function is being executed twice when called only once

I am making a program that will read the content of files inside nested folders. For now I am just trying to log the content of a file to the console, but I am getting two logs instead of one. Here is what I have done so far:
const fs = require('fs');
const path = require('path');
const { promisify } = require('util');
const getStats = promisify(fs.stat);
const readdir = promisify(fs.readdir);
const http = require('http');

handle_files = async (req, res) => {
  let files = await scanDir("logs_of_109");
  let result = await read_content(files)
  check_file_content(result)
  res.writeHead(200, { 'Content-Type': 'text/html' });
  // console.log(result)
  res.write("Hello");
  res.end();
};

check_file_content = (file_data) => {
  console.log(file_data[1])
}

async function read_content(files) {
  let file_data = []
  files.map(file => {
    let start_index = file.toString().lastIndexOf('.') + 1
    let ext = file.substring(start_index, file.length)
    if (ext == 'data') {
      file_data.push(fs.readFileSync(file, { encoding: 'utf-8' }))
    }
  })
  return file_data
}

http.createServer(handle_files).listen(8080)

async function scanDir(dir, fileList = []) {
  // fetch the list of files from the given directory
  let files = await readdir(dir);
  // loop through all the files
  for (let file of files) {
    // join the new folder name after the parent folder,
    // e.g. logs_of_109/24
    let filePath = path.join(dir, file);
    try {
      let stats = await getStats(filePath);
      if (!stats.isDirectory()) {
        // add the filepath to the array
        fileList.push(filePath);
      }
      if (stats.isDirectory()) {
        await scanDir(filePath, fileList);
      }
    } catch (err) {
      // Drop it on the floor...
    }
  }
  return fileList;
}
I expect the file content to be logged only once but it is logging twice on my console. Why is this happening and how do I stop this?
Your browser is making two requests to your server, most likely one for the URL you put in the address bar and another for favicon.ico. (You can quickly tell by opening the dev tools on your browser and going to the Network tab.)
handleFiles should look at req (specifically its url property) and act according to what's being requested. (This is something the code should be doing anyway.)
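A minimal sketch of such a guard (the '/' route check is an assumption; adjust it to the path you actually serve):

handle_files = async (req, res) => {
  // ignore anything that isn't the page we serve (e.g. /favicon.ico)
  if (req.url !== '/') {
    res.writeHead(404);
    res.end();
    return;
  }
  // ...rest of the handler as before...
};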
Side note 1: You're passing an async function into something (createServer) that won't do anything with the promise it returns. If you do that, it's important to catch any errors in the function locally within the function, since (again) nothing else is going to handle them. E.g.:
handle_files = async (req, res) => {
  try {
    let files = await scanDir("logs_of_109");
    let result = await read_content(files)
    check_file_content(result)
    res.writeHead(200, { 'Content-Type': 'text/html' });
    // console.log(result)
    res.write("Hello");
    res.end();
  } catch (e) {
    // ...handle error here...
  }
};
Side note 2: That code is falling prey to The Horror of Implicit Globals¹. Declare your variables in the appropriate scope. Not declaring them, in loose mode, makes them globals. (Also recommend using strict mode, so you get an error for this.)
¹ (that's a post on my anemic little blog)
The answer above is correct. My approach is to solve it via 'routing' of some kind.
Here is a small, basic example of how it can be done:
const fs = require('fs');
const path = require('path');
const { promisify } = require('util');
const getStats = promisify(fs.stat);
const readdir = promisify(fs.readdir);
const http = require('http');

handle_routes = async (req, res) => {
  switch (req.url) {
    case '/files':
      handle_files(req, res);
      break; // without this, execution falls through to the default case
    default:
      console.log('for default page');
  }
}

handle_files = async (req, res) => {
  let files = await scanDir("logs_of_109");
  let result = await read_content(files)
  check_file_content(result)
  res.writeHead(200, { 'Content-Type': 'text/html' });
  res.write("Hello");
  res.end();
};

check_file_content = (file_data) => {
  console.log(file_data[1])
}

async function read_content(files) {
  let file_data = []
  files.map(file => {
    let start_index = file.toString().lastIndexOf('.') + 1
    let ext = file.substring(start_index, file.length)
    if (ext == 'data') {
      file_data.push(fs.readFileSync(file, { encoding: 'utf-8' }))
    }
  })
  return file_data
}

http.createServer(handle_routes).listen(8080)

async function scanDir(dir, fileList = []) {
  // fetch the list of files from the given directory
  let files = await readdir(dir);
  // loop through all the files
  for (let file of files) {
    // join the new folder name after the parent folder,
    // e.g. logs_of_109/24
    let filePath = path.join(dir, file);
    try {
      let stats = await getStats(filePath);
      if (!stats.isDirectory()) {
        // add the filepath to the array
        fileList.push(filePath);
      }
      if (stats.isDirectory()) {
        await scanDir(filePath, fileList);
      }
    } catch (err) {
      // Drop it on the floor...
    }
  }
  return fileList;
}
This lets you invoke the handle_files function by going to the localhost:8080/files URL.

fs.readdir ignore directories

I would like to scan the folder but ignore all the folders/directories included in it. All I have in C:/folder/ are .txt files and other folders; I just want to scan the .txt files and ignore the folders.
app.get('/generatE', function (req, res) {
  const logsFolder = 'C:/folder/';
  fs.readdir(logsFolder, (err, files) => {
    if (err) {
      res.send("[empty]");
      return;
    }
    var lines = [];
    files.forEach(function(filename) {
      var logFileLines = fs.readFileSync(logsFolder + filename, 'ascii').toString().split("\n");
      logFileLines.forEach(function(logFileLine) {
        if (logFileLine.match(/.*AUDIT*./)) {
          lines.push(logFileLine + '\n');
        }
      })
    })
    // (snippet truncated here; presumably the collected lines are sent in the response)
  })
})
Use the fs.readdir or fs.readdirSync method with the option { withFileTypes: true }, then filter using dirent.isFile() (requires Node 10.10+).
Sync version
const fs = require('fs');

const dirents = fs.readdirSync(DIRECTORY_PATH, { withFileTypes: true });
const filesNames = dirents
  .filter(dirent => dirent.isFile())
  .map(dirent => dirent.name);
// use filesNames
Async version (with async/await, requires Node 11+)
import { promises as fs } from 'fs';

async function listFiles(directory) {
  const dirents = await fs.readdir(directory, { withFileTypes: true });
  return dirents
    .filter(dirent => dirent.isFile())
    .map(dirent => dirent.name);
}
Async version (with callbacks)
const fs = require('fs');

fs.readdir(DIRECTORY_PATH, { withFileTypes: true }, (err, dirents) => {
  const filesNames = dirents
    .filter(dirent => dirent.isFile())
    .map(dirent => dirent.name);
  // use filesNames
});
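Applied to the question, a hypothetical sketch reusing listFiles and the promise-based fs from the async version above (the C:/folder/ path and the AUDIT pattern come from the original post):

async function collectAuditLines() {
  const lines = [];
  for (const name of await listFiles('C:/folder/')) {
    const content = await fs.readFile('C:/folder/' + name, 'ascii');
    for (const line of content.split('\n')) {
      if (line.match(/.*AUDIT*./)) lines.push(line + '\n');
    }
  }
  return lines;
}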
Please see diralik's answer, as it is more complete: my answer only works if ALL filenames contain a '.txt' extension.
Why not just filter for files that end in ".txt"?
var fs = require("fs")

fs.readdirSync("./").filter(function(file) {
  if (file.indexOf(".txt") > -1) console.log(file)
})
I should have added previously that, to get an array of these files, you need to return them from the filter callback, as shown below.
var fs = require("fs")

let text_file_array = fs.readdirSync("./").filter(function(file) {
  if (file.indexOf(".txt") > -1) return file;
})
