// codenotworking
const path = require("path");
const fs = require("fs");

log = console.log;

const names = [];

function collectFileNamesRecursively(path) {
  fs.readdir(path, (err, files) => {
    err ? log(err) : log(files);

    // replacing paths
    for (const index in files) {
      const file = files[index];
      files[index] = path.resolve(path, file);
    }

    for (let file of files) {
      fs.stat(file, (err, stat) => {
        err ? log(err) : null;

        if (stat.isDirectory()) {
          collectFileNamesRecursively(file);
        }

        names.push(file);
      });
    }
  });
}

collectFileNamesRecursively(path.join(__dirname, "../public"));
I am using Node.js v10.8.0, and the directory structure is:
- project/
  - debug/
    - codenotworking.js
  - public/
    - js/
      - file2.js
    - file.html
Whenever I run this code I get the following error:
TypeError: path.resolve is not a function
at fs.readdir (C:\backup\project\debug\codenotworking.js:17:24)
at FSReqWrap.oncomplete (fs.js:139:20)
What am I doing wrong here?
You're shadowing your path import by specifying the path parameter in collectFileNamesRecursively. Change the parameter name to something else.
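In other words, something like this avoids the clash (dirPath is just an example name for the renamed parameter):

// minimal fix: rename the parameter so it no longer shadows the path module
function collectFileNamesRecursively(dirPath) {
  fs.readdir(dirPath, (err, files) => {
    // ...same body as before, but resolve against dirPath
    for (const index in files) {
      files[index] = path.resolve(dirPath, files[index]);
    }
    // ...
  });
}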
Apart from that, using recursion with callbacks this way won't work reliably - I would recommend using async/await. Something like:
const path = require('path');
const fs = require('fs');

async function collectFileNamesRecursively(currBasePath, foundFileNames) {
  const dirContents = await fs.promises.readdir(currBasePath);

  for (const file of dirContents) {
    const currFilePath = path.resolve(currBasePath, file);
    const stat = await fs.promises.stat(currFilePath);

    if (stat.isDirectory()) {
      await collectFileNamesRecursively(currFilePath, foundFileNames);
    } else {
      foundFileNames.push(file);
    }
  }
}
This is the code I currently have; how would I adapt it to check each sub-directory?
const fs = require('fs');

module.exports = (client, Discord) => {
  const command_files = fs.readdirSync('./commands/').filter(file => file.endsWith('.js'));

  for (const file of command_files) {
    const command = require(`../commands/${file}`);
    if (command.name) {
      client.commands.set(command.name, command);
    } else {
      continue;
    }
  }
};
And this is the folder layout I have for the commands folder:
You need to wrap the whole code into a function and use some recursion.
Please note that, when using recursion, a depth limit is a wise safeguard.
Something like this should do it:
const fs = require('fs');

module.exports = (client, Discord) => {
  const depth = 3;

  const finder = (path, currentDepth = 0) => {
    if (currentDepth >= depth) {
      return; // Breaks here
    }

    const dirContent = fs.readdirSync(path);
    const command_files = dirContent.filter(file => file.endsWith('.js'));
    const folders = dirContent.filter(file => {
      const dirPath = path + file;
      // Exists + is a directory verification
      return fs.existsSync(dirPath) && fs.lstatSync(dirPath).isDirectory();
    });

    for (const file of command_files) {
      const filePath = '../' + path + file;
      const command = require(filePath);
      if (command.name) {
        client.commands.set(command.name, command);
      } else {
        continue;
      }
    }

    // Loops through folders
    folders.map((folder) => finder(path + folder + '/', currentDepth + 1));
  };

  finder('./commands/');
};
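For context, this would typically be wired up from the bot's entry file. A rough sketch, assuming the exported function lives in command_handler.js and that client.commands has already been created (both are assumptions, not part of the question):

// hypothetical wiring in the bot's entry file; adjust to your discord.js version
const Discord = require('discord.js');

const client = new Discord.Client(); // newer discord.js versions also expect an intents option here
client.commands = new Discord.Collection(); // any Map-like store works with client.commands.set

require('./command_handler')(client, Discord);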
The issue I am running into is that when I test the function against a .zip file, once it reaches the fs.createReadStream/unzipper part it neither runs nor returns an error. I would like to understand what I am doing wrong and what a correct solution would look like.
const AWS = require('aws-sdk');
const fs = require('fs');
const mkdirp = require('mkdirp');
const unzipper = require('unzipper');

exports.handler = async (event, context) => {
  // Variables for bucket init
  let sourceBucket = 'am-doc-mgmt-s3-dev-landing';
  let storageBucket = 'am-doc-mgmt-s3-dev';

  // Variables for folder init and Buffer config
  const localZippedFolder = '/tmp/ZippedStudentData/';
  const localUnzippedFolder = '/tmp/UnzippedStudentData/';
  const ZipBuffer = Buffer.from(localZippedFolder, 'base64');
  const UnzippedBuffer = Buffer.from(localUnzippedFolder, 'base64');

  // Inits AWS s3 Bucket and DynamoDB
  let s3 = new AWS.S3();
  let docClient = new AWS.DynamoDB.DocumentClient({ region: 'us-east-1' });

  // Gets the file bucket and file name of the s3 object from context
  let fileBucket = event.Records[0].s3.bucket.name;
  let fileName = event.Records[0].s3.object.key;

  let params = {
    Bucket: fileBucket,
    Key: fileName
  };

  // Creates temporary variables
  let tempFile = localZippedFolder + fileBucket;
  let tempUnzippedFile = localUnzippedFolder + fileBucket;

  // Make Directories for Zipped and Unzipped files
  try {
    const zipDirFolder = await mkdirp(localZippedFolder, { recursive: true });
    const unZipDirFolder = await mkdirp(localUnzippedFolder, { recursive: true });
    console.log('SUCCESS: unzipped directory created!');
    console.log('SUCCESS: zipped directory create!');

    // Download files from s3 Bucket
    let newFolder = await s3.getObject(params).promise()
      .then(data => {
        console.log(data);
        return data;
      });

    // Extract files from zipped folder and store them in a local directory
    fs.createReadStream(params.Key)
      .pipe(unzipper.Extract({ path: unZipDirFolder }))
      .on('finish', () => {
        fs.readdir(unZipDirFolder);
      }).on('error', (err) => {
        // error handling here
        console.log(err);
      });
  } catch (error) {
    console.log(error);
  }
};
Error: I'm not getting anything back from the fs.createReadStream call. It's as if it just skips over it.
It's honestly hard to figure out what problem you're really trying to solve, since you aren't very specific about that. If you want the containing async function to not resolve its promise until the unzipping is done, you can wrap the stream in a promise like this:
const AWS = require('aws-sdk');
const fs = require('fs');
const mkdirp = require('mkdirp');
const unzipper = require('unzipper');

exports.handler = async (event, context) => {
  // Variables for bucket init
  let sourceBucket = 'am-doc-mgmt-s3-dev-landing';
  let storageBucket = 'am-doc-mgmt-s3-dev';

  // Variables for folder init and Buffer config
  const localZippedFolder = '/tmp/ZippedStudentData/';
  const localUnzippedFolder = '/tmp/UnzippedStudentData/';
  const ZipBuffer = Buffer.from(localZippedFolder, 'base64');
  const UnzippedBuffer = Buffer.from(localUnzippedFolder, 'base64');

  // Inits AWS s3 Bucket and DynamoDB
  let s3 = new AWS.S3();
  let docClient = new AWS.DynamoDB.DocumentClient({ region: 'us-east-1' });

  // Gets the file bucket and file name of the s3 object from context
  let fileBucket = event.Records[0].s3.bucket.name;
  let fileName = event.Records[0].s3.object.key;

  let params = {
    Bucket: fileBucket,
    Key: fileName
  };

  // Creates temporary variables
  let tempFile = localZippedFolder + fileBucket;
  let tempUnzippedFile = localUnzippedFolder + fileBucket;

  // Make Directories for Zipped and Unzipped files
  try {
    const zipDirFolder = await mkdirp(localZippedFolder, { recursive: true });
    const unZipDirFolder = await mkdirp(localUnzippedFolder, { recursive: true });
    console.log('SUCCESS: unzipped directory created!');
    console.log('SUCCESS: zipped directory create!');

    // Download files from s3 Bucket
    let newFolder = await s3.getObject(params).promise();

    await new Promise((resolve, reject) => {
      // Extract files from zipped folder and store them in a local directory
      fs.createReadStream(params.Key)
        .pipe(unzipper.Extract({ path: unZipDirFolder }))
        .on('finish', resolve)
        .on('error', reject);
    });
  } catch (error) {
    console.log(error);
    // rethrow error so caller sees the error
    throw error;
  }
};
And, your caller using this exported function will HAVE to use .then() or await on the returned promise to know when it's done. And, use .catch() or try/catch around await to catch errors.
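For example, a small local test harness (the file name and the event shape here are hypothetical, just enough to exercise the handler) could look like this:

// hypothetical local test harness; bucket and key values are placeholders
const { handler } = require('./index');

const fakeEvent = {
  Records: [
    { s3: { bucket: { name: 'some-bucket' }, object: { key: 'some-archive.zip' } } }
  ]
};

(async () => {
  try {
    await handler(fakeEvent, {}); // resolves only after the unzip stream finishes
    console.log('handler finished');
  } catch (err) {
    console.error('handler failed:', err); // the rethrown error lands here
  }
})();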
If someone's open to using Python, they can use a buffer to read and unzip the files. Something like this:
import io
import zipfile
from io import BytesIO

# s3_resource, sourcebucketname, filekey, destinationbucket and logger are
# assumed to be set up elsewhere (boto3 resources / logging config)
zipped_file = s3_resource.Object(bucket_name=sourcebucketname, key=filekey)
buffer = BytesIO(zipped_file.get()["Body"].read())
zipped = zipfile.ZipFile(buffer)

for file in zipped.namelist():
    logger.info(f'current file in zipfile: {file}')
    final_file_path = file + '.extension'
    with zipped.open(file, "r") as f_in:
        content = f_in.read()
        destinationbucket.upload_fileobj(
            io.BytesIO(content),
            final_file_path,
            ExtraArgs={"ContentType": "text/plain"}
        )
There's also a tutorial here: https://betterprogramming.pub/unzip-and-gzip-incoming-s3-files-with-aws-lambda-f7bccf0099c9
I want to check if the path is a file or a directory. If it's a directory, then log the directories and files separately. Later I want to send them as a JSON object.
const testFolder = './data/';

fs.readdir(testFolder, (err, files) => {
  files.forEach(file => {
    console.log(`FILES: ${file}`);
  });
});
Edit:
If I try this:
fs.readdir(testFolder, (err, files) => {
  files.forEach(file => {
    if (fs.statSync(file).isDirectory()) {
      console.log(`DIR: ${file}`);
    } else {
      console.log(`FILE: ${file}`);
    }
  });
});
I get this error:
nodejs binding.lstat(pathModule._makeLong(path))
Update: Found the solution. I had to use testFolder + file, like this:
if (fs.statSync(testFolder + file).isDirectory()) {
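For completeness, a minimal corrected version of the loop might look like this (using path.join instead of string concatenation is just an alternative way to build the full path):

const fs = require('fs');
const path = require('path');

const testFolder = './data/';

fs.readdir(testFolder, (err, files) => {
  if (err) return console.error(err);
  files.forEach((file) => {
    const fullPath = path.join(testFolder, file); // same effect as testFolder + file here
    if (fs.statSync(fullPath).isDirectory()) {
      console.log(`DIR: ${file}`);
    } else {
      console.log(`FILE: ${file}`);
    }
  });
});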
A quick Google search turns up:
var fs = require('fs');
var stats = fs.statSync("c:\\dog.jpg");
console.log('is file ? ' + stats.isFile());
read: http://www.technicalkeeda.com/nodejs-tutorials/how-to-check-if-path-is-file-or-directory-using-nodejs
Since Node 10.10, fs.readdir has a withFileTypes option which makes it return fs.Dirent directory entries instead of just filenames. A directory entry contains useful methods such as isDirectory or isFile.
Your example would then be solved by:
const testFolder = './data/';

fs.readdir(testFolder, { withFileTypes: true }, (err, dirEntries) => {
  dirEntries.forEach((dirEntry) => {
    const { name } = dirEntry;
    if (dirEntry.isDirectory()) {
      console.log(`DIR: ${name}`);
    } else {
      console.log(`FILE: ${name}`);
    }
  });
});
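If you prefer the promise-based API, the same option works there too; a small sketch, still assuming Node 10.10+ (fs.promises was still experimental in that range):

const fs = require('fs');

async function listDir(testFolder) {
  const dirEntries = await fs.promises.readdir(testFolder, { withFileTypes: true });
  for (const dirEntry of dirEntries) {
    console.log(`${dirEntry.isDirectory() ? 'DIR' : 'FILE'}: ${dirEntry.name}`);
  }
}

listDir('./data/').catch(console.error);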
I want to create a separate module with a function that takes a callback, which I call in the main file.
My program should take 3 args: first a directory, then a file extension, and finally a callback function that should filter the directory by file extension.
Here's my module.js file
var fs = require('fs');

module.exports = function (directory, fileExtension, callbackfun) {
  fs.readdir(directory, callbackfun);
};
Then in the main file I import the module and use the function:
modulejs(process.argv[2], process.argv[3], callbackfun(callbackfn));

var callbackfn = function (err, data, fileExtension) {
  console.log(fileExtension);
  let filtred = data.filter(file => { if (file.indexOf("." + fileExtension) > -1) { console.log(file); } });
};

function callbackfun(callbackfunc1) {
  callbackfunc1(err, data, fileExtension);
}
The error that I actually get is:
ReferenceError: err is not defined
UPDATE: I have edited my code; I get a different error now.
UPDATE 2: I found the solution on the internet but don't understand how it works; if anybody could explain it to me step by step, that would be nice.
module.js

const fs = require('fs')
const path = require('path')

module.exports = function (dir, filterStr, callback) {
  fs.readdir(dir, function (err, list) {
    if (err) {
      return callback(err)
    }

    list = list.filter(function (file) {
      return path.extname(file) === '.' + filterStr
    })

    callback(null, list)
  })
}
main.js
const filterFn = require('./module.js')

const dir = process.argv[2]
const filterStr = process.argv[3]

filterFn(dir, filterStr, function (err, list) {
  console.log(list);
  if (err) {
    return console.error('There was an error:', err)
  }

  list.forEach(function (file) {
    console.log(file)
  })
})
I'm trying to watch for any newly added files on an FTP server, whose directory is mapped to a drive on the server that's running the Node application. The problem is that no events are registered for files added through FTP; files modified or created through the Node application are picked up fine.
I'm currently using chokidar to watch the directory and log any events with the simple code below:
const watcher = chokidar.watch('./myDir', {
  persistent: true,
  awaitWriteFinish: {
    stabilityThreshold: 2000,
    pollInterval: 100
  }
});

watcher
  .on('add', path => console.log(`File ${path} has been added`))
  .on('change', path => console.log(`File ${path} has been changed`));
I've added the awaitWriteFinish option to see whether it will register once the FTP transfer has completed, but with no joy.
Any suggestions?
You can watch a directory using the native module fs:
const fs = require('fs');

const folderPath = './test';
const pollInterval = 300;
let folderItems = {};

setInterval(() => {
  fs.readdirSync(folderPath)
    .forEach((file) => {
      let path = `${folderPath}/${file}`;
      let lastModification = fs.statSync(path).mtimeMs;

      if (!folderItems[file]) {
        folderItems[file] = lastModification;
        console.log(`File ${path} has been added`);
      } else if (folderItems[file] !== lastModification) {
        folderItems[file] = lastModification;
        console.log(`File ${path} has been changed`);
      }
    });
}, pollInterval);
But the above example will not watch files in subfolders. Another approach that covers all subfolders is to use Unix find through Node's child_process execSync function, as in the example below.
const fs = require('fs');
const { execSync } = require('child_process');

const folderPath = './test';
const pollInterval = 500;
let folderItems = {};

setInterval(() => {
  let fileList = execSync(`find ${folderPath}`).toString().split('\n');

  for (let file of fileList) {
    if (file.length < 1) continue;

    let lastModification = fs.statSync(file).mtimeMs;

    if (!folderItems[file]) {
      folderItems[file] = lastModification;
      console.log(`File ${file} has been added`);
    } else if (folderItems[file] !== lastModification) {
      folderItems[file] = lastModification;
      console.log(`File ${file} has been changed`);
    }
  }
}, pollInterval);