How to check if a path is a directory or a file? - javascript

I want to check whether a path is a file or a directory. If it's a directory, I want to log the directories and files separately; later I want to send them as a JSON object.
const testFolder = './data/';

fs.readdir(testFolder, (err, files) => {
  files.forEach(file => {
    console.log(`FILES: ${file}`);
  });
});
Edit:
If I try this:
fs.readdir(testFolder, (err, files) => {
  files.forEach(file => {
    if (fs.statSync(file).isDirectory()) {
      console.log(`DIR: ${file}`);
    } else {
      console.log(`FILE: ${file}`);
    }
  });
});
I get this error:
binding.lstat(pathModule._makeLong(path))
Update: Found the solution. Because readdir returns bare filenames rather than full paths, I had to add testFolder + file like this:

if (fs.statSync(testFolder + file).isDirectory()) {
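
A slightly more robust variant of the same fix is to build the path with path.join instead of string concatenation, so it works whether or not testFolder ends in a slash. A minimal sketch (not from the original post):

const fs = require('fs');
const path = require('path');

const testFolder = './data/';

fs.readdir(testFolder, (err, files) => {
  if (err) throw err;
  files.forEach(file => {
    // join the folder and the bare filename before calling statSync
    const fullPath = path.join(testFolder, file);
    if (fs.statSync(fullPath).isDirectory()) {
      console.log(`DIR: ${file}`);
    } else {
      console.log(`FILE: ${file}`);
    }
  });
});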

A quick Google search turns up:

var fs = require('fs');

var stats = fs.statSync("c:\\dog.jpg");
console.log('is file ? ' + stats.isFile());

Read more: http://www.technicalkeeda.com/nodejs-tutorials/how-to-check-if-path-is-file-or-directory-using-nodejs
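
The same Stats object answers the directory question too; for example (a small sketch, assuming ./data exists):

var fs = require('fs');

var stats = fs.statSync('./data');
console.log('is file ? ' + stats.isFile());
console.log('is directory ? ' + stats.isDirectory());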

Since Node 10.10, fs.readdir has a withFileTypes option which makes it return fs.Dirent directory entries instead of just filenames. A directory entry contains useful methods such as isDirectory and isFile.
Your example would then be solved by:
const testFolder = './data/';

fs.readdir(testFolder, { withFileTypes: true }, (err, dirEntries) => {
  dirEntries.forEach((dirEntry) => {
    const { name } = dirEntry;
    if (dirEntry.isDirectory()) {
      console.log(`DIR: ${name}`);
    } else {
      console.log(`FILE: ${name}`);
    }
  });
});
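
The same option works with the promise API as well; a sketch, assuming Node 10.10+:

const fs = require('fs');

async function listEntries(folder) {
  // readdir resolves to an array of fs.Dirent objects
  const dirEntries = await fs.promises.readdir(folder, { withFileTypes: true });
  for (const dirEntry of dirEntries) {
    console.log(`${dirEntry.isDirectory() ? 'DIR' : 'FILE'}: ${dirEntry.name}`);
  }
}

listEntries('./data/').catch(console.error);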

Related

Nodejs path.resolve is not defined

// codenotworking
const path = require("path");
const fs = require("fs");
log = console.log;

const names = [];

function collectFileNamesRecursively(path) {
  fs.readdir(path, (err, files) => {
    err ? log(err) : log(files);

    // replacing paths
    for (const index in files) {
      const file = files[index];
      files[index] = path.resolve(path, file);
    }

    for (let file of files) {
      fs.stat(file, (err, stat) => {
        err ? log(err) : null;
        if (stat.isDirectory()) {
          collectFileNamesRecursively(file);
        }
        names.push(file);
      });
    }
  });
}

collectFileNamesRecursively(path.join(__dirname, "../public"));
I am using Node.js v10.8.0 and the directory structure is:

- project/
  - debug/
    - codenotworking.js
  - public/
    - js/
      - file2.js
    - file.html
Whenever I run this code I get the following error:

TypeError: path.resolve is not a function
    at fs.readdir (C:\backup\project\debug\codenotworking.js:17:24)
    at FSReqWrap.oncomplete (fs.js:139:20)

What am I doing wrong here?
You're shadowing your path import by naming the parameter of collectFileNamesRecursively path as well, so inside the function path refers to the string argument, not the module. Change the parameter name to something else.
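
A minimal sketch of that rename (hypothetical parameter name dir):

const path = require("path");
const fs = require("fs");

// renamed parameter: `dir` instead of `path`
function collectFileNamesRecursively(dir) {
  fs.readdir(dir, (err, files) => {
    if (err) return console.log(err);
    // `path` now refers to the imported module again
    console.log(files.map(file => path.resolve(dir, file)));
  });
}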
Apart from that, using recursion with callbacks this way won't work; I would recommend using async/await instead. Something like:
const path = require('path');
const fs = require('fs');

async function collectFileNamesRecursively(currBasePath, foundFileNames) {
  const dirContents = await fs.promises.readdir(currBasePath);

  for (const file of dirContents) {
    const currFilePath = path.resolve(currBasePath, file);
    const stat = await fs.promises.stat(currFilePath);

    if (stat.isDirectory()) {
      await collectFileNamesRecursively(currFilePath, foundFileNames);
    } else {
      foundFileNames.push(file);
    }
  }
}
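
You would call it along these lines (a usage sketch, not part of the original answer):

const foundFileNames = [];

collectFileNamesRecursively(path.join(__dirname, '../public'), foundFileNames)
  .then(() => console.log(foundFileNames))
  .catch(console.error);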

Upload directory based on form field data in formidable?

When uploading files (images for a Project entity), I would like to create a new Linux subdirectory in /public/images for each Project ID and store its images there. However, the images are saved into the configured directory immediately, before I can read the Project ID (which is in the request). Is there a way to do this with formidable, or perhaps multer?
// Upload Image
router.post("/project_image", function(req, res, next) {
  const form = new IncomingForm({
    uploadDir: process.cwd() + "/public/images", // <- e.g. I would like this to be `/public/images/${req.body.project_id}`
    keepExtensions: true
  });
  form.parse(req);

  let project;
  form.on("field", (name, value) => {
    project = JSON.parse(value);
  });

  form.on("file", (field, file) => {
    let path = file.path;
    let fileName = path.substr(path.lastIndexOf("upload"));
    return req.db
      .from("projects")
      .where("id", "=", project.project_id)
      .update({ image: "/images/" + fileName })
      .then(() => {
        return res.status(200).json({
          message: "Image Upload Successful",
          error: false
        });
      });
  });

  form.on("end", () => {});
});
Thank you.
Solved it myself with the following. Basically I move the file to its intended destination.
// Upload Image
router.post("/project_image", function(req, res, next) {
  const directory = process.cwd() + "/public/images";
  const form = new IncomingForm({
    uploadDir: directory,
    keepExtensions: true
  });
  form.parse(req);

  let project;
  form.on("field", (name, value) => {
    project = JSON.parse(value);
  });

  form.on("file", (field, file) => {
    let path = file.path;
    let fileName = path.substr(path.lastIndexOf("upload"));
    let destinationPath = directory + `/${project.project_id}/`;
    // create the per-project directory if it doesn't exist yet
    if (!fs.existsSync(destinationPath)) {
      fs.mkdirSync(destinationPath);
    }
    moveFile(path, destinationPath);
    return req.db
      .from("projects")
      .where("id", "=", project.project_id)
      .update({ image: "/images/" + fileName })
      .then(() => {
        return res.status(200).json({
          message: "Image Upload Successful",
          error: false
        });
      });
  });

  form.on("end", () => {});
});
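
moveFile isn't shown in the post; a minimal implementation along these lines would fit (a sketch using fs.renameSync, assuming source and destination live on the same filesystem):

const fs = require("fs");
const path = require("path");

// Move `src` into the directory `destDir`, keeping the original filename.
function moveFile(src, destDir) {
  const dest = path.join(destDir, path.basename(src));
  // rename only works within a single filesystem; use copy + unlink otherwise
  fs.renameSync(src, dest);
  return dest;
}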

Creating a read stream to a directory and piping entire directory to MongoDB Bucket

I am trying to save an entire directory to a MongoDB GridFS bucket.
It works when I zip the directory first, but I need the files to be stored unzipped in the bucket.
When I try to stream the directory directly, I get the error "Illegal operation on a directory".
I've been trying to find a solution, but cannot find one that handles a directory.
let conn = mongoose.connection;
let gfb;

conn.once('open', () => {
  // init the stream
  console.log('- Connection open -');
  gfb = new mongoose.mongo.GridFSBucket(conn.db, {
    chunkSizeBytes: 1024,
    bucketName: 'uploads'
  });
});

const uploaddb = (req, res, file) => {
  conn.once('open', () => {
    console.log('- Connection open -');
    gfb = new mongoose.mongo.GridFSBucket(conn.db, {
      chunkSizeBytes: 1024,
      bucketName: 'uploads'
    });
    try {
      uploadDir('/Users/nancycollins/virtuload-beta/backend/uploads/unzipped/');
    } catch (e) {
      console.error(e);
    }
  });
};
async function uploadDir(folder, subFolder = '') {
  const actualFolder = path.join(folder, subFolder);
  const contents = await fs.readdir(actualFolder, { withFileTypes: true });
  await Promise.all(contents.map((fileOrDirectory) =>
    fileOrDirectory.isDirectory()
      ? uploadDir(folder, path.join(subFolder, fileOrDirectory.name))
      : uploadFile(folder, path.join(actualFolder, fileOrDirectory.name))
  ));
}

async function uploadFile(baseFolder, filePath) {
  return new Promise((resolve, reject) => {
    fs.createReadStream(filePath)
      .pipe(gfb.openUploadStream(filePath, { metadata: path.relative(baseFolder, filePath) }))
      .on('error', function (error) {
        reject(error);
      })
      .on('finish', () => {
        resolve();
      });
  });
}
I have also tried changing the try-catch to this:
try {
  const folder = '/Users/nancycollins/virtuload-beta/backend/uploads/unzipped/';
  uploadDir(folder);
} catch (e) {
  console.error(e);
}
EDIT:
This is the updated version.
The error currently reads:
TypeError [ERR_INVALID_ARG_TYPE]: The "path" argument must be of type string. Received type object
at validateString (internal/validators.js:125:11)
at Object.join (path.js:1147:7)
etc...
Workaround: upload the individual files and keep their relative folder structure in the GridFS metadata. In this example uploadDir is a recursive function that walks the given folder and its subfolders and calls uploadFile for every file found.
This example assumes you are using a promisified version of fs.
async function uploadDir(folder, subFolder = '') {
  const actualFolder = path.join(folder, subFolder);
  const contents = await fs.readdir(actualFolder, { withFileTypes: true });
  await Promise.all(contents.map((fileOrDirectory) =>
    fileOrDirectory.isDirectory()
      ? uploadDir(folder, path.join(subFolder, fileOrDirectory.name))
      : uploadFile(folder, path.join(actualFolder, fileOrDirectory.name))
  ));
}

async function uploadFile(baseFolder, filePath) {
  return new Promise((resolve, reject) => {
    fs.createReadStream(filePath)
      .pipe(gfb.openUploadStream(filePath, { metadata: path.relative(baseFolder, filePath) }))
      .on('error', function (error) {
        reject(error);
      })
      .on('finish', () => {
        resolve();
      });
  });
}
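
Note that the snippet awaits fs.readdir but also calls fs.createReadStream, so a plain require('fs') won't satisfy both. One way to wire it up (a sketch, assuming Node 10+):

const path = require('path');
const rawFs = require('fs');

// promise-based readdir, stream-based createReadStream
const fs = {
  readdir: rawFs.promises.readdir,
  createReadStream: rawFs.createReadStream
};

// with that in scope, the uploadDir/uploadFile pair above runs as written:
uploadDir('/Users/nancycollins/virtuload-beta/backend/uploads/unzipped/')
  .then(() => console.log('upload finished'))
  .catch(console.error);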
Edit: if you don't need to store any other info, you can simply set the GridFS file's name to the relative path.
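
That would amount to a change like this in uploadFile (a sketch):

async function uploadFile(baseFolder, filePath) {
  return new Promise((resolve, reject) => {
    fs.createReadStream(filePath)
      // store the relative path as the file name instead of in metadata
      .pipe(gfb.openUploadStream(path.relative(baseFolder, filePath)))
      .on('error', reject)
      .on('finish', resolve);
  });
}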

Passing arguments to a callback function within a separate module

I want to create a separate module containing a function with a callback that I call from my main file.
My program should take three arguments: a directory, a file extension, and a callback function that filters the directory listing by that file extension.
Here's my module.js file
var fs = require('fs')

module.exports = function (directory, fileExtension, callbackfun) {
  fs.readdir(directory, callbackfun);
}
Then in the main file I import the module and use the function:
modulejs(process.argv[2], process.argv[3], callbackfun(callbackfn));

var callbackfn = function (err, data, fileExtension) {
  console.log(fileExtension);
  let filtred = data.filter(file => {
    if (file.indexOf("." + fileExtension) > -1) { console.log(file); }
  });
};

function callbackfun(callbackfunc1) {
  callbackfunc1(err, data, fileExtension);
}
The error that I currently get is:

ReferenceError: err is not defined

UPDATE: I have edited my code; I now get a different error.
UPDATE 2: I found a solution on the internet, but I don't understand how it works. If anybody could explain it to me step by step, that would be nice.
module.js

const fs = require('fs')
const path = require('path')

module.exports = function (dir, filterStr, callback) {
  // read the directory listing; readdir reports failures via its callback
  fs.readdir(dir, function (err, list) {
    // on error, forward it to the caller and stop
    if (err) {
      return callback(err)
    }

    // keep only the files whose extension matches ".<filterStr>"
    list = list.filter(function (file) {
      return path.extname(file) === '.' + filterStr
    })

    // success: Node convention is callback(error, result)
    callback(null, list)
  })
}
main.js
const filterFn = require('./module.js')
const dir = process.argv[2]
const filterStr = process.argv[3]

filterFn(dir, filterStr, function (err, list) {
  console.log(list);
  if (err) {
    return console.error('There was an error:', err)
  }
  list.forEach(function (file) {
    console.log(file)
  })
})
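
You would invoke it like this (hypothetical directory and extension):

node main.js ./some/dir js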

Append currently logged in user to JSON file

I am trying to find a way to get the currently logged-in user and then append them to a JSON file. Below is my code: it first reads the dir, then gets the most recent file, returns it, and then appends the currently logged-in user.
I can append a string to the file, but when trying to access req.user it states:

Cannot read property 'user' of undefined

What would I need to include in this file so that it knows what the user is?
let fs = require("fs"),
  express = require("express"),
  _ = require("underscore"),
  User = require("./models/user"),
  path = require("path");

let getFileAddUser = () => {
  let filePath = '../automation_projects/wss-automation-u/results/temp/';
  fs.readdir(filePath, (err, files) => {
    if (err) { throw err; }
    let file = getMostRecentFile(files, filePath);
    console.log(file);
    fs.readFile(filePath + file, 'utf8', (err, data) => {
      let json = JSON.parse(data);
      if (err) {
        console.error(err);
        return;
      } else {
        // Un-comment to write to most recent file.
        // ==================================================
        // This should find the currently logged in user and append them to the most recent file found.
        json.currentuser = req.user;
        fs.writeFile(filePath + file, JSON.stringify(json), (error) => {
          if (error) {
            console.error(error);
            return;
          } else {
            console.log(json);
          }
        });
        // ==================================================
        console.log(data);
      }
    });
  });
};
// Get the most recent file from the results folder.
function getMostRecentFile(files, path) {
  let out = [];
  files.forEach(function (file) {
    let stats = fs.statSync(path + "/" + file);
    if (stats.isFile()) {
      out.push({ "file": file, "mtime": stats.mtime.getTime() });
    }
  });
  out.sort(function (a, b) {
    return b.mtime - a.mtime;
  });
  return (out.length > 0) ? out[0].file : "";
}

module.exports = getFileAddUser;
Thanks to a knowledgeable co-worker and some further research, we were able to get this working. I'd like to share the code we came up with to append the currently logged-in user to our results file. You will also notice we got some help from the Ramda.js library.
let fs = require("fs"),
  express = require("express"),
  _ = require("underscore"),
  User = require("./models/user"),
  r = require("ramda"),
  path = require("path");

// This will be our function to get the most recent file from our dir and
// return it to us. We then use this function below.
function getMostRecentFile(files, path) {
  let out = [];
  let f = r.tail(files);
  console.log(files);
  f.forEach(function (file) {
    let stats = fs.statSync(path + "/" + file);
    if (stats.isFile()) {
      out.push({ "file": file, "mtime": stats.mtime.getTime() });
    }
  });
  out.sort(function (a, b) {
    return b.mtime - a.mtime;
  });
  return (out.length > 0) ? out[0].file : "";
}

// Passing in 'u' as an argument which can then be used in a route to pass in
// anything that we want it to be. In our case it was the currently logged
// in user.
let getUser = (u) => {
  let user = u;
  let filePath = '../automation_projects/wss-automation-u/results/temp/';
  // Comment above and uncomment below for testing locally.
  // let filePath = "./temp/";
  let file = "";
  // Below we read our dir, then get the most recent file using the
  // getMostRecentFile function above.
  read_directory(filePath).then(files => {
    file = getMostRecentFile(files, filePath);
    console.log(file);
    return read_file(filePath + file);
  }).then(x => {
    // Here we parse our data, with x representing the data that we
    // returned above.
    let json = JSON.parse(x);
    return new Promise(function (resolve, reject) {
      json.currentuser = u;
      // And finally we write to the end of the latest file.
      fs.writeFile(filePath + file, JSON.stringify(json), (error) => {
        if (error) reject(error);
        else resolve(json);
        // console.log(json);
      });
    });
  });
};

let read_directory = (path) => {
  return new Promise((resolve, reject) => {
    fs.readdir(path, (err, items) => {
      if (err) {
        return reject(err);
      }
      return resolve([path, ...items]);
    });
  });
};

let read_file = (path) => {
  return new Promise((resolve, reject) => {
    fs.readFile(path, "utf8", (err, items) => {
      if (err) {
        return reject(err);
      }
      return resolve(items);
    });
  });
};

module.exports = getUser;
Below is an example route showing how to use the getUser module. You will want to require it like any other Node.js dependency. Hope this helps someone in the future.
let getUser = require("getuser");

// Make a route to use the getUser module and pass in our argument value.
app.get("/", (req, res) => {
  // Within the route, pass in whatever you want to equal 'u' in the getuser module.
  getUser(req.user.username);
  res.render("index", { username: req.user });
});
