How do I refactor this composed function with Ramda.js? - javascript

I am writing a small utility using Ramda and data.task that reads image files out of a directory and outputs their sizes. I got it working like so:
const getImagePath = assetsPath => item => `${assetsPath}${item}`

function readImages(path) {
  return new Task(function(reject, resolve) {
    fs.readdir(path, (err, images) => {
      if (err) reject(err)
      else resolve(images)
    })
  })
}

const withPath = path => task => {
  return task.map(function(images) {
    return images.map(getImagePath(path))
  })
}

function getSize(task) {
  return task.map(function(images) {
    return images.map(sizeOf)
  })
}

const getImageSize = dirPath => compose(getSize, withPath(dirPath), readImages)
The problem is with the withPath function, which prefixes each image file name with the correct directory path but forces my API to take the directory name twice: once for reading the files and a second time for building the paths. This means I have to call getImageSize like so:
const portfolioPath = `${__dirname}/assets/`
getImageSize(portfolioPath)(portfolioPath).fork(
  function(error) {
    throw error
  },
  function(data) {
    console.log(data)
  }
)
Is there any way to pass the dirname as a parameter only once? I want the API to work like this:
getImageSize(portfolioPath).fork(
  function(error) {
    throw error
  },
  function(data) {
    console.log(data)
  }
)

You shouldn't be building paths manually like that.
One of Node's better APIs is the path module – I would recommend making your readImages wrapper a generic readdir wrapper that instead resolves an Array of path.resolve'd file paths:
const readdir = dir =>
  new Task ((reject, resolve) =>
    fs.readdir (dir, (err, files) =>
      err
        ? reject (err)
        : resolve (files.map (f => path.resolve (dir, f)))))

const getImagesSizes = dir =>
  readdir (dir) .map (R.map (sizeOf))
Wrapping the Node continuation-passing style APIs just to return a Task gets to be a bother, doesn't it?
const taskify = f => (...args) =>
  new Task ((reject, resolve) =>
    f (...args, (err, x) =>
      err ? reject (err) : resolve (x)))

const readdir = (dir, ...args) =>
  taskify (fs.readdir) (dir, ...args)
    .map (R.map (f => path.resolve (dir, f)))

const getImagesSizes = dir =>
  readdir (dir) .map (R.map (sizeOf))
You should probably also take care to filter out file paths that are directories – unless your sizeOf implementation handles that.
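A minimal sketch of that filtering, using the synchronous fs.statSync for brevity (a fully Task-based version would need to sequence one fs.stat Task per path):

// keep only plain files before measuring them
const isFile = p => fs.statSync (p) .isFile ()

const getImagesSizes = dir =>
  readdir (dir)
    .map (paths => paths.filter (isFile))
    .map (R.map (sizeOf))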

I managed to solve this by having the Task resolve with a single object, like so:
function readImages(path) {
  return new Task(function(reject, resolve) {
    fs.readdir(path, (err, images) => {
      if (err) reject(err)
      else resolve({ images, path })
    })
  })
}

const withPath = task => {
  return task.map(function({ images, path }) {
    return images.map(getImagePath(path))
  })
}
...and then destructuring it out of the task payload. Now my composed function looks like this:
module.exports = (function getImageSize(dirPath) {
  return compose(getSize, withPath, readImages)
})()
And my API call looks like this:
getImageSize(portfolioPath).fork(
  function(error) {
    throw error
  },
  function(data) {
    console.log(data)
  }
)

Related

Promise function doesn't trigger after another promise

I'm working on a microservice that takes either docx files or HTML strings as input, transforms them into a single PDF file, and returns its link as output.
My code looks like this so far:
// 'files' is an array of uploaded docx files.
const uploaded = files.map((file) => {
  return new Promise((resolve, reject) => {
    pump(
      file.toBuffer(),
      fs.createWriteStream(join(__dirname, 'files', file.filename))
        .on('finish', resolve)
    )
  })
})
Promise.all(uploaded)
  // Is triggered
  .then(async () => await convertFiles())
  // Is not triggered
  .then(async () => {
    // concatStoreFiles() is an external function because I need it somewhere else too
    test = await concatStoreFiles(join(__dirname, 'files'))
    console.log({test})
    res.send(test)
  })

const convertFiles = () => {
  return new Promise((resolve, reject) => {
    const cmd = `soffice --headless --convert-to pdf --outdir ${join(__dirname, 'files')} ${join(__dirname, 'files', '*.*')}`
    exec(cmd, (error, stdout, stderror) => {
      if (error) console.warn(error)
      resolve(stdout ?? stderror)
    })
  })
}
concatStoreFile.js
module.exports = async function concatFiles (dirPath, outPath) {
  return new Promise((resolve, reject) => {
    const existingFiles = []
    fs.readdir(dirPath, (e, files) => {
      files.forEach((file) => {
        // is added to the files list only if it ends with ".pdf"
        if (/[\d\w_-]+\.pdf/.test(file)) {
          existingFiles.push(file)
        }
      });
      resolve(existingFiles)
    })
  })
}
I'm working with Insomnia for my development/test process, and it tells me that I get an empty response. However, I'm supposed to get an array of the PDF files existing in a specific directory. I'm not even getting the console.log({test}) output, so I don't think my second then() is triggered.
I'm really rusty with async/await and Promise syntax; what should I do in this situation?
Thank you in advance
The #fastify/multipart toBuffer() API returns a Promise, not a buffer. Check out this article.
So you need to write something like:
const uploaded = files.map(processFile)

async function processFile (file) {
  const buffer = await file.toBuffer()
  const storedFileName = join(__dirname, 'files', file.filename)
  const writeStream = fs.createWriteStream(storedFileName)
  return new Promise((resolve, reject) => {
    pump(buffer, writeStream, (err) => {
      if (err) { return reject(err) }
      resolve(storedFileName)
    })
  })
}
Moreover, to improve the code, I returned the storedFileName instead of recalculating it.
You can convert this:
.then(async () => await convertFiles())
to this:
.then(() => convertFiles())
Mixing async/await and Promise then/catch leads to hidden bugs that are hard to find.
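For example, the whole upload flow can live in one async handler – a sketch reusing processFile from above (handleUpload is a made-up name, and files, convertFiles and concatStoreFiles are assumed to behave as described in the question):

async function handleUpload (files, res) {
  try {
    await Promise.all(files.map(processFile)) // store every upload
    await convertFiles()                      // then convert
    const pdfFiles = await concatStoreFiles(join(__dirname, 'files'))
    res.send(pdfFiles)
  } catch (err) {
    // a single catch replaces the scattered then/catch pairs
    console.error(err)
  }
}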

Cannot add up results of a function in a loop

I have this function getSize() (from the npm module get-folder-size) that calculates the total size of all files in a folder (directory).
const getSize = require('get-folder-size')

let folders = ["C:\\test folder", "C:\\test folder 2\\sub folder"]

function totalFilesizeOfAllFolders () {
  let totalSizeOfAllFolders = 0
  folders.forEach(folder => {
    getSize(folder, (err, size) => {
      if (err) { throw err }
      // ADD UP THE "SIZE" TO THE TOTAL SOMEHOW
      // Just doing the following returns 0: totalSizeOfAllFolders += size
    })
  })
  return totalSizeOfAllFolders
}
Question
How do I loop through the folders array properly and add up the calculated sizes of all the folders in the array? I'm not sure how to get the size out of the callback.
You can use Promise.all here: first construct an array of Promises, and then await all of them, calculating the total size:
//const getSize = require('get-folder-size')
let folders = ["C:\\test folder", "C:\\test folder 2\\sub folder"];

function totalFilesizeOfAllFolders(callback) {
  let folderPromises = folders.map(folder => { // use map to create a new array of promises
    return new Promise((resolve, reject) => getSize(folder, (err, size) => {
      if (err) { return reject(err) }
      resolve(size);
    }));
  })
  Promise.all(folderPromises) // Promise.all waits for all promises in the array to resolve
    .then(sizes => callback(sizes.reduce((a, b) => a + b, 0))); // reduce the array of sizes to a total size
}

totalFilesizeOfAllFolders(s => console.log(s)); // => 127

// getSize stub. remove me once you have access to the npm package
function getSize(name, callback) { let sizes = {"C:\\test folder": 112, "C:\\test folder 2\\sub folder": 15}; callback(null, sizes[name]) }
You could use a library like Async to help you iterate over the calls asynchronously, then pass a callback to your function to receive the totalSizeOfAllFolders.
function totalFilesizeOfAllFolders (done) {
  let totalSizeOfAllFolders = 0;
  async.each(folders, (folder, callback) => {
    getSize(folder, (err, size) => {
      if (err) { return callback(err) }
      totalSizeOfAllFolders += size;
      callback();
    });
  }, (err) => {
    if (err) { throw err }
    done(totalSizeOfAllFolders);
  });
}
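Usage is then the same shape as the Promise.all version above:

totalFilesizeOfAllFolders(total => console.log(total)) // logs the summed size once every getSize call has finished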

migrating from data.task to folktale on handling rejection

With the data.task package, I could resolve or reject an API call as follows:
import Task from 'data.task';
import fs from 'fs';

const readFile = (filename, enc) => {
  return new Task((rej, res) =>
    fs.readFile(filename, enc, (err, contents) => {
      err ? rej(err) : res(contents);
    })
  );
};
How would I accomplish that in the new folktale version of Task? I can resolve requests, but how do I reject? I have tried the following:
const {task, rejected} = require('folktale/concurrency/task');
import fs from 'fs';

const readFile = (filename, enc) => {
  return task(res => {
    fs.readFile(filename, enc, (err, contents) => {
      err ? rejected(err) : res.resolve(contents);
    });
  });
};

const writeFile = (filename, contents) => {
  return task(res => {
    fs.writeFile(filename, contents, (err, success) => {
      err ? rejected(err) : res.resolve(success);
    });
  });
};
const app = readFile('FILE_DOESNOT_EXIST.json', 'utf-8')
  .map(contents => contents.replace(/8/g, '6'))
  .chain(contents => writeFile('config1.json', contents));

app.run().listen({
  onCancelled: () => {
    console.log('the task was cancelled');
  },
  onRejected: () => {
    console.log('something went wrong');
  },
  onResolved: value => {
    console.log(`The value is Good`);
  },
});
When I give it a file that doesn't exist, the onRejected handler does not get called.
What do I expect to see:
Since I have the program read a file that does not exist, it should run onRejected, which should log something went wrong.
What do I see now:
Nothing. The program does not bug out, but it also does not produce anything; it simply runs as normal.
When using data.task (the older version of Task), I can use reject, which is why it still works there. How do I do it now with the new version of Task?
OK, this is really silly! For some reason I could not find this solution right away in the docs. That's why I imported rejected from task...
Basically, the resolver that task passes in has not only resolve but also reject, which should have been obvious, but it was not clear in the docs.
So here is working code:
import {task} from 'folktale/concurrency/task';
import fs from 'fs';

const readFile = (filename, enc) => {
  return task(res => {
    fs.readFile(filename, enc, (err, contents) => {
      err ? res.reject(err) : res.resolve(contents);
    });
  });
};

const writeFile = (filename, contents) => {
  return task(res => {
    fs.writeFile(filename, contents, (err, success) => {
      err ? res.reject(err) : res.resolve(success);
    });
  });
};
const app = readFile('confg.json', 'utf-8')
  .map(contents => contents.replace(/8/g, '6'))
  .chain(contents => writeFile('config1.json', contents));

app.run().listen({
  onCancelled: () => {
    console.log('the task was cancelled');
  },
  onRejected: () => {
    console.log('something went wrong');
  },
  onResolved: value => {
    console.log(`The value is Good`);
  },
});
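Since the resolver exposes both resolve and reject, the callback-wrapping helper pattern from data.task carries over almost verbatim – a sketch (the name taskify is mine, not part of folktale):

import {task} from 'folktale/concurrency/task';
import fs from 'fs';

// wrap any error-first Node callback API in a folktale Task
const taskify = f => (...args) =>
  task(resolver =>
    f(...args, (err, x) =>
      err ? resolver.reject(err) : resolver.resolve(x)));

const readFile = (filename, enc) => taskify(fs.readFile)(filename, enc);
const writeFile = (filename, contents) => taskify(fs.writeFile)(filename, contents);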

Recursive Promise-based directory reading

I have a library that scans a directory for files on a remote server. It returns a Promise like this:
client.scanRemoteDirectory(path)
  .then(files => {
    console.log(files)
  })
I'm trying to write a recursive method to scan directories and subdirectories too. But I'm running into some async issues. My function is like this:
function scanDir(path) {
  // Scan the remote directory for files and sub-directories
  return client.scanRemoteDirectory(path)
    .then(files => {
      for (const file of files) {
        // If a sub-directory is found, scan it too
        if (file.type === 'directory') {
          return scanDir(file.path) // Recursive call
        }
      }
    })
}
scanDir('some/path')
  .then(() => {
    console.log('done')
  })
This works; however, because of the return in front of the recursive scanDir() call, the method only scans the first subdirectory it finds in each directory and skips the rest.
So for example if the structure is something like this:
/some/path
/some/path/dirA
/some/path/dirA/subdirA
/some/path/dirB
/some/path/dirB/subdirB
The above method will only scan:
/some/path
/some/path/dirA
/some/path/dirA/subdirA
It will skip dirB and its children altogether, since the method finds dirA first.
If I simply remove the return from the return scanDir(...) call, then it scans everything just fine. But then my final console.log('done') happens too soon, because the recursive scans are still running asynchronously.
So how do I solve this problem? What is the proper recursive Promise approach where I can still preserve the asynchrony but also scan every subdirectory recursively?
You might want to use Promise.all in this situation to run your 'sub' promises in parallel, for example:
function scanDir(path) {
  return client.scanRemoteDirectory(path)
    .then(all => {
      const files = all.filter(file => file.type !== 'directory');
      const dirs = all.filter(file => file.type === 'directory');
      return Promise.all(dirs.map(dir => scanDir(dir.path))) // Execute all 'sub' promises in parallel.
        .then(subFiles => {
          return files.concat(...subFiles); // flatten each sub-directory's results into one list
        });
    });
}
Alternatively you could use the reduce function to run your 'sub' promises in sequence:
function scanDir(path) {
  return client.scanRemoteDirectory(path)
    .then(all => {
      const files = all.filter(file => file.type !== 'directory');
      const dirs = all.filter(file => file.type === 'directory');
      return dirs.reduce((prevPromise, dir) => { // Execute all 'sub' promises in sequence.
        return prevPromise.then(output => {
          return scanDir(dir.path)
            .then(files => {
              return output.concat(files);
            });
        });
      }, Promise.resolve(files));
    });
}
Async / await is definitely the easiest solution to read:
async function scanDir(path) {
  let output = [];
  const files = await client.scanRemoteDirectory(path);
  for (const file of files) {
    if (file.type !== 'directory') {
      output.push(file);
      continue;
    }
    const subFiles = await scanDir(file.path);
    output = output.concat(subFiles);
  }
  return output;
}
I would make the then handler async so you can use await in the loop:
function scanDir(path) {
  // Scan the remote directory for files and sub-directories
  return client.scanRemoteDirectory(path)
    .then(async files => {
      for (const file of files) {
        // If a sub-directory is found, scan it too
        if (file.type === 'directory') {
          await scanDir(file.path) // Recursive call
        }
      }
    })
}

NodeJS concatenate all files in a directory

Is there a faster or more succinct way to concatenate all of the files located in a directory using NodeJS?
In bash I could do something like this:
for file in $1
do
  cat "$file"
  echo
done > $2;
Here is what I'm doing now:
var fs = require('fs');
var Promise = require('bluebird');

module.exports = function(directory, destination) {
  return new Promise((resolve, reject) => {
    fs.readdir(directory, (err, files) => {
      if (err) {
        return reject(err);
      }
      (function next() {
        var file = files.shift();
        if (!file) {
          return resolve();
        }
        fs.readFile(directory + '/' + file, (err, content) => {
          if (err) {
            return reject(err);
          }
          fs.appendFile(destination, '\n' + content, (err) => {
            if (err) {
              return reject(err);
            }
            return next();
          });
        });
      })();
    });
  });
};
That?
require('child_process').execSync('cat *').toString('UTF-8')
:D
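Tongue-in-cheek, but a sketch of how it could be bent into the module signature from the question (it shells out, so it inherits cat's globbing and quoting caveats, and only works where a POSIX shell is available):

const { execSync } = require('child_process');

// hypothetical drop-in: concatenate every file in `directory` into `destination`
module.exports = (directory, destination) =>
  Promise.resolve(execSync(`cat "${directory}"/* > "${destination}"`));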
Using the async library you can easily read files in parallel and then join the results.
const fs = require("fs");
const async = require("async");
const path = require("path");
module.exports = function(directory, destination) {
return new Promise((resolve, reject) => {
fs.readdir(directory, (err, files) => {
if (err)
return reject(err);
files = files.map(file => path.join(directory,file));
//Read all files in parallel
async.map(files, fs.readFile, (err, results) => {
if (err)
return reject(err);
//results[0] contents of file #1
//results[1] contents of file #2
//results[n] ...
//Write the joined results to destination
fs.writeFile(destination, results.join("\n"), (err) => {
if (err)
return reject(err);
resolve();
});
});
});
});
}
If you're going to use bluebird then you get the benefit of promisification. You can use promisifyAll() to convert all of the error-first, callback-accepting async functions in the fs module into functions that return a promise. You can read more about it in the promisification link above.
The code below reads in all of the files as strings, joins their contents into a single string, and writes that string to the destination.
It's probably best not to catch() any returned errors here; rather, the caller should attach a catch() and handle any returned errors as they need.
const Promise = require('bluebird')
const fs = Promise.promisifyAll(require('fs'))
const path = require('path')

module.exports = (directory, destination) => {
  return fs.readdirAsync(directory)
    .map(file => fs.readFileAsync(path.join(directory, file), 'utf8'))
    .then(contents => fs.writeFileAsync(destination, contents.join('\n')))
}
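A caller then attaches its own handlers – for example, assuming the module above is saved as concat.js:

const concat = require('./concat')

concat('./src', './bundle.txt')
  .then(() => console.log('concatenated'))
  .catch(err => console.error(err))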
All in one line:
fs.readdirSync('./').forEach((file) => { if(fs.lstatSync(file).isFile()) fs.appendFileSync('./output.file', fs.readFileSync(file).toString()) })
Replace './' with target directory and './output.file' with target destination.
You can do the same thing without bluebird, since Node 8.x includes the util package to promisify the fs API.
This example shows how I use it in my project to concatenate minified files (so remove the filter if necessary).
const fs = require("fs"); // requires assumed, missing from the original snippet
const path = require("path");
const {promisify} = require("util"); // requires node 8.X

const readdir = promisify(fs.readdir);
const readFile = promisify(fs.readFile);
const appendFile = promisify(fs.appendFile);

// Append all minified and obfuscated files in source directory
// The resulting file is generated in destination
// (renamed from appendFile to avoid clashing with the promisified helper above)
function appendFiles(directory, destination) {
  readdir(directory)
    .then((files) => {
      console.log('FILES CONTENT:', files);
      files.filter(file => {
        console.log('FILTER > ' + file);
        return (file.indexOf('-min.js') != -1 && file.indexOf('-min.js.map') == -1)
      })
      .map(file => {
        console.log('MAP (' + destination + ') > ' + path.join(directory, file));
        readFile(path.join(directory, file), 'utf8')
          .then(data => {
            //console.log('DATA:', data);
            appendFile(destination, data + '\n')
              .then(() => {
                console.log('append done');
              })
              .catch((err) => {
                displayError(err); // displayError is assumed to be defined elsewhere
              });
          });
      });
    })
    .catch((err) => {
      console.log('ERROR:', err);
      displayError(err);
    });
}
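Usage would then look something like this (the paths are hypothetical):

appendFiles(path.join(__dirname, 'build'), path.join(__dirname, 'all-min.js'))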
