Migrating from data.task to folktale on handling rejection

In the data.task package, I could resolve or reject an API call as follows:
import Task from 'data.task';
import fs from 'fs';

const readFile = (filename, enc) => {
  return new Task((rej, res) =>
    fs.readFile(filename, enc, (err, contents) => {
      err ? rej(err) : res(contents);
    })
  );
};
How would I accomplish that in the new folktale version of Task? I can resolve requests, but how do I reject? I have tried the following:
const {task, rejected} = require('folktale/concurrency/task');
import fs from 'fs';

const readFile = (filename, enc) => {
  return task(res => {
    fs.readFile(filename, enc, (err, contents) => {
      err ? rejected(err) : res.resolve(contents);
    });
  });
};

const writeFile = (filename, contents) => {
  return task(res => {
    fs.writeFile(filename, contents, (err, success) => {
      err ? rejected(err) : res.resolve(success);
    });
  });
};
const app = readFile('FILE_DOESNOT_EXIST.json', 'utf-8')
  .map(contents => contents.replace(/8/g, '6'))
  .chain(contents => writeFile('config1.json', contents));

app.run().listen({
  onCancelled: () => {
    console.log('the task was cancelled');
  },
  onRejected: () => {
    console.log('something went wrong');
  },
  onResolved: value => {
    console.log(`The value is Good`);
  },
});
When I give a file that doesn't exist, the onRejected handler does not get called.
What I expect to see:
Since I have the program read a file that does not exist, it should run onRejected, which should log "something went wrong".
What I see now:
Nothing. The program does not error out, but it also does not produce anything; it simply runs as normal.
When using data.task (the older version of Task), I can use reject, which is why it still works there. How do I do it now with the new version of Task?

OK, this is really silly! For some reason I could not find this solution right away in the docs, which is why I imported rejected from task...
Basically, the resolver object passed to task has not only resolve but also reject. This should have been obvious, but it was not clear from the docs.
So here is the working code:
import {task} from 'folktale/concurrency/task';
import fs from 'fs';

const readFile = (filename, enc) => {
  return task(res => {
    fs.readFile(filename, enc, (err, contents) => {
      // pass err to reject so the failure reason reaches onRejected
      err ? res.reject(err) : res.resolve(contents);
    });
  });
};

const writeFile = (filename, contents) => {
  return task(res => {
    fs.writeFile(filename, contents, (err, success) => {
      err ? res.reject(err) : res.resolve(success);
    });
  });
};
const app = readFile('confg.json', 'utf-8')
  .map(contents => contents.replace(/8/g, '6'))
  .chain(contents => writeFile('config1.json', contents));

app.run().listen({
  onCancelled: () => {
    console.log('the task was cancelled');
  },
  onRejected: () => {
    console.log('something went wrong');
  },
  onResolved: value => {
    console.log(`The value is Good`);
  },
});
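For reference, the rejected import from my first attempt does something different: as far as I can tell from the folktale docs, rejected(reason) constructs a Task that is already rejected, so calling it inside a resolver just builds a new Task and throws it away. A minimal sketch of its intended use:

const {rejected} = require('folktale/concurrency/task');

// rejected(reason) builds a Task that starts out rejected with `reason`;
// inside another task's resolver it is a no-op, which is why the first
// attempt above failed silently.
rejected('nope').run().listen({
  onRejected: reason => console.log('rejected with', reason), // logs: rejected with nope
  onResolved: () => console.log('never reached'),
});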


TypeError: resolve is not a function

I am trying to write a function that will read a file and, depending upon success or failure in reading it, will call the resolve or reject function. Below is the code that I have written.
let fs = require('fs');
const FILE_NAME = './assets/pies.json';

let pieRepo = {
  get: function(resolve, reject) {
    fs.readFile(FILE_NAME, function(err, data) {
      if (err) {
        reject(err);
      } else {
        resolve(JSON.parse(data));
      }
    });
  }
};

module.exports = pieRepo;
However, when I run "npm start" it throws the error below:
/workingdirectory/repos/pieRepo.js:12
resolve(JSON.parse(data));
^
TypeError: resolve is not a function
at /workingdirectory/repos/pieRepo.js:12:17
at FSReqWrap.readFileAfterClose [as oncomplete] (internal/fs/read_file_context.js:53:3)
I am using version 16.11.0 of node.
The issue you are facing is with the way you are trying to resolve the promise. The resolve and reject functions are provided by the Promise constructor:
let fs = require("fs");
const FILE_NAME = "YOUR PATH TO FILE";
let pieRepo = {
get: () => {
return new Promise((resolve, reject) => {
fs.readFile(FILE_NAME, (err, data) => {
if (err) {
return reject(err);
}
resolve(JSON.parse(data));
});
});
},
};
module.exports = pieRepo;
You can read more about Promise in the documentation.
Resolve and reject are functions available in promises.
Replace your code with this:
let fs = require('fs');
const FILE_NAME = './assets/pies.json';

let pieRepo = {
  get: () => {
    return new Promise((resolve, reject) => {
      fs.readFile(FILE_NAME, (err, data) => {
        if (err) {
          return reject(err);
        }
        resolve(JSON.parse(data));
      });
    });
  },
};

module.exports = pieRepo;
Now, you can await the get function.
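For example, from any async context (the require path here is just an assumption):

const pieRepo = require('./repos/pieRepo'); // hypothetical path to the module above

(async () => {
  const pies = await pieRepo.get(); // resolves with the parsed JSON
  console.log(pies);
})().catch(console.error); // rejections from readFile land here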
If this were my project though, the code would look like this:
let fs = require('fs/promises');
const FILE_NAME = './assets/pies.json';

let pieRepo = {
  get: async () => {
    try {
      const data = await fs.readFile(FILE_NAME);
      return data;
    } catch (err) {
      throw new Error(err.message);
    }
  },
};

module.exports = pieRepo;
If you want to promisify fs.readFile, you can, but promisified versions of fs functions are available through fs/promises.
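To make that concrete, here is a minimal sketch of both routes, using the file path from the question:

const util = require('util');
const fs = require('fs');
const fsp = require('fs/promises'); // built-in promise API

const readFileP = util.promisify(fs.readFile); // hand-promisified callback API

// Both return promises that resolve with the raw file contents:
readFileP('./assets/pies.json', 'utf8').then(JSON.parse).catch(console.error);
fsp.readFile('./assets/pies.json', 'utf8').then(JSON.parse).catch(console.error);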
You have to use Promise; here is a very simple example:
function test() {
  return new Promise((resolve, reject) => {
    if (1 === 1) {
      resolve(true);
    } else {
      reject(false);
    }
  });
}
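Consuming it then looks like this:

test()
  .then(value => console.log('resolved with', value)) // 1 === 1, so this branch runs
  .catch(reason => console.log('rejected with', reason));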

How to call result callback of async.map in JavaScript?

I want to understand the mechanics of callbacks, so I am trying to compute the total size of the files in one directory. Here is my code:
import path from 'path';
import fs from 'fs';
import async from 'async';

export const getDirectorySize = (directory) => {
  fs.readdir(directory, (error, files) => {
    if (error) throw error;
    const counter = (el, callback) => {
      fs.stat(path.join(directory, el), (error2, result) => {
        if (error2) throw error2;
        if (result.isFile()) callback(null, result.size);
      });
    };
    async.map(files, counter, (error3, results) => {
      if (error3) throw error3;
      console.log(results.reduce((prev, current) => prev + current, 0));
    });
  });
};

getDirectorySize('/usr/src/app');
When I run the code above, the third argument (the final callback) of async.map is never called. But the counter function is called (there were no errors)! So how do I see the results of async.map?
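For what it's worth, a likely cause: async.map only fires its final callback once the iteratee has called callback for every item, and counter skips the call whenever result is not a file (a subdirectory, for instance). A sketch of a counter that always reports back, counting non-files as size 0:

const counter = (el, callback) => {
  fs.stat(path.join(directory, el), (err, result) => {
    if (err) return callback(err); // report errors through the callback instead of throwing
    callback(null, result.isFile() ? result.size : 0); // always call back, even for directories
  });
};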

Rename files asynchronously in Node.js if destination files don't exist

I am trying to rename files asynchronously in Node.js only if destination files don't exist.
I made a quick test as follows:
const fs = require('fs')
const files = [ 'file1', 'file2', 'file3' ]
const new_name = 'new-name' // same destination name for all
fs.exists() - DEPRECATED
for (let file of files)
  fs.exists(new_name, (exists) => {
    if (!exists) fs.rename(file, new_name, (err) => {})
  })
fs.access() - RECOMMENDED
for (let file of files)
  fs.access(new_name, fs.constants.F_OK, (err) => {
    if (err) fs.rename(file, new_name, (err) => {})
  })
fs.move() - from fs-extra
const fs_extra = require('fs-extra')
for (let file of files)
  fs_extra.move(file, new_name, { overwrite: false }, (err) => {})
Each time, all 3 files were overwritten and renamed to one file.
I believe this happens because all the exists checks fire sooner than any rename happens.
I know how to accomplish this task synchronously, but I want to be sure that there is no proper async way to do so.
You can create a Promise which resolves when the file is renamed:
fs.rename(file, new_name, (err) => {
  resolve(); // <------
});
or when renaming is skipped
fs.access(new_name, fs.constants.F_OK, (err) => {
  if (err) {
    return fs.rename(file, new_name, (err) => {
      resolve();
    });
  }
  resolve(); // <------
});
Full code
(async () => {
  for (let file of files) {
    await new Promise((resolve) => {
      fs.access(new_name, fs.constants.F_OK, (err) => {
        if (err) {
          return fs.rename(file, new_name, (err) => {
            resolve();
          });
        }
        resolve();
      });
    });
  }
})();
and if you don't want to mix async/await with Promise
(async () => {
  function rename(file, new_name) {
    return new Promise((resolve) => {
      fs.access(new_name, fs.constants.F_OK, (err) => {
        if (err) {
          return fs.rename(file, new_name, (err) => {
            resolve();
          });
        }
        resolve();
      });
    });
  }

  for (let file of files) {
    await rename(file, new_name);
  }
})();
@ponury-kostek's solution works brilliantly and is marked as the accepted answer.
I ended up with the following code, since it's a bit shorter:
// note: fs.move here comes from fs-extra; by default it errors if the destination exists
async function rename_files() {
  for (let file of files)
    await fs.move(file, new_name)
}

rename_files()
Instead of wrapping the fs library in promises, I like to import the promise implementation of the fs library and then call the fs methods with await.
import {promises as fs_promises} from 'fs'; // the promise implementation of the fs library

async function renameFile() {
  const fileFullPath = '1234.txt';
  const newFileFullPath = '5678.txt';
  try {
    // the promise API takes no callback; errors surface as rejections
    await fs_promises.rename(fileFullPath, newFileFullPath);
    console.log("\nFile Renamed\n");
  } catch (error) {
    console.log(error);
  }
}

await renameFile(); // call the async method (top-level await requires an ES module)
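For completeness, a sketch of the same check-then-rename with fs/promises (not from the answers above; note the check and the rename are still not atomic):

const fsp = require('fs/promises');

async function renameIfAbsent(file, newName) {
  try {
    await fsp.access(newName); // resolves only if the destination already exists
    return false;              // destination present, so skip the rename
  } catch {
    await fsp.rename(file, newName);
    return true;
  }
}

// awaiting sequentially keeps the check/rename pairs from racing each other
(async () => {
  for (const file of ['file1', 'file2', 'file3']) {
    await renameIfAbsent(file, 'new-name');
  }
})().catch(console.error);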

How do I refactor this composed function with Ramda.js?

I am writing a small utility using Ramda and data.task that reads image files out of a directory and outputs their size. I got it working like so:
const getImagePath = assetsPath => item => `${assetsPath}${item}`

function readImages(path) {
  return new Task(function(reject, resolve) {
    fs.readdir(path, (err, images) => {
      if (err) reject(err)
      else resolve(images)
    })
  })
}

const withPath = path => task => {
  return task.map(function(images) {
    return images.map(getImagePath(path))
  })
}

function getSize(task) {
  return task.map(function(images) {
    return images.map(sizeOf)
  })
}

const getImageSize = dirPath => compose(getSize, withPath(dirPath), readImages)
The problem is with the withPath function, which adds the correct image path to the image file name but forces my API to pass in the directory name twice: once for reading the files and a second time for building the path. This means I have to call the getImageSize function like so:
const portfolioPath = `${__dirname}/assets/`

getImageSize(portfolioPath)(portfolioPath).fork(
  function(error) {
    throw error
  },
  function(data) {
    console.log(data)
  }
)
Is there any way to pass the dirname as a parameter only once? I want the API to work like this:
getImageSize(portfolioPath).fork(
  function(error) {
    throw error
  },
  function(data) {
    console.log(data)
  }
)
You shouldn't be building paths manually like that.
One of Node's better APIs is the path module – I would recommend that your readImages wrapper be made a generic readdir wrapper which instead resolves an Array of path.resolve'd file paths:
const readdir = dir =>
  new Task ((reject, resolve) =>
    fs.readdir (dir, (err, files) =>
      err
        ? reject (err)
        : resolve (files.map (f => path.resolve (dir, f)))))

const getImagesSizes = dir =>
  readdir (dir) .map (R.map (sizeOf))
Wrapping the Node continuation-passing style APIs just to return a Task gets to be a bother, doesn't it?
const taskify = f => (...args) =>
  new Task ((reject, resolve) =>
    f (...args, (err, x) =>
      err ? reject (err) : resolve (x)))

const readdir = (dir, ...args) =>
  taskify (fs.readdir) (dir, ...args)
    .map (R.map (f => path.resolve (dir, f)))

const getImagesSizes = dir =>
  readdir (dir) .map (R.map (sizeOf))
You should probably also take care to filter out file paths that are directories – unless your sizeOf implementation handles that.
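A usage sketch for the taskify version, matching the question's API (sizeOf and the assets path come from the question):

getImagesSizes (`${__dirname}/assets/`) .fork (
  error => { throw error },
  sizes => console.log (sizes) // array of sizeOf results
)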
I managed to solve this by passing the Task resolution a single object like so:
function readImages(path) {
  return new Task(function(reject, resolve) {
    fs.readdir(path, (err, images) => {
      if (err) reject(err)
      else resolve({ images, path })
    })
  })
}

const withPath = task => {
  return task.map(function({ images, path }) {
    return images.map(getImagePath(path))
  })
}
...and then destructuring it out of the task payload. Now my compose function looks like this:
module.exports = (function getImageSize(dirPath) {
  return compose(getSize, withPath, readImages)
})()
And my API call looks like this:
getImageSize(portfolioPath).fork(
  function(error) {
    throw error
  },
  function(data) {
    console.log(data)
  }
)

NodeJS concatenate all files in a directory

Is there a faster or more succinct way to concatenate all of the files located in a directory using NodeJS?
In bash I could do something like this:
for file in $1
do
  cat "$file"
  echo
done > $2;
Here is what I'm doing now:
var fs = require('fs');
var Promise = require('bluebird');

module.exports = function(directory, destination) {
  return new Promise((resolve, reject) => {
    fs.readdir(directory, (err, files) => {
      if (err) {
        return reject(err);
      }
      (function next() {
        var file = files.shift();
        if (!file) {
          return resolve();
        }
        fs.readFile(directory + '/' + file, (err, content) => {
          if (err) {
            return reject(err);
          }
          fs.appendFile(destination, '\n' + content, (err) => {
            if (err) {
              return reject(err);
            }
            return next();
          });
        });
      })();
    });
  });
};
What about this?
require('child_process').execSync('cat *').toString('UTF-8')
:D
Using the async library you can easily read files in parallel and then join the results.
const fs = require("fs");
const async = require("async");
const path = require("path");
module.exports = function(directory, destination) {
return new Promise((resolve, reject) => {
fs.readdir(directory, (err, files) => {
if (err)
return reject(err);
files = files.map(file => path.join(directory,file));
//Read all files in parallel
async.map(files, fs.readFile, (err, results) => {
if (err)
return reject(err);
//results[0] contents of file #1
//results[1] contents of file #2
//results[n] ...
//Write the joined results to destination
fs.writeFile(destination, results.join("\n"), (err) => {
if (err)
return reject(err);
resolve();
});
});
});
});
}
If you're going to use bluebird, then you get the benefit of promisification. You can use promisifyAll() to convert all error-first-callback async functions in the fs module to return a promise. You can read more about it at the promisification link above.
The code below reads in all of the files as strings, joins their contents into a single string, and writes that string to the destination.
It's probably best not to catch() any returned errors here; rather, the caller should attach a catch() to handle any returned errors as they need.
const Promise = require('bluebird')
const fs = Promise.promisifyAll(require('fs'))
const path = require('path')

module.exports = (directory, destination) => {
  return fs.readdirAsync(directory)
    .map(file => fs.readFileAsync(path.join(directory, file), 'utf8'))
    .then(contents => fs.writeFileAsync(destination, contents.join('\n')))
}
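Usage is the same for this and the other promise-returning variants (the module path is an assumption):

const concat = require('./concat'); // whichever implementation above

concat('./parts', './combined.txt')
  .then(() => console.log('done'))
  .catch(console.error); // handle errors here, as suggested above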
All in one line:
fs.readdirSync('./').forEach((file) => { if(fs.lstatSync(file).isFile()) fs.appendFileSync('./output.file', fs.readFileSync(file).toString()) })
Replace './' with target directory and './output.file' with target destination.
You can do the same thing without bluebird, since Node 8.x includes the util package to promisify the fs API.
This example shows how I use it in my project to concatenate minified files (so remove the filter if necessary).
const {promisify} = require("util"); //requires node 8.X
const readdir = promisify(fs.readdir);
const readFile = promisify(fs.readFile);
const appendFile = promisify(fs.appendFile);
// Append all minified and obsfucated files in source directory
// The resulting file is generated in destination
function appendFile(directory, destination) {
readdir(directory)
.then((files) => {
console.log('FILES CONTENT:', files);
files.filter(file => {
console.log('FILTER > ' + file);
return (file.indexOf('-min.js') != -1 && file.indexOf('-min.js.map') == -1)
})
.map(file => {
console.log('MAP ('+destination+') > ' + path.join(directory, file));
readFile(path.join(directory, file), 'utf8')
.then(data => {
//console.log('DATA:', data);
appendFile(destination, data+'\n')
.then(() => {
console.log('append done');
})
.catch((err) => {
displayError(err);
});
});
});
})
.catch((err) => {
console.log('ERROR:', err);
displayError(err);
});
}
