Node.js module sync to async - javascript

I have a simple Node.js module that takes a directory of Handlebars templates, compiles them and then exports an object of them keyed on their name:
'use strict';
// Synchronously load every Handlebars template in ./templates and export an
// object of compiled template functions keyed on the file name (extension
// stripped), e.g. templates['welcome']({ ... }).
var
fs = require('fs'),
path = require('path'),
handlebars = require('handlebars'),
templateDir = path.join(__dirname, 'templates'),
templates = {};

fs.readdirSync(templateDir).forEach(function (file) {
    // Pass an encoding so readFileSync returns a string: handlebars.compile
    // expects a template string, not the Buffer that a bare readFileSync
    // call produces.
    templates[path.basename(file, path.extname(file))] =
        handlebars.compile(fs.readFileSync(path.join(templateDir, file), 'utf8'));
});

module.exports = templates;
So then in other modules I can just:
var templates = require('./templates');
templates[SOME_TEMPLATE]({ ... });
I'm struggling on how to do this asynchronously though, specifically how to export with an emitter in the mix.
// NOTE(review): this is the broken first attempt — the working version is in
// the answer below. The concrete problems:
//  1. `emitter` is the EventEmitter *constructor*, not an instance; it must
//     be `new EventEmitter()` before `.on`/`.emit` can be used.
//  2. `callback()` is invoked immediately with no arguments, before any
//     template has compiled.
//  3. The `files[files.length - 1] === file` test fires when the *last
//     listed* file's read completes — readFile callbacks return in arbitrary
//     order, so earlier reads may still be pending.
//  4. Both `else {}` branches swallow errors silently.
//  5. readFile without an encoding yields a Buffer, not the string
//     handlebars.compile expects.
var emitter = require('events').EventEmitter;
module.exports = function (callback) {
emitter.on('templates-compiled', function () {
callback(templates);
});
callback();
};
fs.readdir(templateDir, function (err, files) {
if (!err) {
files.forEach(function (file) {
fs.readFile(templateDir + file, function(err, data) {
if (!err) {
templates[path.basename(file, path.extname(file))] =
handlebars.compile(data);
// Race: last-listed file finishing does not mean all files finished.
if (files[files.length - 1] === file) {
emitter.emit('templates-compiled');
}
} else {
// Error silently dropped — the caller never hears about it.
}
});
});
} else {
// Error silently dropped here as well.
}
});

Here's a modification of the above that works:
var EventEmitter = require('events').EventEmitter;
var path = require('path');
var fs = require('fs');
module.exports = function(dir, callback) {
var emitter = new EventEmitter();
var templates = {};
emitter.on('templates-compiled', function(t) {
callback(null, t);
});
emitter.on('templates-error', function(e) {
callback(e);
});
fs.readdir(dir, function(err, files) {
if (!err) {
files.forEach(function(file) {
fs.readFile(path.join(dir, file), function(err, data) {
if (!err) {
//note: i'm just calling toString() here - do your compile here instead
templates[path.basename(file, path.extname(file))] = data.toString();
if (files[files.length - 1] === file) {
emitter.emit('templates-compiled', templates);
}
} else {
emitter.emit('templates-error', err);
}
});
});
} else {
emitter.emit('templates-error', err);
}
});
};
However you may want to look at using q or async to trim down the pyramid of doom.

Related

Replacing line in a file using nodejs

I need to find a line in a file and replace it with a new one, all using NodeJS.
Here's what I've done to achieve that:
// Read the whole config file as text and print it to the console.
var fs = require('fs');
fs.readFile('infra_setting.conf', 'utf-8', function (error, contents) {
    // Surface read failures immediately.
    if (error) throw error;
    console.log(contents);
});
var fs = require('fs')

// Rewrite myfile.conf in place: every occurrence of "example" becomes
// "example: 12345678". Errors are logged rather than thrown.
fs.readFile('myfile.conf', 'utf8', function (readErr, data) {
    if (readErr) {
        return console.log(readErr);
    }
    var updated = data.replace(/example/g, 'example: 12345678');
    fs.writeFile('myfile.conf', updated, 'utf8', function (writeErr) {
        if (writeErr) {
            return console.log(writeErr);
        }
    });
});
The problem I have is that the string of the line keeps changing.
1st time => example : 2222
2nd time => example : somthing else
Is there a way to localize the line and replace it by NodeJS ?
Here's an example where I changed a line in a .gitignore file:
// simulate fs.readFileSync('./gitignore', { encoding: 'utf8' })
const fileContent = 'node_modules\r\npackage-lock.json\r\nyarn.lock\r\n*.code-workspace\r\n'

/**
 * Replace the first line containing `lineString` with `newLineString`.
 * Returns the rebuilt content; if no line matches, the content is returned
 * unchanged (minus empty lines, which are always filtered out).
 *
 * @param {string} content - full file text, CRLF-delimited
 * @param {string} lineString - substring identifying the line to replace
 * @param {string} newLineString - replacement line
 * @returns {string} updated content
 */
function changeLine(content, lineString, newLineString) {
    const delimeter = '\r\n'
    // Drop empty entries (e.g. the one produced by a trailing delimiter).
    const parts = content.split(delimeter).filter(v => v.length)
    const lineIndex = parts.findIndex(v => v.includes(lineString))
    // Guard the not-found case: without it, `parts[-1] = ...` silently set
    // a stray "-1" property on the array instead of replacing anything.
    if (lineIndex !== -1) {
        parts[lineIndex] = newLineString
    }
    return parts.join(delimeter)
}
console.log(fileContent)
const change1 = changeLine(fileContent, 'node_modules', 'not_node_modules')
console.log(change1)
const change2 = changeLine(change1, 'package-lock.json', '.vscode')
console.log(change2)
Try it and tell me whether it works.
var fs = require('fs')

/**
 * Stream `FileName`, apply `String.prototype.replace(regexpFind, replace)`
 * to each chunk, then overwrite the file with the accumulated result.
 *
 * @param {RegExp} regexpFind - pattern to search for (use the /g flag)
 * @param {string} replace - replacement text
 * @param {string} FileName - path of the file to update in place
 */
function searchReplaceFile(regexpFind, replace, FileName) {
    var file = fs.createReadStream(FileName, 'utf8');
    var newDATA = '';
    file.on('data', function (chunk) {
        // NOTE(review): replacing chunk-by-chunk can miss a match that
        // straddles a chunk boundary; acceptable for small config files.
        newDATA += chunk.toString().replace(regexpFind, replace);
    });
    file.on('end', function () {
        fs.writeFile(FileName, newDATA, function (err) {
            if (err) {
                return console.log(err);
            } else {
                console.log('Updated!');
            }
        });
    });
} // <- this closing brace was missing in the original snippet, which put the
  //    demo call inside the function body and made the code a syntax error.
searchReplaceFile(/example/g, 'example: 12345678', 'infra_setting.conf');

How to fetch the image files from a server and zip it in sailsjs

I want to zip all the images from s3 urls. I am doing it on server side on sailsjs framework.
I tried using axios to download the images and used 'zipdir'. The images are getting downloaded in temp folder. But its not getting zipped properly.
// Controller entry point: create the temp folder, download the survey's
// images, then zip them, replying with a status object when the waterfall
// settles. NOTE(review): the bugs causing the corrupt zip live in the two
// helpers bound below (see the answer that follows) — the writer events are
// registered outside the axios then() and zipTheFiles calls its callback
// twice, so the waterfall can finish before the archive is written.
this.downloadFiles = function (req, res) {
var resObj = {}
async.waterfall([
this.createFolder.bind(undefined, req),
this.downloadFilesAxios.bind(undefined, req),
this.zipTheFiles.bind(undefined, req)
], function final(err, result) {
if (err) {
console.log('SOME ERROR', err);
resObj.statusCode = err.statusCode || 500;
} else {
resObj.statusCode = 200;
resObj.result = result.questionList;
}
console.log('------', resObj.statusCode)
resObj.messageKey = sails.config.statusCode[resObj.statusCode].key;
resObj.message = sails.config.statusCode[resObj.statusCode].message;
return res.send(resObj);
});
};
// NOTE(review): this brace appears to close an enclosing scope (class or
// constructor) that is outside the quoted fragment.
}
/**
 * Download every document for the survey in req.body.surveyId into ./temp.
 * Node-style: callback(err) on failure, callback(null, obj) once every
 * download has fully flushed to disk.
 */
this.downloadFilesAxios = function (req, obj, callback) {
    SurveyDocs.find({ surveyId: req.body.surveyId })
        .exec(function (err, docsDetails) {
            if (err) { return callback(err); }
            async.map(docsDetails, function (img, cb) {
                const url = img.docS3Url;
                let imageName = img.docFileName;
                const filePath = Path.resolve(__dirname, "temp", imageName);
                const writer = Fs.createWriteStream(filePath);
                Axios({
                    method: 'get',
                    url: url,
                    responseType: 'stream'
                })
                    .then(function (response) {
                        // Attach the stream handlers inside then(): the
                        // original registered them outside, before the
                        // request resolved, so completion was signalled
                        // independently of the actual write.
                        response.data.pipe(writer);
                        writer.on('finish', () => cb(null, null));
                        writer.on('error', (err) => cb(err, null));
                    })
                    // Without this, a failed request was silently swallowed
                    // and the map never completed for that item.
                    .catch(cb);
            }, (err, data) => {
                if (err) {
                    console.log('errrr', err);
                    // Propagate instead of reporting success with missing
                    // files (the original always called back with null).
                    return callback(err);
                }
                callback(null, obj);
            });
        });
};
/**
 * Zip everything under ./temp into assets/zip/<surveyId>.zip and call back
 * only once the archive has been written.
 */
this.zipTheFiles = function (req, obj, callback) {
    var surveyId = req.body.surveyId;
    var tempDir = 'assets/zip/' + surveyId + '.zip'
    zipdir('temp', { saveTo: tempDir }, function (err, buffer) {
        if (err) { return callback(err); }
        callback(null, obj);
    });
    // NOTE: the original also invoked callback(null, obj) synchronously
    // here, so the waterfall finished before zipdir had written anything —
    // that premature second call is the "corrupt zip" culprit; removed.
}
Here I am getting a corrupt zip file. Please suggest the solution.
I tried out your example; there are a few things you need to consider in order to make it work.
const async = require('async');
const fs = require('fs');
const path = require('path');
const zipDir = require('zip-dir');
const axios = require('axios');

let writer;

// Run the three steps in order; each hands its result to the next, and the
// final callback reports either the first error or the last result.
async.waterfall(
    [createFolder, downLoadFileAxios, zip],
    function (err, result) {
        if (err) {
            console.log(err);
        } else {
            console.log('result :', result);
        }
    }
);
Let's assume this method creates the temp folder:
// Stand-in for real directory creation: after a one-second timer, hand the
// directory name back in node-style (err, result) form.
function createFolder(callback) {
    setTimeout(() => callback(null, 'temp'), 1000);
}
Here, the writeStream object and its events should be put inside the then block, so that the stream is written to the file correctly.
Another important thing: you do not have a catch block attached to the promise, so if any exception occurs it will simply be swallowed.
/**
 * Download each sample image into ./newDir, strictly one at a time, then
 * signal completion. callback(err) on the first failure, otherwise
 * callback(null, 'done downloading').
 */
function downLoadFileAxios(dirPath, callback) {
    // Hard coded the images url for the sake of simplicity
    let files = [
        'https://free-images.com/lg/be5e/climbing_helmets_climbing_equipment.jpg',
        'https://free-images.com/lg/87ce/lilac_lilac_bush_lilac.jpg'
    ];
    async.mapSeries(files, function (img, cb) {
        let name = img.slice(img.lastIndexOf('/') + 1);
        let imagePath = path.resolve(__dirname, "newDir", name);
        // One local stream per download: the original assigned to a shared
        // module-level `writer`, inviting cross-download clobbering.
        const writer = fs.createWriteStream(imagePath);
        axios({
            method: 'get',
            url: img,
            responseType: 'stream'
        })
            .then(function (response) {
                // Handlers stay inside then(): writing only starts once the
                // response body is piped.
                response.data.pipe(writer);
                writer.on('finish', () => cb(null, null));
                writer.on('error', (err) => cb(err, null));
            })
            .catch(cb); // don't let request failures vanish silently
    }, function (err, result) {
        if (err) {
            console.log('errrr', err);
            // Propagate: the original reported success even after errors.
            return callback(err);
        }
        callback(null, 'done downloading');
    });
}
// Archive the contents of ./newDir into assets/file.zip, reporting either
// the zip-dir error or 'done' through the node-style callback.
function zip(dirPath, callback) {
    const zipPath = path.resolve(__dirname, "assets", "file.zip");
    // console.log(`got directory path : ${dirPath}`);
    zipDir("newDir", { saveTo: zipPath }, function (err, buffer) {
        if (err) {
            callback(err, null);
        } else {
            callback(null, 'done');
        }
    });
}
This can be easily done using Async/Await like following.
const async = require('async');
const fs = require('fs');
const path = require('path');
const zipDir = require('zip-dir');
const axios = require('axios');

var writer;

// faking the directory creation part
async function createFolder(callback) {
    return new Promise((resolve) => {
        setTimeout(() => resolve(true), 2000);
    });
}

//Executes in the specified order.
(async () => {
    await createFolder();
    await downLoadFile();
    await zipTheFile();
})();
// Download the sample images one after another; each await ensures the
// previous file is fully written before the next request starts.
async function downLoadFile() {
    const files = [
        'https://free-images.com/lg/be5e/climbing_helmets_climbing_equipment.jpg',
        'https://free-images.com/lg/87ce/lilac_lilac_bush_lilac.jpg'
    ];
    for (const file of files) {
        await downLoadFileAxios(file);
    }
}
// Fetch one url as a stream, write it into ./newDir under its original file
// name, and resolve only when the file has been fully flushed to disk.
async function downLoadFileAxios(url) {
    const name = url.slice(url.lastIndexOf('/') + 1);
    const imagePath = path.resolve(__dirname, "newDir", name);
    const writer = fs.createWriteStream(imagePath);

    const response = await axios({ url, method: 'GET', responseType: 'stream' });
    response.data.pipe(writer);

    // Settle on the *writer*'s outcome, not the response's.
    return new Promise((resolve, reject) => {
        writer.on('finish', resolve);
        writer.on('error', reject);
    });
}
// Wrap zip-dir's callback API in a promise so the caller can await it.
function zipTheFile() {
    const zipPath = path.resolve(__dirname, "assets", "file.zip");
    return new Promise((resolve, reject) => {
        zipDir("newDir", { saveTo: zipPath }, (err, buffer) =>
            err ? reject(err) : resolve('done'));
    });
}
Hope this helps!

How to display contents of all files from a folder in console using node.js or express

I couldn't get the contents of all files in a folder using Node.js.
I am getting contents of one file using read function but not all files at once.
I hope this is correct.
const testFolder = './tests/';
const fs = require('fs');

// Print the contents of every file inside testFolder.
fs.readdir(testFolder, (err, files) => {
    if (err) {
        // Without this guard a readdir failure left `files` undefined and
        // the forEach below threw a TypeError.
        return console.log(err);
    }
    files.forEach(file => {
        // Prefix the directory: the original passed the bare file name to
        // readFile, which resolved against the process cwd and failed for
        // anything actually inside ./tests/.
        fs.readFile(testFolder + file, 'utf8', function (err, data) {
            if (err) {
                return console.log(err);
            }
            console.log(data);
        });
    });
})
I got the answer. Here is my solution.
// List ../data/users and log every user record found there.
// NOTE(review): `fops` is a project helper defined elsewhere; presumably it
// reads "<collection>/<key>.json" — confirm against its definition.
function uAll() {
    const absPath = __dirname + "/Assignment1/" + "../data/users/";
    console.log(absPath);
    fs.readdir(absPath, (err, files) => {
        // Bail out early if the directory cannot be scanned.
        if (err) {
            return console.log('Unable to scan directory: ' + err);
        }
        files.forEach((file) => {
            // Each file is "<phone>.<ext>"; the part before the first dot is
            // used as the record key.
            const phone = file.split(".");
            fops.read('users', phone[0], (err, newObj) => {
                if (!err && newObj) {
                    // Read is successful
                    console.log("Read User: ", newObj);
                } else {
                    // Error in reading
                    console.log("User not found");
                }
            });
        });
    });
}
You did a good job. I just want to share my idea.
// Minimal file-backed data-access helpers.
const lib = {};

// Base directory holding one sub-directory per collection.
lib.base = "/Assignment1/" + "../data/users/";

// Read <base>/<dir>/<file>.json and hand back the parsed object.
// Callback signature: (errorFlag, data) — false signals success.
lib.read = function (dir, file, callback) {
    const fullPath = lib.base + dir + '/' + file + '.json';
    fs.readFile(fullPath, 'utf-8', function (err, data) {
        if (!err && data) {
            const parsedData = helpers.parseJsonToObject(data);
            callback(false, parsedData);
        } else {
            callback(err, data);
        }
    });
};

// List the file names (without their .json suffix) inside <base>/<dir>.
lib.list = function (dir, callback) {
    fs.readdir(lib.base + dir + '/', function (err, data) {
        if (!err && data && data.length > 0) {
            const trimmedFileName = data.map((fileName) =>
                fileName.replace('.json', ''));
            callback(false, trimmedFileName);
        } else {
            callback(err, data);
        }
    });
};

Getting the error "Can't set headers after they are sent" when reading files from a directory?

I am trying to get the name and created date of the files. In the code below it throws error when I call the api. It is reading the directory and printing all the file names but it's not sending back to callback. Any idea what is implemented wrong?
service.js
var fs = require('fs');
var path = require('path');
var async = require('async');
// NOTE(review): the module-level state below is shared across requests —
// `objToReturn` in particular accumulates results from every call to
// readDirectory (see the answer further down); it should live inside the
// function instead.
var currentDate = new Date(); // unused in the code shown
var objToReturn = [];
var logsDirectory = './logs'
// NOTE(review): broken as posted — the "final" handler starting at the
// lone `},` below is passed as a *third argument to fs.readdir* (where it
// is ignored) rather than as the completion callback of async.eachSeries,
// so `callback` is never invoked. The corrected version appears in the
// answer below.
function readDirectory(env, callback) {
fs.readdir(logsDirectory + '/' + env, function(err, files) {
// loop through each file
async.eachSeries(files, function(file, done) {
var dirPath = logsDirectory + '/' + env;
var filePath = path.join(dirPath, file);
var fileInfo = {};
fs.stat(filePath, function(err, stats) {
if (err) {
// done() is never called on this path, stalling the series.
console.info("File doesn't exist");
} else {
fileInfo.fileDate = stats.birthtime;
fileInfo.filename = file;
objToReturn.push(fileInfo);
done();
}
});
});
},
// This function was intended as eachSeries' completion handler but is
// actually an extra argument to fs.readdir.
function(err) {
if (err) {
console.info('error', err);
return;
}
// when you're done reading all the files, do something...
console.log('before Callback', objToReturn);
callback(objToReturn);
});
}
exports.readDirectory = readDirectory;
app.js
var stDirectory = require('./app/serverfiles/stDir');
app.get('/getAllFiles',function(req,res){
var env = req.query.env
console.log('printing',env);
stDirectory.readDirectory(env,function(files){
res.json(files);
console.log('Api files',files);
});
});
There are a few issues:
instead of passing the "final" handler to async.eachSeries(), you're passing it to fs.readdir(), so callback will never get called;
you're declaring objToReturn outside of the function, which isn't a good idea because multiple requests could be handled in parallel;
you're not handling any errors properly;
you should really use the Node.js callback idiom of calling callbacks with two arguments, the first being errors (if there are any) and the second being the result of the asynchronous operation.
The code below should fix these issues:
// Collect { filename, fileDate } for every file in <logsDirectory>/<env>
// and report the list through a node-style callback(err, results).
function readDirectory(env, callback) {
    let objToReturn = [];
    const dirPath = logsDirectory + "/" + env;

    fs.readdir(dirPath, function (err, files) {
        if (err) return callback(err);

        // Stat the files one at a time, accumulating their metadata.
        async.eachSeries(files, function (file, done) {
            const filePath = path.join(dirPath, file);
            fs.stat(filePath, function (err, stats) {
                if (err) {
                    console.info("File doesn't exist");
                    return done(err);
                }
                objToReturn.push({ fileDate: stats.birthtime, filename: file });
                done();
            });
        }, function (err) {
            if (err) {
                console.info("error", err);
                return callback(err);
            }
            // when you're done reading all the files, do something...
            console.log("before Callback", objToReturn);
            callback(null, objToReturn);
        });
    });
}
// To call it:
stDirectory.readDirectory(env, function (err, files) {
    // Guard clause: any error becomes a plain 500 response.
    if (err) {
        return res.sendStatus(500);
    }
    res.json(files);
    console.log('Api files',files);
});
You should also consider using async.mapSeries() instead of async.eachSeries() and using a separate array (objToReturn).

Bulk upload attachments to couchDB using node.js nano module

I'm trying to bulk upload attachments to CouchDB using node.js and nano.
First, the walk module is used to find all files in upload folder and create array from them.
Next, each file from the array is supposed to be inserted into CouchDB via pipe and nano module.
However, the final result is that only one attachment has been uploaded.
// NOTE(review): broken as posted — forEach fires every alice.get() in
// parallel, so each upload reads the *same* _rev of the rabbit document;
// after the first attachment insert bumps the rev, every remaining insert
// fails with a conflict (hence only one attachment lands). The sequential
// rewrite below fixes this.
var nano = require('nano')('http://localhost:5984')
var alice = nano.use('alice');
var fs = require('fs');
var walk = require('walk');
var files = [];
// Walker options
var walker = walk.walk('./uploads', {
followLinks: false
});
// find all files and add to array
walker.on('file', function (root, stat, next) {
files.push(root + '/' + stat.name);
next();
});
walker.on('end', function () {
// files array ["./uploads/2.jpg","./uploads/3.jpg","./uploads/1.jpg"]
files.forEach(function (file) {
//extract file name
fname = file.split("/")[2]
// All of these gets run concurrently with a stale rev — see note above.
alice.get('rabbit', {revs_info: true}, function (err, body) {
fs.createReadStream(file).pipe(
alice.attachment.insert('rabbit', fname, null, 'image/jpeg', {
rev: body._rev
}, function (err, body) {
if (!err) console.log(body);
})
)
});
});
});
This is because you are mixing an asynchronous api with assumptions of this being synchronous.
After the first request you will get conflicts, cause the rabbit document has changed.
Can you confirm this using NANO_ENV=testing node yourapp.js?
I recommend using async if this is the problem
var nano = require('nano')('http://localhost:5984')
var alice = nano.use('alice');
var fs = require('fs');
var walk = require('walk');
var files = [];

// Walker options
var walker = walk.walk('./uploads', { followLinks: false });

// First collect every file path...
walker.on('file', function (root, stat, next) {
    files.push(root + '/' + stat.name);
    next();
});

// ...then start uploading them strictly one after another.
walker.on('end', function () {
    series(files.shift());
});
// Defer callback(arg) by 100ms to simulate an asynchronous step.
function async(arg, callback) {
    setTimeout(function () {
        callback(arg);
    }, 100);
}

// Invoked once the whole queue has been drained.
function final() {
    console.log('Done');
}
// Upload the queued files strictly one at a time: the next item is only
// dequeued after the previous attachment insert has completed, so each
// alice.get() sees the rabbit document's latest _rev (no update conflicts).
function series(item) {
    if (!item) {
        return final();
    }
    async(item, function (result) {
        var fname = item.split("/")[2]; // was an implicit global in the original
        alice.get('rabbit', { revs_info: true }, function (err, body) {
            if (err) {
                // Keep draining the queue even if one get fails; the
                // original simply stalled here and never reached final().
                console.log(err);
                return series(files.shift());
            }
            fs.createReadStream(item).pipe(
                alice.attachment.insert('rabbit', fname, null, 'image/jpeg', {
                    rev: body._rev
                }, function (err, body) {
                    if (!err) console.log(body);
                    // Recurse only once this upload has finished. The
                    // original recursed right after *starting* the get, so
                    // consecutive uploads still raced each other's revs.
                    series(files.shift());
                })
            );
        });
    });
}

Categories