How to receive multiple requests with Express.JS - javascript

I'm writing an Angular 6 + Express.JS app and now I stuck with the following problem: when there are some multiple requests made at the same time, sometimes (especially when there are more than 4 requests) all of them response with 404 or even get cancelled. Is there any problem with the way I handle requests in Express or I should add some tweaks for concurrent requests?
Requests:
let requests = [];
files.forEach((file) => {
if (file.type.toLowerCase().includes('zip')) {
requests.push(this.imagesService.uploadArchive(file).pipe(first()));
} else {
requests.push(this.imagesService.saveImage(file).pipe(first()));
}
});
forkJoin(requests).subscribe(
(res) => res.forEach(response => {
this.onSave.emit(response);
}),
(error) => {
console.error(error);
},
() => {
this.close.emit();
}
);
Express handling routes:
router.post('/images',
formidable({
encoding: 'utf-8',
uploadDir: path.resolve(__dirname, '..', '..', 'uploads'),
multiples: true,
keepExtensions: true
}),
(req, res, next) => {
const image = req.fields;
const data = req.files;
image.path = data.image.path;
const file = fs.createReadStream(image.path);
saveImage(image).then(
result => {
if (result) {
res.status(200).send(result);
} else {
console.error("Cannot save image");
res.status(400).send("Cannot save image");
}
}).catch(e => console.error(e.stack));
});
Responses:
UPDATE
router.post('/archives',
formidable({
encoding: 'utf-8',
uploadDir: path.resolve(__dirname, '..', '..', 'uploads'),
multiples: true,
keepExtensions: true
}),
(req, res, next) => {
const data = req.files;
let promises = [];
fs.readFile(data.archive.path, async (err, archive) => {
if (err) throw err;
await extractImagesFromZip(archive, data.archive.path).then((images) =>
images.forEach((image) => {
promises.push(
saveImage(image).then(
result => {
if (result) {
result.path = result.path.split('/').pop();
return result;
} else {
console.error("Cannot save image " + image.name);
fs.unlink(image.path, () => {});
}
}).catch(e => {
fs.unlink(image.path, () => {});
console.error(e.stack)
})
);
})
);
Promise.all(promises)
.then((result) => {
if (result.length > 0) {
res.status(200).send(result)
} else {
res.status(400).send("None images were saved")
}
}).catch((error) => {
console.log(error.stack);
res.status(400).send("None images were saved")
});
});
}
);
export const extractImagesFromZip = (file, link) => {
let promises = [];
var zip = new JSZip();
return zip.loadAsync(file)
.then((archive) => {
Object.values(archive.files).filter(
f =>
['.jpg', '.jpeg', '.png'].some((suffix) => f.name.toLowerCase().endsWith(suffix))
&& ![...f.name.toLowerCase().split('/')].pop().startsWith('.')
&& !f.dir
).forEach(f => promises.push(zip.file(f.name).async('nodebuffer').then((content) => {
const ext = f.name.split('.').pop().toLowerCase();
var dest = path.resolve(__dirname, '..', '..') + '/uploads/upload_'
+ crypto.randomBytes(Math.ceil(1322)).toString('hex').slice(0, 32).toLowerCase()
+ '.' + ext;
return new Promise((res, rej) => {
fs.writeFile(dest, content, (err) => {
if (err) rej(err);
res(new Promise((resolve, reject) => {
fs.readFile(dest, (erro, data) => {
if (erro) reject(erro);
if (data) resolve({
name: f.name,
type: 'image/' + (ext === 'jpg' ? 'jpeg' : ext),
path: dest
});
});
}));
});
});
})));
fs.unlink(link, () => {});
return Promise.all(promises);
});
}
export const saveImage = (image) => {
return database.raw(
"INSERT INTO images (name, type, path) " +
"VALUES (?, ?, ?) " +
"RETURNING name, type, path, id",
[image.name, image.type, image.path]
).then(data => data.rows[0]).catch(e => console.error(e.stack));
};
UPDATE 2
Everything works fine if user and server are on localhost (regardless server running with nginx or without it), but problem appears when server is remote

Such code worked
public async uploadFiles(files: File[]) {
of(files)
.pipe(
concatMap(files =>
files.map(file => {
return this.imagesService
.saveImage(file)
.pipe(
map(),
catchError((error, caught) => {
console.error(error);
return empty();
})
);
})
),
concatAll(),
toArray(),
map(res => console.log)
)
.subscribe();
}

Related

Node.js AWS Lambda putObject doesn't seem to finish

I have a Lambda function that is meant to download a directory of files from s3, convert them, delete the old files, and upload the new output files back to s3. The output for each file will be at least one file and a folder.
Everything seems to be working as intended, except for the upload. No errors are thrown, it just ends without putting.
I'm a novice, so feel free to point out I've done it all wrong.
exports.handler = async ({ dirName }) => {
// const jsonIn = JSON.parse(event.body);
// const dirName = jsonIn.dirName;
const localDir = `/tmp/${dirName}`;
const params = {
Bucket: 'to-pdf-test',
Delimiter: '/',
Prefix: dirName + '/',
StartAfter: dirName + '/'
};
var s3List;
var localList = [];
execSync(`mkdir ${localDir}`);
try {
s3List = await s3.listObjectsV2(params).promise();
} catch (e) {
throw e;
}
await Promise.all(
s3List.Contents.map(async (file) => {
let f = await getFiles(file);
localList.push(f);
})
).then(res => {console.log('Get Successful' + res) } )
.catch(err => {console.log('error' + err) } );
await Promise.all(
localList.map(async (file) => {
convertFile(file);
})
).then(res => {console.log('Convert Successful' + res) } )
.catch(err => {console.log('error' + err) } );
dirSync(localDir, async (filePath, stat) => {
let bucketPath = filePath.substring(5);
let uploadParams = { Bucket: 'to-pdf-test',
Key: `${bucketPath}`,
Body: fs.readFileSync(filePath) };
console.log('DS fPath ' + filePath);
console.log('DS bPath ' + bucketPath);
console.log(uploadParams.Body);
try {
let res = await s3.putObject(uploadParams).promise();
console.log('Upload Complete', res);
} catch (e) {
console.log('Error', e);
}
});
};
async function getFiles(file) {
let filePath = `/tmp/${file.Key}`;
let fileParams = {
Bucket: 'to-pdf-test',
Key: file.Key
};
try {
const { Body: inputFileBuffer } = await s3.getObject(fileParams).promise();
fs.writeFileSync(filePath, inputFileBuffer);
} catch (e) {
throw (e);
}
return filePath;
}
function convertFile(file) {
const noPath = getFilename(file);
const fPath = getFilePath(file);
if (path.extname(noPath) === '.msg') {
execSync(`cd ${fPath} && ${command} ${noPath}`);
} else {
console.log(`${noPath} not run. Not .msg`);
}
fs.unlinkSync(file);
}
function getFilename(fullPath) {
return fullPath.replace(/^.*[\\\/]/, '');
}
function getFilePath(fullPath) {
return fullPath.substring(fullPath.lastIndexOf('/'), 0);
}
function dirSync(dirPath, callback) {
fs.readdirSync(dirPath).forEach((name) => {
var filePath = path.join(dirPath, name);
var stat = fs.statSync(filePath);
if (stat.isDirectory()) {
dirSync(filePath, callback);
} else {
callback(filePath, stat);
}
});
}
I had the upload working in a previous version of this function, so thanks to this post for when it was working.
My solution for the moment - Read the local directory separately, push the paths of the files to localList then .map the array with all the paths to upload them.
localList = [];
//read dir and push to localList array
await dirSync(localDir, (filePath, stat) => {
localList.push(filePath);
});
console.log(localList);
await Promise.all(
localList.map( async (file) => {
let bucketPath = file.substring(5);
let uploadParams = {
Bucket: 'to-pdf-test',
Key: bucketPath,
Body: fs.readFileSync(file) };
console.log('Uploading', file);
await s3.putObject(uploadParams).promise()
.then((res) => {console.log('Upload Successful', bucketPath) } )
.catch((err) => {console.log('error' + err) } );
})
);
If there is better (or proper) way to do this, someone let me know :)

Mapping data form database to an array of objects in JS

I have this code in App.js
const getPlayers = async()=>{
const players = await API.getPlayers();
setPlayers(players)
}
getPlayers()
This code in my API.js file
const getPlayers = async () => {
return getJson(
fetch(SERVER_URL + 'users', { credentials: 'include'})
).then( json => {
return json.map((user) => {
return {
id: user.id,
name: user.name,
rank: user.rank
}
})
})
}
This code in my server.js file
app.get('/api/players',
(req, res) => {
riddleDao.getPlayers()
.then(async players => {
res.json(players)
})
.catch((err) => res.status(500).json(err));
});
and finally, this in my DataAccessObject.js file
exports.getPlayers = () => {
return new Promise((resolve, reject) => {
const sql = 'SELECT * FROM users';
db.all(sql, [], (err, rows) => {
if (err) { reject(err); return; }
else {
const players = rows.map(row => {
return {
id: row.id,
name: row.name,
rank: row.rank
}
})
resolve(players);
}
});
});
};
but i am getting this error:
I am expecting to get an array of object in my App.js when i call the getPlayer() function and the objects in the array should have id, name and rank of the players in my db table
I think you've got "users" in your fetch URL when it should be "players".
fetch(SERVER_URL + 'users', { credentials: 'include'})
should be
fetch(SERVER_URL + 'players', { credentials: 'include'})
your api endpoint differs from the url you are sending requests
app.get('/api/players',
you are listening to "players" but
fetch(SERVER_URL + 'users', { credentials: 'include'})
you are fetching "users"

converting req.body to an array

I have a file input to upload multiple images and saving them to MongoDB, but the images uploaded to me as a string, I need to upload them with an array. I update it with the server code...
router.post('/multiple-upload',uploadImages,resizeImages,getResultImages , (req, res, next) => {
const thing = new Thing({
filename: req.body.filename,
images: req.body.images
});
thing.save().then(
() => {
res.status(201).json({
message: 'Post saved successfully!'
});
}
).catch(
(error) => {
res.status(400).json({
error: error
});
}
);
});
const uploadFiles = upload.array('images', 10);
exports.uploadImages = (req, res, next) => {
uploadFiles(req, res, (err) => {
if (err instanceof multer.MulterError) {
if (err.code === 'LIMIT_UNEXPECTED_FILE') {
return res.send('Too many files to upload !');
}
} else if (err) {
return res.send(err);
}
next();
});
};
exports.resizeImages = async (req, res, next) => {
if (!req.files) return next();
req.body.images = [];
await Promise.all(
req.files.map(async (files,i) => {
// const filename = files.originalname.replace(/\..+$/, '');
const newFilename = `tour-${files.originalname}-${Date.now()}-${i + 1}.jpeg`;
await sharp(files.buffer)
.resize(640, 320)
.toFormat('jpeg')
.jpeg({ quality: 90 })
.toFile(`upload/${newFilename}`);
req.body.images.push(newFilename);
// console.log(req.body.images);
})
);
next();
};
exports.getResultImages = async (req, res,next) => {
if (req.body.images.length <= 0) {
return res.send(`You must select at least 1 image !`);
}
const images = req.body.images.map((image) => '' + image + '').join(', ');
// res.status(200).json(req.body.images)
req.body.images = images;
res.locals.images = images;
console.log();
next();
return res.json(req.body.files);
// res.status(200).json({
// data:{
// images
// }
// });
};
I have a file input to upload multiple images and saving them to MongoDB, but the images uploaded to me as a string, I need to upload them with an array. I update it with the server code...
it works with me by deleting join(', ')
in this line const images = req.body.images.map((image) => '' + image + '').join(', ');

Extract matching row by comparing two CSV file in NodeJs

The scenario is I have two large CSV files csv1.csv and csv2.csv. In both the files, there is an email column and I have to read csv1.csv row by row and check if the email exists in csv2.csv and if matches write the row of csv2.csv in csv3.csv. I have tried read stream as well but it is not working as expected. Any guidance or help is appreciated.
Thanks to all in advance.
Following are the CSV files
csv1.csv
email,header1,header2
test1#example.com,test1,test1
test2#example.com,test2,test2
test3#example.com,test3,test3
test4#example.com,test4,test4
test5#example.com,test5,test5
csv2.csv
email,header1,header2
test4#example.com,test4,test4
test5#example.com,test5,test5
test6#example.com,test6,test6
test7#example.com,test7,test7
test8#example.com,test8,test8
Following is the code that I tried
const fs = require('fs');
const csv = require('fast-csv')
class CsvHelper {
static write(filestream, rows, options) {
return new Promise((res, rej) => {
csv.writeToStream(filestream, rows, options)
.on('error', err => rej(err))
.on('finish', () => res());
});
}
constructor(opts) {
this.headers = opts.headers;
this.path = opts.path;
this.writeOpts = {
headers: this.headers,
includeEndRowDelimeter: true
};
}
create(rows) {
return CsvHelper.write(fs.createWriteStream(this.path, { flags: 'a' }), rows, { ...this.writeOpts });
}
append(rows) {
return CsvHelper.write(fs.createWriteStream(this.path, { flags: 'a' }), rows, {
...this.writeOpts,
writeHeaders: false,
});
}
}
class Helper {
async matchCsv (outerRow) {
try {
const filePath2 = "csv2.csv";
const filePath3 = "csv3.csv";
let row = [];
const csvFile = new CsvHelper({
path: filePath3,
headers: ["Email", "Active"]
});
return new Promise((resolve, reject) => {
fs.createReadStream(filePath2)
.on("error", err => {
reject(err);
})
.pipe(csv.parse({headers: true}))
.on("error", err => {
reject(err);
})
.on("data", async innerRow => {
if(outerRow["email"] === innerRow["email"]) {
console.log("====================");
console.log("match found");
console.log(innerRow);
console.log("====================");
row.push([innerRow["email"], "yes"]);
console.log("row: ", row);
}
})
.on("finish", async() => {
if (!fs.existsSync(filePath3)) {
await csvFile.create(row).then(() => {
resolve("Done from matchCsv");
})
} else {
await csvFile.append(row).then(() => {
resolve("Done from matchCsv");
})
}
})
});
} catch (err) {
throw(err);
}
}
async generateCsv () {
try {
const filePath1 = "csv1.csv";
return new Promise((resolve, reject) => {
fs.createReadStream(filePath1)
.on("error", err => {
reject(err);
})
.pipe(csv.parse({headers: true}))
.on("error", err => {
reject(err);
})
.on("data", async outerRow => {
const result = await this.matchCsv(outerRow);
console.log("result: ", result);
})
.on("finish", () => {
resolve("Generated csv3.csv file.");
});
});
} catch (err) {
throw(err);
}
}
}
async function main() {
const helper = new Helper();
const result = await helper.generateCsv()
console.log(result);
}
main();
So the question is a little confusing, but I think I know what you want. Here's what I would do to check if the email exists. It will add all the rows to an array, cycle through them, then if the email address matches the email you're looking for, it will do something else... I think you said you wanted to write to a csv file again with the row, but that should be simple enough.
const csv = require('csv-parser');
const fs = require('fs');
const filepath = "./example_data.csv";
const emailAdd = "myemail#email.com";
var rowsArr = [];
fs.createReadStream(filepath)
.on('error', () => {
// handle error
})
.pipe(csv())
.on('data', (row) => {
rowsArr.push(row);
})
.on('end', () => {
for (var i = 0; i <= rowsArr.length; i++) {
if (rowsArr[i].emailAddress == emailAdd) {
//do something
}
}
})

How to fetch the image files from a server and zip it in sailsjs

I want to zip all the images from s3 urls. I am doing it on server side on sailsjs framework.
I tried using axios to download the images and used 'zipdir'. The images are getting downloaded in temp folder. But its not getting zipped properly.
this.downloadFiles = function (req, res) {
var resObj = {}
async.waterfall([
this.createFolder.bind(undefined, req),
this.downloadFilesAxios.bind(undefined, req),
this.zipTheFiles.bind(undefined, req)
], function final(err, result) {
if (err) {
console.log('SOME ERROR', err);
resObj.statusCode = err.statusCode || 500;
} else {
resObj.statusCode = 200;
resObj.result = result.questionList;
}
console.log('------', resObj.statusCode)
resObj.messageKey = sails.config.statusCode[resObj.statusCode].key;
resObj.message = sails.config.statusCode[resObj.statusCode].message;
return res.send(resObj);
});
};
}
this.downloadFilesAxios = function (req, obj, callback) {
SurveyDocs.find({ surveyId: req.body.surveyId })
.exec(function (err, docsDetails) {
async.map(docsDetails, function (img, cb) {
const url = img.docS3Url;
let imageName = img.docFileName;
const path = Path.resolve(__dirname, "temp", imageName);
const writer = Fs.createWriteStream(path)
Axios({
method: 'get',
url: url,
responseType: 'stream'
})
.then(function (response) {
response.data.pipe(writer)
})
writer.on('finish', (done) => {
console.log('success!!!');
cb(null, null)
});
writer.on('error', (err) => {
console.log('failed!!!');
cb(err, null)
});
}, (err, data) => {
if (err) {
console.log('errrr', err);
}
callback(null, obj);
});
})
};
this.zipTheFiles = function (req, obj, callback) {
var surveyId = req.body.surveyId;
var tempDir = 'assets/zip/' + surveyId + '.zip'
zipdir('temp', { saveTo: tempDir }, function (err, buffer) {
callback(null, obj);
});
callback(null, obj);
}
Here I am getting a corrupt zip file. Please suggest the solution.
I tried out your example there are a few things you need to consider in order to make it work.
const async = require('async');
const fs = require('fs');
const path = require('path');
const zipDir = require('zip-dir');
const axios = require('axios');
let writer;
async.waterfall([
createFolder,
downLoadFileAxios,
zip
], function (err, result) {
if (err) {
console.log(err);
} else {
console.log('result :', result);
}
});
let's assume this method creates the temp folder
function createFolder(callback) {
setTimeout(function() {
callback(null, 'temp');
}, 1000);
}
Here the writeStream object and it's events should be put inside the then block. So that it writes the stream to the file correctly.
Another important thing here is you are not having a cath block attached the promise, so if any exception occurs it will be simply eaten up.
function downLoadFileAxios(dirPath, callback) {
// Hard coded the images url for the sake of simplicity
let files = [
'https://free-images.com/lg/be5e/climbing_helmets_climbing_equipment.jpg',
'https://free-images.com/lg/87ce/lilac_lilac_bush_lilac.jpg'
];
async.mapSeries(files, function(img, cb) {
let name = img.slice(img.lastIndexOf('/') + 1);
let imagePath = path.resolve(__dirname, "newDir", name);
writer = fs.createWriteStream(imagePath);
axios({
method: 'get',
url: img,
responseType: 'stream'
}).
then(function(response) {
response.data.pipe(writer);
writer.on('finish', (done) => {
console.log('success!!!');
cb(null, null)
});
writer.on('error', (err) => {
console.log('failed!!!');
cb(err, null)
});
})
.catch((err) => {
console.log(err);
})
}, function(err, result) {
if (err) {
console.log('errrr', err);
}
callback(null, 'done downloading');
})
}
function zip (dirPath, callback) {
let zipPath = path.resolve(__dirname, "assets", "file.zip");
// console.log(`got directory path : ${dirPath}`);
zipDir("newDir", {
saveTo: zipPath
}, function(err, buffer) {
if(err) {
callback(err, null);
} else {
callback(null, 'done');
}
});
}
This can be easily done using Async/Await like following.
const async = require('async');
const fs = require('fs');
const path = require('path');
const zipDir = require('zip-dir');
const axios = require('axios');
var writer;
// faking the directory creation part
async function createFolder(callback) {
return new Promise((resolve, reject) => {
setTimeout(() => {
resolve(true);
}, 2000);
});
}
//Executes in the specified order.
(async () => {
await createFolder();
await downLoadFile();
await zipTheFile();
})();
async function downLoadFile() {
let files = [
'https://free-images.com/lg/be5e/climbing_helmets_climbing_equipment.jpg',
'https://free-images.com/lg/87ce/lilac_lilac_bush_lilac.jpg'
];
for(let i= 0; i<files.length; i++) {
await downLoadFileAxios(files[i]);
}
}
async function downLoadFileAxios(url) {
let name = url.slice(url.lastIndexOf('/') + 1);
let imagePath = path.resolve(__dirname, "newDir", name);
let writer = fs.createWriteStream(imagePath);
const response = await axios({
url,
method: 'GET',
responseType: 'stream'
})
response.data.pipe(writer)
return new Promise((resolve, reject) => {
writer.on('finish', resolve)
writer.on('error', reject)
})
}
function zipTheFile () {
let zipPath = path.resolve(__dirname, "assets", "file.zip");
return new Promise((resolve, reject) => {
zipDir("newDir", {
saveTo: zipPath
}, function(err, buffer) {
if(err) {
return reject(err);
}
return resolve('done');
});
})
}
Hope this helps!.

Categories