Below is my API code. It receives two image fields: one containing a single image and the other containing up to 5 images. I then use a hosting service called Cloudinary to upload them and receive a result that includes the URL of each uploaded image. I am trying to push those results into urlArray so I can save them to the database later. However, the database code executes before everything else, and when I log urlArray at the end, it is just an empty array. What am I doing wrong here?
const upload = multer({
storage: multer.diskStorage({
destination: "./public/uploads",
filename: (req, file, cb) => cb(null, file.originalname),
}),
});
const apiRouter = nc({
onNoMatch(req, res) {
res.status(405).json({ error: `method ${req.method} not allowed` });
},
});
const arrayOfImages = upload.fields([
{ name: "cardImage", maxCount: 1 },
{ name: "images", maxCount: 5 },
]);
apiRouter.post(arrayOfImages, (req, res) => {
const urlArray = [];
let cardImage = "";
const imagesArray = req.files["images"];
cloudinary.uploader.upload(
req.files["cardImage"][0].path,
{ folder: "cardImages" },
async (err, result) => {
if (err) console.log(err);
cardImage = result.secure_url;
console.log(`this is the first call ${cardImage}`);
fs.unlinkSync(req.files["cardImage"][0].path);
}
);
for (let i = 0; i < imagesArray.length; i++) {
cloudinary.uploader.upload(
imagesArray[i].path,
{ folder: "Images" },
async (err, result) => {
if (err) console.log(err);
urlArray.push(result.secure_url);
fs.unlinkSync(req.files["images"][i].path);
// TODO : need to implement the data save to database
}
);
}
dataBaseConnection();
const userItem = new Item({
shortDescription: req.body.shortDescription,
longDescription: req.body.longDescription,
price: req.body.itemPrice,
cardImage: cardImage,
images: urlArray,
});
userItem.save((err) => {
if (err) console.log(err);
return console.log("your prodcut has been added to database.");
});
console.log(urlArray);
console.log(`this is the second call ${cardImage}`);
res.redirect("/dashboard");
});
export default apiRouter;
export const config = {
api: {
bodyParser: false, // Disallow body parsing, consume as stream
},
};
I am not familiar with the Cloudinary API, nor have I run this code; however, assuming upload returns a promise (which it looks like it does), you can just await the results like so:
Note: you may have to inspect the result after the await to make sure you are pulling the attribute out the right way. I am unfamiliar with the API, but this is the general idea.
Also note that you may have to move your error checking out to the result object; my example assumes the upload goes through successfully.
apiRouter.post(arrayOfImages, async(req, res) => { //<< ---- Notice the async
const urlArray = [];
let cardImage = "";
const imagesArray = req.files["images"];
let result = await cloudinary.uploader.upload( //<<-- NOTE: await here
req.files["cardImage"][0].path,
{ folder: "cardImages" },
async (err, result) => {
if (err) console.log(err);
fs.unlinkSync(req.files["cardImage"][0].path);
}
)
cardImage = result.secure_url; //<<-- pull out the value you need from result after await
for (let i = 0; i < imagesArray.length; i++) {
let uploadResult = await cloudinary.uploader.upload( //<<-- NOTE: await here (renamed from res so it doesn't shadow the Express res)
imagesArray[i].path,
{ folder: "Images" },
async (err, result) => {
if (err) console.log(err);
fs.unlinkSync(req.files["images"][i].path);
}
);
urlArray.push(uploadResult.secure_url); //<<-- pull out the value you need from result after await
}
dataBaseConnection();
const userItem = new Item({
shortDescription: req.body.shortDescription,
longDescription: req.body.longDescription,
price: req.body.itemPrice,
cardImage: cardImage,
images: urlArray,
});
userItem.save((err) => {
if (err) console.log(err);
return console.log("your prodcut has been added to database.");
});
console.log(urlArray);
console.log(`this is the second call ${cardImage}`);
res.redirect("/dashboard");
});
So what happened here?
Since the upload function is asynchronous, the code after the first upload and after the loop executes before the uploads actually complete. By using the await keyword you wait for each promise to resolve before moving on.
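As a further refinement (my suggestion, not part of the answer above): the uploads run one at a time because each await pauses the loop. Assuming cloudinary.uploader.upload returns a promise when called without a callback, you can start them all and wait once with Promise.all, something like:

// Sketch only: imagesArray as above; upload() assumed to return a promise
const uploads = imagesArray.map((image) =>
  cloudinary.uploader.upload(image.path, { folder: "Images" })
);
const results = await Promise.all(uploads); // resolves when every upload is done
const urlArray = results.map((result) => result.secure_url);
imagesArray.forEach((image) => fs.unlinkSync(image.path)); // clean up temp files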
Currently I'm working on a project where I'm trying to build a service in Express which calls two other external endpoints. I'm having trouble here, because Express shows me an error that I can't understand, but I suspect the way I'm doing it is wrong.
So
app.get("/items/:id", (req, res) => {
return request.get({
url: `https://myapi.com/${req.params.id}`,
json: true
},
(error, response) => {
if(error) {
return res.send("Error ocurred");
}
const itemDesc = request.get({ // Here I'm trying to do the second call and use it later
url: `https://myapi.com/${req.params.id}/description`,
json: true
},
(error, responseDesc) => {
return responseDesc
});
const itemDetails = response.body;
const strPrice = itemDetails.price.toString().split('.');
const numberPrice = parseInt(strPrice[0]);
const floatPrice = strPrice[1] ? parseInt(strPrice[1]) : 0;
return res.send({
id: itemDetails.id,
title: itemDetails.title,
price: {
currency: itemDetails.currency_id,
amount: numberPrice,
decimals: floatPrice,
},
picture: itemDetails.pictures[0].url,
condition: itemDetails.condition,
free_shipping: itemDetails.shipping.free_shipping,
sold_quantity: itemDetails.sold_quantity,
description: itemDesc // Here I'm using the variable of the previous request
});
});
});
Basically, the error I get is that I can't do two calls. I know that because if I remove the nested request, it works.
My question is: Is there any way to do two external request inside the same method?
Thanks in advance
It's cleaner if you do it with async/await in your case.
Modify your code like this:
app.get("/items/:id", async(req, res) => {
try {
// Note: on Node versions without a global fetch (pre-18) you may need e.g. node-fetch
const promise1 = fetch(`https://myapi.com/${req.params.id}`).then(data => data.json())
const promise2 = fetch(`https://myapi.com/${req.params.id}/description`).then(data => data.json()) // parse this one too, or itemDesc will be a raw Response
const [itemDetails, itemDesc] = await Promise.all([promise1, promise2])
const strPrice = itemDetails.price.toString().split('.');
const numberPrice = parseInt(strPrice[0]);
const floatPrice = strPrice[1] ? parseInt(strPrice[1]) : 0;
res.send({
id: itemDetails.id,
title: itemDetails.title,
price: {
currency: itemDetails.currency_id,
amount: numberPrice,
decimals: floatPrice,
},
picture: itemDetails.pictures[0].url,
condition: itemDetails.condition,
free_shipping: itemDetails.shipping.free_shipping,
sold_quantity: itemDetails.sold_quantity,
description: itemDesc // Here I'm using the variable of the previous request
});
} catch (err) {
res.send("Error occurred")
}
});
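One caveat worth adding (my note, not part of the answer above): Promise.all rejects as soon as either request fails, so you end up in the catch block even if the other call succeeded. If you want whichever result did arrive, Promise.allSettled is an option, roughly:

const [detailsResult, descResult] = await Promise.allSettled([promise1, promise2])
if (detailsResult.status === "fulfilled") {
  const itemDetails = detailsResult.value
  // fall back to null if only the description call failed
  const itemDesc = descResult.status === "fulfilled" ? descResult.value : null
  // ...build and send the response as above...
}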
I am trying to save the values returned from the Indeed API to JSON. I use the indeed-scraper code from GitHub: https://github.com/rynobax/indeed-scraper
My code:
... required files ...
const parsedResults = []
indeed.query(queryOptions).then(response => {
response.forEach((res,i) => {
setTimeout(function(){
let url = res.url
let resultCount = 0
console.log(`\n Scraping of ${url} initiated...\n`)
const getWebsiteContent = async (url) => {
try {
const response = await axios.get(url)
const $ = cheerio.load(response.data)
...get scraped data...
parsedResults.push(metadata)
} catch (error) {
exportResults(parsedResults)
console.error(error)
}
}
getWebsiteContent(url)
}, i * 3000);
});
});
const outputFile = 'data.json'
const fs = require('fs');
const exportResults = (parsedResults) => {
fs.writeFile(outputFile, JSON.stringify(parsedResults, null, 4), (err) => {
if (err) {
console.log(err)
}
console.log(`\n ${parsedResults.length} Results exported successfully to ${outputFile}\n`)
})
}
parsedResults is not accessible in the last portion of the script, so I can't save it to a JSON file.
Any help appreciated!
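For what it's worth, here is one way this could be restructured so the export happens only after all the scraping finishes. This is a minimal sketch under the same assumptions as the code above (axios, cheerio, queryOptions, metadata, and exportResults are as in the question; the delay helper is added here to keep the 3-second spacing):

const delay = (ms) => new Promise((resolve) => setTimeout(resolve, ms))

indeed.query(queryOptions).then(async (response) => {
  const parsedResults = []
  for (const res of response) {
    console.log(`\n Scraping of ${res.url} initiated...\n`)
    try {
      const page = await axios.get(res.url)
      const $ = cheerio.load(page.data)
      // ...get scraped data into metadata as before...
      parsedResults.push(metadata)
    } catch (error) {
      console.error(error)
    }
    await delay(3000) // throttle requests, as the original setTimeout did
  }
  exportResults(parsedResults) // runs only after every page has been scraped
})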
Hello All!
I want to store users in a folder as files, where each file name is equal to the user_id.
data
| - users
| - afdcab7e-b595-4a15-be0f-5f0337bd1317.json
| - fdfacb7i-bk00-4a15-be0f-5f0337b1d991.json
Each user has their own data for example
{
"_id": "afdcab7e-b595-4a15-be0f-5f0337bd1317",
"email": "test1#test.pl",
"password": "$2a$12$nIoudV7eXmJbU7e/P6YCbOccUkTbp8tcQKhyCEfmNOLihrW6QqPTC"
}
{
"_id": "fdfacb7i-bk00-4a15-be0f-5f0337b1d991",
"email": "test2#test.pl",
"password": "$2a$12$nIoudV7eXmJbU7e/P6YCbOccUkTbp8tcQKhyCEfmNOLihrW6QqPTC"
}
Then I want to read the contents of all files and put the objects into one temporary array.
exports.indexSignin = (req, res) => {
fs.readdir('./data/users', (err, files) => {
if (err) console.log(err);
const obj = [];
files.map((file) => {
fs.readFile(`./data/users/${file}`, 'utf-8', (err, data) => {
if (err) console.log(err);
obj.push(JSON.parse(data))
console.log(obj)
});
});
console.log(obj) //There obj is empty but I want an array
});
res.render('./index/index');
};
As an output I want to have an array saved into variable like this listed below:
[
{
"_id": "afdcab7e-b595-4a15-be0f-5f0337bd1317",
"email": "test1#test.pl",
"password": "$2a$12$nIoudV7eXmJbU7e/P6YCbOccUkTbp8tcQKhyCEfmNOLihrW6QqPTC"
},
{
"_id": "fdfacb7i-bk00-4a15-be0f-5f0337b1d991",
"email": "test2#test.pl",
"password": "$2a$12$nIoudV7eXmJbU7e/P6YCbOccUkTbp8tcQKhyCEfmNOLihrW6QqPTC"
}
]
Do you have any ideas on how to use the mapped data outside the callbacks, or how to refactor this in a better way?
Thank you guys!
I solved my problem with your help.
Here is a working example of what I needed:
const { readdir, readFile } = require("fs/promises");
exports.indexSignin = (req, res) => {
const readFiles = async () => {
try {
const path = "./data/users"
const files = await readdir(path);
const fileAwaits = files.map(file => readFile(`${path}/${file}`, "utf8"))
const contents = await Promise.all(fileAwaits)
return contents.map(co => JSON.parse(co))
} catch (err) {
throw err;
}
}
readFiles()
.then(test => console.log(test))
.catch(err => console.log('Directory not found.'))
.finally(() => console.log('Rest of the code...'));
res.render('./index/index');
// or an IIFE, which does the same
(async () => {
try {
const test = await readFiles();
console.log(test);
} catch (err) {
console.log('Directory not found.');
}
console.log('Rest of the code...')
res.render('./index/index');
})()
};
This is my own version of the same code, which works. I hope that helps you.
const { readdir, readFile } = require("fs/promises");
const readFiles = async () => {
try {
const path = "./test"
const files = await readdir(path);
console.log(files)
const fileAwaits = files.map(file => readFile(`${path}/${file}`, "utf8"))
const contents = await Promise.all(fileAwaits)
console.log(contents.map(co => JSON.parse(co)))
} catch (err) {
console.error(err)
}
}
readFiles()
So if you want to use this inside your API handlers, change it as below:
exports.indexSignin = async (req, res) => {
try {
const path = "./test" // replace path by your own
const files = await readdir(path);
console.log(files)
const fileAwaits = files.map(file => readFile(`${path}/${file}`, "utf8"))
const contents = await Promise.all(fileAwaits)
const arrayContent = contents.map(co => JSON.parse(co))
console.log(arrayContent);
} catch (err) {
console.error(err)
}
res.render('./index/index');
};
I'm a bit confused about how to proceed. I am using Archiver (a Node.js module) to write data to a zip file. Currently, I have my code working when I write to a file (local storage).
var fs = require('fs');
var archiver = require('archiver');
var output = fs.createWriteStream(__dirname + '/example.zip');
var archive = archiver('zip', {
zlib: { level: 9 }
});
archive.pipe(output);
archive.append(mybuffer, {name: 'msg001.txt'});
I'd like to modify the code so that the archive target is a file in an AWS S3 bucket. Looking at the code examples, I can specify the bucket name and key (and body) when I set up the upload, as in:
var s3 = new AWS.S3();
var params = {Bucket: 'myBucket', Key: 'myMsgArchive.zip', Body: myStream};
s3.upload( params, function(err,data){
…
});
Or
s3 = new AWS.S3({ params: {Bucket: 'myBucket', Key: 'myMsgArchive.zip'}});
s3.upload( {Body: myStream})
.send(function(err,data) {
…
});
With regard to my S3 example(s), myStream appears to be a readable stream, and I am confused as to how to make this work, since archive.pipe requires a writable stream. Is this something where we need to use a pass-through stream? I've found an example where someone created a pass-through stream, but the example is too terse to gain a proper understanding from. The specific example I am referring to is:
Pipe a stream to s3.upload()
Any help someone can give me would greatly be appreciated. Thanks.
This could be useful for anyone else wondering how to use pipe.
Since you correctly referenced the example using the pass-through stream, here's my working code:
1 - The routine itself, zipping files with node-archiver
exports.downloadFromS3AndZipToS3 = () => {
// These are my input files I'm willing to read from S3 to ZIP them
const files = [
`${s3Folder}/myFile.pdf`,
`${s3Folder}/anotherFile.xml`
]
// Just in case you'd like to rename them, as they'll have a different name in the final ZIP
const fileNames = [
'finalPDFName.pdf',
'finalXMLName.xml'
]
// Use promises to get them all
const promises = []
files.map((file) => {
promises.push(s3client.getObject({
Bucket: yourBucket,
Key: file
}).promise())
})
// Define the ZIP target archive
let archive = archiver('zip', {
zlib: { level: 9 } // Sets the compression level.
})
// Pipe!
archive.pipe(uploadFromStream(s3client, 'someDestinationFolderPathOnS3', 'zipFileName.zip'))
archive.on('warning', function(err) {
if (err.code === 'ENOENT') {
// log warning
} else {
// throw error
throw err;
}
})
// Good practice to catch this error explicitly
archive.on('error', function(err) {
throw err;
})
// The actual archive is populated here
return Promise
.all(promises)
.then((data) => {
data.map((thisFile, index) => {
archive.append(thisFile.Body, { name: fileNames[index] })
})
archive.finalize()
})
}
2 - The helper method
const stream = require('stream') // for PassThrough
const uploadFromStream = (s3client, someFolder, aFilename) => {
const pass = new stream.PassThrough()
const s3params = {
Bucket: yourBucket,
Key: `${someFolder}/${aFilename}`,
Body: pass,
ContentType: 'application/zip'
}
s3client.upload(s3params, (err, data) => {
if (err)
console.log(err)
if (data)
console.log('Success')
})
return pass
}
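A side note on the routine above (my observation, not from the original poster): s3client.upload completes in its own callback, so the promise returned by downloadFromS3AndZipToS3 resolves when the archive has been finalized, not when the ZIP has actually landed on S3. If you need to wait on the upload itself, one option is to have the helper hand back the upload promise as well, roughly like this (same assumed names as above):

const uploadFromStream = (s3client, someFolder, aFilename) => {
  const pass = new stream.PassThrough()
  const uploaded = s3client.upload({
    Bucket: yourBucket,
    Key: `${someFolder}/${aFilename}`,
    Body: pass,
    ContentType: 'application/zip'
  }).promise() // resolves once S3 has received the whole object
  return { pass, uploaded }
}

You'd then pipe into pass and await uploaded after archive.finalize(); the local-file example below takes the same .promise() approach.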
The following example takes the accepted answer and makes it work with local files as requested.
const archiver = require("archiver")
const fs = require("fs")
const AWS = require("aws-sdk")
const s3 = new AWS.S3()
const stream = require("stream")
const zipAndUpload = async () => {
const files = [`test1.txt`, `test2.txt`]
const fileNames = [`test1target.txt`, `test2target.txt`]
const archive = archiver("zip", {
zlib: { level: 9 } // Sets the compression level.
})
files.map((thisFile, index) => {
archive.append(fs.createReadStream(thisFile), { name: fileNames[index] })
})
const uploadStream = new stream.PassThrough()
archive.pipe(uploadStream)
archive.finalize()
archive.on("warning", function (err) {
if (err.code === "ENOENT") {
console.log(err)
} else {
throw err
}
})
archive.on("error", function (err) {
throw err
})
archive.on("end", function () {
console.log("archive end")
})
await uploadFromStream(uploadStream)
console.log("all done")
}
const uploadFromStream = async pass => {
const s3params = {
Bucket: "bucket-name",
Key: `streamtest.zip`,
Body: pass,
ContentType: "application/zip"
}
return s3.upload(s3params).promise()
}
zipAndUpload()
I am trying to find a way to get the currently logged in user and then append them to a JSON file. Below is my code: it first reads the dir, then gets the most recent file, returns it, and then appends the currently logged in user.
I can append a string to the file, but when trying to access req.user it states
Cannot read property 'user' of undefined
What would I need to include in this file so that it knows what the user is?
let fs = require("fs"),
express = require("express"),
_ = require("underscore"),
User = require("./models/user"),
path = require("path");
let getFileAddUser = () => {
let filePath = '../automation_projects/wss-automation-u/results/temp/';
fs.readdir(filePath, (err, files) => {
if (err) { throw err; }
let file = getMostRecentFile(files, filePath);
console.log(file);
fs.readFile(filePath + file, 'utf8', (err, data) => {
if(err){
console.error(err);
return;
} else {
let json = JSON.parse(data);
//Un-comment to write to most recent file.
//==================================================
//This should find the currently logged in user and append them to the most recent file found.
json.currentuser = req.user;
fs.writeFile(filePath + file, JSON.stringify(json), (error) => {
if(error){
console.error(error);
return;
} else {
console.log(json);
}
});
//==================================================
console.log(data);
}
});
});
};
//Get the most recent file from the results folder.
function getMostRecentFile(files, path) {
let out = [];
files.forEach(function(file) {
let stats = fs.statSync(path + "/" +file);
if(stats.isFile()) {
out.push({"file":file, "mtime": stats.mtime.getTime()});
}
});
out.sort(function(a,b) {
return b.mtime - a.mtime;
})
return (out.length>0) ? out[0].file : "";
}
module.exports = getFileAddUser;
Thanks to a knowledgeable co-worker and some further research, we were able to get this working. I'd like to share the code we came up with to append the currently logged in user to our results file. You will also notice we got some help using the Ramda.js library.
let fs = require("fs"),
express = require("express"),
_ = require("underscore"),
User = require("./models/user"),
r = require("ramda"),
path = require("path");
//This will be our function to get the most recent file from our dir and
//return it to us. We then use this function below.
function getMostRecentFile(files, path) {
let out = [];
let f = r.tail(files);
console.log(files);
f.forEach(function(file) {
let stats = fs.statSync(path + "/" +file);
if(stats.isFile()) {
out.push({"file":file, "mtime": stats.mtime.getTime()});
}
});
out.sort(function(a,b) {
return b.mtime - a.mtime;
})
return (out.length>0) ? out[0].file : "";
}
//Passing in 'u' as an argument which can then be used in a route to pass in
//anything that we want it to be. In our case it was the currently logged
//in user.
let getUser = (u) => {
let user = u;
let filePath = '../automation_projects/wss-automation-u/results/temp/';
//Comment above and uncomment below for testing locally.
// let filePath = "./temp/";
let file = "";
//Below we read our dir then get the most recent file using the
//getMostRecentfile function above.
read_directory(filePath).then( files => {
file = getMostRecentFile(files, filePath)
console.log(file);
return(read_file(filePath + file))
}).then( x => {
// Here we parse through our data with x representing the data that we
//returned above.
let json = JSON.parse(x);
return new Promise(function(resolve, reject) {
json.currentuser = u;
//And finally we write the updated JSON back to the latest file.
fs.writeFile(filePath + file, JSON.stringify(json), (error) => {
if(error) reject(error);
else resolve(json);
// console.log(json);
});
});
});
}
let read_directory = (path) => {
return new Promise((resolve, reject) => {
fs.readdir(path, (err, items) => {
if (err){
return reject(err)
}
return resolve([path, ...items])
})
})
}
let read_file = (path) => {
return new Promise((resolve, reject) => {
fs.readFile(path, "utf8", (err, items) => {
if (err){
return reject(err)
}
return resolve(items)
})
})
}
module.exports = getUser;
Then below is an example route showing how to use the getUser module. You will want to require it like you do everything else with Node.js and dependencies. Hope this helps someone in the future.
let getUser = require("getuser");
//Make a route to use the getUser module and pass in our argument value.
app.get("/", (req, res) => {
//With in the get user function pass in whatever you want to equal 'u' from the getuser module.
getUser(req.user.username);
res.render("index", { username: req.user });
});
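One caveat I'd add to the route above (an observation, not from the original post): getUser does its reads and the write asynchronously but never returns its promise chain, so the route has no way to know when the file has actually been updated. A sketch of the same module returning the chain so callers can await it:

let getUser = (u) => {
  let filePath = '../automation_projects/wss-automation-u/results/temp/';
  // Return the chain so callers can await the finished write
  return read_directory(filePath).then((files) => {
    const file = getMostRecentFile(files, filePath);
    return read_file(filePath + file).then((x) => {
      const json = JSON.parse(x);
      json.currentuser = u;
      return new Promise((resolve, reject) => {
        fs.writeFile(filePath + file, JSON.stringify(json), (error) => {
          if (error) reject(error);
          else resolve(json);
        });
      });
    });
  });
};

// Then in the route:
app.get("/", async (req, res) => {
  await getUser(req.user.username); // resolves once the file has been written
  res.render("index", { username: req.user });
});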