Issues downloading PDF files using axios in React.js (Context API) - javascript

Can anyone please help me out on how to download PDF files when React.js (using the Context API) is the frontend and axios makes the request? Everything is fine on the server side, but when I click on the download link, the raw response data is shown in the console instead of a file being saved.
This is the client-side code making the request:
const pdfFileDownload = async (dlIdInfo) => {
  const { id } = dlIdInfo;
  console.log(`pdf file id`, id);
  try {
    const res = await axios({
      method: "GET",
      url: `/pub/pdfdload/${id}`,
      responseType: 'blob',
    });
    console.log(res.data);
  } catch (err) {
    console.log(err.response);
  }
}
This is the server-side code:
router.get("/pdfdload/:id", async (req, res, next) => {
  const fileId = req.params.id;
  try {
    const fetchedFileToDl = await FILESDB.findById(fileId);
    const fileArr = fetchedFileToDl.pdfFiles;
    // Note: piping inside .map() only works while pdfFiles holds a single
    // entry; a second iteration would try to set headers on a response
    // that is already being streamed.
    fileArr.map(filename => {
      const editedFileName = filename.split("/")[1];
      const filePath = path.join(__dirname, "../", "public", editedFileName);
      const files = fs.createReadStream(filePath);
      res.setHeader("Content-Type", "application/pdf");
      res.setHeader('Content-Disposition', 'inline; filename="' + editedFileName + '"');
      files.pipe(res);
    });
  } catch (err) {
    console.log(err);
  }
});

Here is how I usually do it:
const downloadFileAsPDF = (name, data) => {
  const url = window.URL.createObjectURL(new Blob([data], { type: 'application/pdf' }));
  const link = document.createElement('a');
  link.href = url;
  link.setAttribute('download', name);
  link.click();
  window.URL.revokeObjectURL(url);
}
const pdfFileDownload = async (dlIdInfo) => {
  const { id } = dlIdInfo;
  console.log(`pdf file id`, id);
  try {
    const res = await axios({
      method: "GET",
      url: `/pub/pdfdload/${id}`,
      responseType: 'blob',
    });
    downloadFileAsPDF('file.pdf', res.data);
  } catch (err) {
    console.log(err.response);
  }
}
It's really simple, but it gets the job done.
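One caveat worth adding (an assumption about target browsers, not something the original answer covers): some browsers, older Firefox builds reportedly among them, ignore a click on a detached anchor, so a slightly more defensive sketch attaches the link to the DOM first:

const downloadBlob = (name, blob) => {
  const url = window.URL.createObjectURL(blob);
  const link = document.createElement('a');
  link.href = url;
  link.setAttribute('download', name);
  document.body.appendChild(link); // some browsers want the link in the DOM before click()
  link.click();
  link.remove();
  window.URL.revokeObjectURL(url);
};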

Related

Node.js AWS Lambda putObject doesn't seem to finish

I have a Lambda function that is meant to download a directory of files from s3, convert them, delete the old files, and upload the new output files back to s3. The output for each file will be at least one file and a folder.
Everything seems to be working as intended, except for the upload: no errors are thrown, it just ends without putting anything.
I'm a novice, so feel free to point out I've done it all wrong.
exports.handler = async ({ dirName }) => {
  // const jsonIn = JSON.parse(event.body);
  // const dirName = jsonIn.dirName;
  const localDir = `/tmp/${dirName}`;
  const params = {
    Bucket: 'to-pdf-test',
    Delimiter: '/',
    Prefix: dirName + '/',
    StartAfter: dirName + '/'
  };
  var s3List;
  var localList = [];
  execSync(`mkdir ${localDir}`);
  try {
    s3List = await s3.listObjectsV2(params).promise();
  } catch (e) {
    throw e;
  }
  await Promise.all(
    s3List.Contents.map(async (file) => {
      let f = await getFiles(file);
      localList.push(f);
    })
  ).then(res => { console.log('Get Successful' + res) })
   .catch(err => { console.log('error' + err) });
  await Promise.all(
    localList.map(async (file) => {
      convertFile(file);
    })
  ).then(res => { console.log('Convert Successful' + res) })
   .catch(err => { console.log('error' + err) });
  // dirSync invokes this async callback but never awaits it, so the handler
  // can return before any of the putObject calls below have finished.
  dirSync(localDir, async (filePath, stat) => {
    let bucketPath = filePath.substring(5);
    let uploadParams = {
      Bucket: 'to-pdf-test',
      Key: `${bucketPath}`,
      Body: fs.readFileSync(filePath)
    };
    console.log('DS fPath ' + filePath);
    console.log('DS bPath ' + bucketPath);
    console.log(uploadParams.Body);
    try {
      let res = await s3.putObject(uploadParams).promise();
      console.log('Upload Complete', res);
    } catch (e) {
      console.log('Error', e);
    }
  });
};
async function getFiles(file) {
  let filePath = `/tmp/${file.Key}`;
  let fileParams = {
    Bucket: 'to-pdf-test',
    Key: file.Key
  };
  try {
    const { Body: inputFileBuffer } = await s3.getObject(fileParams).promise();
    fs.writeFileSync(filePath, inputFileBuffer);
  } catch (e) {
    throw (e);
  }
  return filePath;
}
function convertFile(file) {
  const noPath = getFilename(file);
  const fPath = getFilePath(file);
  if (path.extname(noPath) === '.msg') {
    execSync(`cd ${fPath} && ${command} ${noPath}`);
  } else {
    console.log(`${noPath} not run. Not .msg`);
  }
  fs.unlinkSync(file);
}
function getFilename(fullPath) {
  return fullPath.replace(/^.*[\\\/]/, '');
}
function getFilePath(fullPath) {
  return fullPath.substring(fullPath.lastIndexOf('/'), 0);
}
function dirSync(dirPath, callback) {
  fs.readdirSync(dirPath).forEach((name) => {
    var filePath = path.join(dirPath, name);
    var stat = fs.statSync(filePath);
    if (stat.isDirectory()) {
      dirSync(filePath, callback);
    } else {
      callback(filePath, stat);
    }
  });
}
I had the upload working in a previous version of this function, thanks to this post.
My solution for the moment: read the local directory separately, push the file paths to localList, then .map over the array of paths and await all of the uploads at once.
localList = [];
// read dir and push to localList array
await dirSync(localDir, (filePath, stat) => {
  localList.push(filePath);
});
console.log(localList);
await Promise.all(
  localList.map(async (file) => {
    let bucketPath = file.substring(5);
    let uploadParams = {
      Bucket: 'to-pdf-test',
      Key: bucketPath,
      Body: fs.readFileSync(file)
    };
    console.log('Uploading', file);
    await s3.putObject(uploadParams).promise()
      .then((res) => { console.log('Upload Successful', bucketPath) })
      .catch((err) => { console.log('error' + err) });
  })
);
If there is a better (or more proper) way to do this, let me know :)
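One possible refinement (just a sketch, assuming Node 12+ for fs.promises and Array.prototype.flat, untested against this bucket layout): have the directory walk itself return the list of paths, so there is no shared mutable array and the whole walk can be awaited for real:

const fsp = require('fs').promises;
const path = require('path');

// Recursively collect every file path under dirPath.
async function listFiles(dirPath) {
  const entries = await fsp.readdir(dirPath, { withFileTypes: true });
  const nested = await Promise.all(
    entries.map((entry) => {
      const fullPath = path.join(dirPath, entry.name);
      return entry.isDirectory() ? listFiles(fullPath) : [fullPath];
    })
  );
  return nested.flat();
}

// const localList = await listFiles(localDir);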

How to save a PDF to Cloudant

I want to save a PDF to Cloudant. With the code below, I get an error opening the attachment in Cloudant: "An error was encountered when processing this file".
I can put fake string data in the "._attachments[name].data" field and it will save.
The Cloudant docs say the data content needs to be in base64, and that is what I am attempting.
Cloudant says "The content must be provided by using BASE64 representation".
function saveFile() {
  var doc = {};
  var blob = null;
  // fileName is from the input field model data
  var url = fileName;
  fetch(url)
    .then((r) => r.blob())
    .then((b) => {
      blob = b;
      return getBase64(blob);
    })
    .then((blob) => {
      console.log(blob);
      let name = url._rawValue.name;
      doc._id = "testing::" + new Date().getTime();
      doc.type = "testing attachment";
      doc._attachments = {};
      doc._attachments[name] = {};
      doc._attachments[name].content_type = "application/pdf";
      doc._attachments[name].data = blob.split(",")[1];
      console.log("doc: ", doc);
    })
    .then(() => {
      api({
        method: "POST",
        url: "/webdata",
        auth: {
          username: process.env.CLOUDANT_USERNAME,
          password: process.env.CLOUDANT_PASSWORD,
        },
        data: doc,
      })
        .then((response) => {
          console.log("result: ", response);
          alert("Test has been submitted!");
        })
        .catch((e) => {
          console.log("e: ", e);
          alert(e);
        });
      console.log("finished send test");
    });
}
function getBase64(file) {
  return new Promise((resolve, reject) => {
    const reader = new FileReader();
    reader.readAsDataURL(file);
    reader.onload = () => resolve(reader.result);
    reader.onerror = (error) => reject(error);
  });
}
Any ideas?
Thanks
CouchDB, and by extension Cloudant, has a means of handling a "multi-part" request where the JSON document and the attachments are sent in the same request. See https://docs.couchdb.org/en/3.2.2/api/document/common.html#put--db-docid
They are modelled in CouchDB's Nano project here: https://www.npmjs.com/package/nano#multipart-functions
const fs = require('fs');
fs.readFile('rabbit.png', async (err, data) => {
  if (!err) {
    await alice.multipart.insert({ foo: 'bar' }, [{ name: 'rabbit.png', data: data, content_type: 'image/png' }], 'mydoc')
  }
});
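The appeal of the multipart route is that the JSON document and the raw attachment bytes travel in a single request, avoiding both a second round-trip and the roughly 33% size overhead of base64-encoding the file into the document body.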
Alternatively, you could write the document first and add the attachment in a supplementary request. Using the current Cloudant SDKs:
write document https://cloud.ibm.com/apidocs/cloudant?code=node#putdocument
write attachment https://cloud.ibm.com/apidocs/cloudant?code=node#putattachment
const doc = {
  a: 1,
  b: 2
}
const res = await service.putDocument({
  db: 'events',
  docId: 'mydocid',
  document: doc
})
const stream = fs.createReadStream('./mypdf.pdf')
await service.putAttachment({
  db: 'events',
  docId: 'mydocid',
  rev: res.result.rev, // we need the _rev of the doc we've just created
  attachmentName: 'mypdf',
  attachment: stream,
  contentType: 'application/pdf'
})
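To check the round trip, the attachment can be fetched back with the matching getAttachment call (same SDK and identifiers as above; per the SDK docs the result is a readable stream):

const att = await service.getAttachment({
  db: 'events',
  docId: 'mydocid',
  attachmentName: 'mypdf'
})
// stream the PDF bytes back to disk
att.result.pipe(fs.createWriteStream('./roundtrip.pdf'))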
I found out I was doing too much to the PDF file. There is no need to make a blob and then convert it to base64; only the base64 conversion is needed.
async function sendFiles() {
  try {
    const url = fileName;
    const doc = {};
    doc._attachments = {};
    doc._id = "testing::" + new Date().getTime();
    doc.type = "testing attachment";
    for (let item of url._value) {
      const blob2 = await getBase64(item);
      let name = item.name;
      doc._attachments[name] = {};
      doc._attachments[name].content_type = item.type;
      doc._attachments[name].data = blob2.split(",")[1];
    }
    const response = await api({
      method: "POST",
      url: "/webdata",
      data: doc,
    });
  } catch (e) {
    console.log(e);
    throw e; // throw error so caller can see the error
  }
  console.log("finished send test");
  fileName.value = null;
}
function getBase64(file) {
  return new Promise((resolve, reject) => {
    const reader = new FileReader();
    reader.readAsDataURL(file);
    reader.onload = () => resolve(reader.result);
    reader.onerror = (error) => reject(error);
  });
}
This works for me.

My request works in Postman but not in the browser (React, Node, Cloudinary)

I have a page where I made the backend in Node.js + MongoDB and the frontend with React. In the backend I have a middleware that I use to upload images to Cloudinary. For example, one route creates a new pet, and when I do the POST request with Postman everything goes well: the new pet is created in the db and also has the Cloudinary URL in the image field. The problem comes when I try to do the same with a form in React... Everything goes "well" too, but the image field (where with Postman I get the Cloudinary URL) is now empty.
The Node controller code:
const petCreatePost = async (req, res, next) => {
  const { type, name, avatar, age, sex, breed, size, isVaccinated, isSterilized, isDewormed, microchip, province, shelter, status } = req.body;
  try {
    const newPet = new Pet({
      type,
      name,
      avatar: req.imageUrl ? req.imageUrl : '',
      age,
      sex,
      breed,
      size,
      isVaccinated,
      isSterilized,
      isDewormed,
      microchip,
      province,
      shelter,
      status
    });
    const createdPet = await newPet.save();
    return res.status(200).json('Mascota creada correctamente', { pet: createdPet });
  } catch (error) {
    return next(error);
  }
}
Cloudinary middleware:
const multer = require('multer');
const path = require('path');
const fs = require('fs');
const cloudinary = require('cloudinary').v2
const ACCEPTED_FILE = ['image/jpg', 'image/jpeg', 'image/png'];
const fileFilter = (req, file, cb) => {
  if (!ACCEPTED_FILE.includes(file.mimetype)) {
    const error = new Error('Extensión del archivo inválida.')
    error.status = 400;
    return cb(error);
  }
  return cb(null, true);
};
const storage = multer.diskStorage({
  filename: (req, file, cb) => {
    const fileName = `${Date.now()}-${file.originalname}`;
    cb(null, fileName);
  },
  destination: (req, file, cb) => {
    const directory = path.join(__dirname, '../public/uploads');
    cb(null, directory);
  }
});
const upload = multer({
  storage,
  fileFilter,
});
const uploadToCloudinary = async (req, res, next) => {
  try {
    console.log('req', req);
    if (req.file) {
      const path = req.file.path;
      const image = await cloudinary.uploader.upload(path);
      req.imageUrl = image.secure_url;
      console.log('image url', req.imageUrl);
      return next();
    } else {
      return next();
    }
  } catch (error) {
    return next(error);
  }
};
module.exports = { upload, uploadToCloudinary };
How I use the middleware:
router.post('/new', [upload.single('avatar'), uploadToCloudinary], controller.petCreatePost);
The React component:
import React, { useContext } from 'react';
export const NewPet = () => {
  const submitForm = async (e) => {
    e.preventDefault();
    const { type, name, age, avatar, sex, breed, size, isVaccinated, isSterilized, isDewormed, microchip, province, status } = e.target;
    const form = {
      type: type.value,
      name: name.value,
      age: age.value,
      sex: sex.value,
      breed: breed.value,
      size: size.value,
      isVaccinated: isVaccinated.value,
      isSterilized: isSterilized.value,
      isDewormed: isDewormed.value,
      microchip: microchip.value,
      province: province.value,
      status: status.value
    };
    // const form = new FormData();
    // form.append('type', type.value);
    // form.append('name', name.value);
    // form.append('age', age.value);
    // form.append('sex', sex.value);
    // form.append('breed', breed.value);
    // form.append('size', size.value);
    // form.append('isVaccinated', isVaccinated.value);
    // form.append('isSterilized', isSterilized.value);
    // form.append('isDewormed', isDewormed.value);
    // form.append('microchip', microchip.value);
    // form.append('province', province.value);
    // form.append('status', status.value);
    // form.append('avatar', imagenPrueba);
    try {
      const pet = await newPet(form);
      console.log('pet', pet);
    } catch (err) {
      console.log(err);
    }
  }
The commented-out part of the code is an alternative that I tried to use, because I'm sending a file and have to use FormData, but it is not working either. I also checked that the form has enctype="multipart/form-data".
And lastly, the "newPet" function that I use to connect to the backend:
export const newPet = async (form) => {
  const req = await fetch(newPetUrl, {
    method: "POST",
    headers: {
      Accept: "application/json",
      "Content-Type": "application/json",
      "Access-Control-Allow-Origin": "*",
    },
    credentials: "include",
    body: JSON.stringify(form),
  });
  const response = await req.json();
  if (!req.ok) {
    throw new Error(response.message);
  }
  return response;
};
I hope someone can help me. Thanks!
You need to await the URL from Cloudinary. I had this problem too.
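Separately, since multer only populates req.file for multipart/form-data bodies, a JSON.stringify'd body can never carry the avatar file. A minimal sketch of the FormData route (assuming the same newPetUrl and a file input named avatar inside the form):

const submitForm = async (e) => {
  e.preventDefault();
  // Let the browser collect every named field, including the file input.
  const form = new FormData(e.target);
  const req = await fetch(newPetUrl, {
    method: 'POST',
    credentials: 'include',
    // No Content-Type header here: the browser sets multipart/form-data
    // with the boundary that multer needs to parse the body.
    body: form,
  });
  return req.json();
};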

Node script won't stop after async/await in a loop

const axios = require("axios");
const Parser = require("./utils/parser");
const WebStore = require("./models/webstore");
const mongoose = require("mongoose");
require("./db/connection");
require("dotenv").config();
const url = "";
const app_secret = process.env.APP_SECRET;
const buff = new Buffer(app_secret);
const base64 = buff.toString("base64");
axios
  .get(url, {
    headers: {
      Accept: "application/json",
      Authorization: `Basic ${base64}`,
    },
    params: {
      from_date: "2020-02-19",
      to_date: "2021-02-21",
    },
  })
  .then(async (response) => {
    const data = response.data.split("\n");
    const events = [];
    data.forEach((point) => {
      try {
        const dat = Parser.toDB(JSON.parse(Parser.parser(point)));
        events.push(dat);
      } catch (err) {}
    });
    events.forEach(async (event) => {
      try {
        const e = new WebStore(event);
        await e.save();
        console.log("saved");
      } catch (err) {
        console.log("fail");
      }
    });
  })
  .catch((err) => {
    console.error(err);
  });
So after the execution the script outputs "saved" multiple times, but the script does not close itself.
The data is stored in the database after the execution. I have tried mongoose.disconnect() and I've also tried mongoose.connection.close(). How do I resolve this issue?
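One thing worth noting: events.forEach(async ...) fires the saves but never waits for them, so nothing downstream can know when they finish, and the still-open connection keeps the process alive. A sketch of the usual fix (assuming the same WebStore model and mongoose connection) is to await every save and only then close the connection:

await Promise.all(
  events.map((event) => new WebStore(event).save())
);
// release the MongoDB sockets so the Node process can exit
await mongoose.connection.close();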

Firebase function storage POST image issue

I have encountered a problem when following a Maximilian Schwarzmüller course, which has otherwise been great: https://www.youtube.com/watch?v=qZ1EFnFOGvE
The image logs in the Firebase console as uploaded and the file type/size are recognised, but it continually loads and never displays the image. I use a POST request in Postman to upload the image.
When I upload manually to Firebase through their UI, everything works fine.
My code:
const functions = require("firebase-functions");
const os = require("os");
const path = require("path");
const spawn = require("child-process-promise").spawn;
const cors = require("cors")({ origin: true });
const Busboy = require("busboy");
const fs = require("fs");
const gcconfig = {
  projectId: "REDACTED",
  keyFilename: "REDACTED"
};
const gcs = require("@google-cloud/storage")(gcconfig);
//
exports.onFileChange = functions.storage.object().onFinalize(event => {
  const object = event.data;
  const bucket = object.bucket;
  const contentType = object.contentType;
  const filePath = object.name;
  console.log("File change detected, function execution started");
  if (object.resourceState === "not_exists") {
    console.log("We deleted a file, exit...");
    return;
  }
  if (path.basename(filePath).startsWith("resized-")) {
    console.log("We already renamed that file!");
    return;
  }
  const destBucket = gcs.bucket(bucket);
  const tmpFilePath = path.join(os.tmpdir(), path.basename(filePath));
  const metadata = { contentType: contentType };
  return destBucket
    .file(filePath)
    .download({
      destination: tmpFilePath
    })
    .then(() => {
      return spawn("convert", [tmpFilePath, "-resize", "500x500", tmpFilePath]);
    })
    .then(() => {
      return destBucket.upload(tmpFilePath, {
        destination: "resized-" + path.basename(filePath),
        metadata: metadata
      });
    });
});
exports.uploadFile = functions.https.onRequest((req, res) => {
  cors(req, res, () => {
    if (req.method !== "POST") {
      return res.status(500).json({
        message: "Not allowed"
      });
    }
    const busboy = new Busboy({ headers: req.headers });
    let uploadData = null;
    busboy.on("file", (fieldname, file, filename, encoding, mimetype) => {
      const filepath = path.join(os.tmpdir(), filename);
      uploadData = { file: filepath, type: mimetype };
      file.pipe(fs.createWriteStream(filepath));
    });
    busboy.on("finish", () => {
      const bucket = gcs.bucket("REDACTED");
      bucket
        .upload(uploadData.file, {
          uploadType: "media",
          metadata: {
            // Note: nesting contentType inside a second "metadata" object
            // stores it as custom metadata; the object's actual content type
            // would be set with metadata: { contentType: uploadData.type }.
            metadata: {
              contentType: uploadData.type
            }
          }
        })
        .then(() => {
          res.status(200).json({
            message: "It worked!"
          });
        })
        .catch(err => {
          res.status(500).json({
            error: err
          });
        });
    });
    busboy.end(req.rawBody);
  });
});
My security rules:
rules_version = '2';
service firebase.storage {
match /b/{bucket}/o {
match /{allPaths=**} {
allow read, write:if true;
}
}
}
