PDF encryption and uploading to AWS S3 not working - JavaScript

In this code I am trying to create a PDF file and encrypt it. The creation and encryption work: I create a temp file on my local machine, then encrypt it with 'qpdf --encrypt Decorpot test 40 -- ${tempFileName} --replace-input', which replaces the unencrypted file with the encrypted one, and the file on disk is correctly encrypted. But when I upload to AWS S3, only the non-encrypted file is uploaded. I have tried many approaches; please help. Thank you.
module.exports.generateChecklistPdf2 = function () {
    return new Promise((resolve, reject) => {
        let htmlFilePath = path.join(__dirname, '../static/html/checkList.html');
        htmlFilePath = htmlFilePath.replace(new RegExp(/\\/g), '/');
        pdfFilePath = path.join(__dirname, '../uploads/' + Math.ceil(Math.random() * 810000) + '.pdf');
        pdfFilePath = pdfFilePath.replace(new RegExp(/\\/g), '/');
        let date = new Date();
        return ejs.renderFile(htmlFilePath, { moment: moment, date }, {}, function (err, htmlFile) {
            if (err) return console.log(err);
            var tempFileName = pdfFilePath;
            var file = fs.createWriteStream(tempFileName);
            assetsPath = path.join(__dirname, '../static/');
            assetsPath = assetsPath.replace(new RegExp(/\\/g), '/');
            options = {
                base: 'file:///' + assetsPath,
            }
            setTimeout(() => {
                var cmd = `qpdf --encrypt Decorpot test 40 -- ${tempFileName} --replace-input`;
                exec(cmd, function (err) {
                    if (err) {
                        console.error('Error occured: ' + err);
                    } else {
                        console.log('PDF encrypted :)', tempFileName);
                    }
                });
            }, 1000)
            return pdf.create(htmlFile, options).toStream(function (err, stream) {
                stream.pipe(file).on('close', () => {
                    formData = {
                        customerName: "customername",
                        file: fs.createReadStream(pdfFilePath)
                    }
                    let url = '';
                    if (process.env.NODE_ENV == 'production') {
                        url = 'http://13.233.26.162:5003/api/file/upload';
                    } else {
                        url = 'http://localhost:5003/api/file/upload';
                    }
                    return request.post({ url: url, formData: formData }, function (err, res) {
                        if (err) return console.log(err);
                        console.log(res.body)
                        let resolveObject = {};
                        resolveObject.pdfUrl = res.body;
                        resolve(resolveObject);
                    });
                });
            });
        });
    });
}
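
The symptom points to a race: the setTimeout fires 1000 ms in, while pdf.create() may still be writing, and the upload starts as soon as the write stream closes, whether or not qpdf has run yet. A minimal sketch of one way to sequence things, assuming the same qpdf command and upload endpoint (exec is promisified so encryption is guaranteed to complete before the read stream for the upload is opened):

// Sketch: write the PDF, then encrypt, then upload - strictly in that order.
const { exec } = require('child_process');
const { promisify } = require('util');
const execAsync = promisify(exec);

pdf.create(htmlFile, options).toStream(function (err, stream) {
    if (err) return reject(err);
    stream.pipe(file).on('close', async () => {
        try {
            // Encrypt in place; awaiting ensures qpdf finishes before we read the file back.
            await execAsync(`qpdf --encrypt Decorpot test 40 -- ${tempFileName} --replace-input`);
            const formData = {
                customerName: 'customername',
                file: fs.createReadStream(tempFileName) // now reads the encrypted file
            };
            request.post({ url: url, formData: formData }, (err, res) => {
                if (err) return reject(err);
                resolve({ pdfUrl: res.body });
            });
        } catch (e) {
            reject(e);
        }
    });
});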

Related

Uploading Multiple Files to Google Cloud Storage using For Loop

I am trying to upload multiple files to Google Cloud Storage, using a for loop over the list of files I want to upload.
The problem is that the for loop does not pause to wait for each upload to finish before moving on to the next one. The files do eventually upload, but the loop finishes first, so the empty urlList is sent back to the client.
How do I make it wait for each upload to complete before moving on to the next file in the for loop?
const processFile = require('../middleware');
const { format } = require('util');
let uuidv4 = require('uuid/v4');
const Cloud = require('@google-cloud/storage');
const { Storage } = Cloud;

const storage = new Storage({
    keyFilename: './xxx.json',
    projectId: 'xxx'
});
const bucket = storage.bucket('xxx');

exports.upload = async (req, res) => {
    const urlList = [];
    await processFile(req, res); // multer
    for (var i = 0; i < req.files.length; i++) {
        if (!req.files[i]) {
            return res.status(400).send({ message: 'Please upload a file!' });
        }
        const { originalname, buffer } = req.files[i];
        var filename = originalname
            .toLowerCase()
            .split(' ')
            .join('-');
        filename = uuidv4() + '-' + filename;
        console.log(filename);
        const blob = bucket.file(filename);
        const blobStream = blob.createWriteStream({
            resumable: false
        });
        blobStream.on('error', err => {
            res.status(500).send({ message: err.message });
        });
        blobStream.on('finish', async data => {
            const publicUrl = format(
                `https://storage.googleapis.com/${bucket.name}/${blob.name}`
            );
            urlList.push(publicUrl);
            try {
                await bucket.file(filename).makePublic();
            } catch (err) {
                console.log('failed to make it public');
                reject(err); // note: reject is not defined in this scope
            }
        });
        blobStream.end(buffer);
    }
    return res.status(200).send({
        message: 'Uploaded the files successfully',
        url: urlList
    });
};
Just put your upload code in a Promise that you can await in the loop. With event-based code like this, the for loop otherwise just runs straight through it and cannot wait for it to finish. This should do the trick:
const uploadFile = (f) => {
    return new Promise((resolve, reject) => {
        const { originalname, buffer } = f;
        var filename = originalname.toLowerCase().split(" ").join("-");
        filename = uuidv4() + "-" + filename;
        console.log(filename);
        const blob = bucket.file(filename);
        const blobStream = blob.createWriteStream({
            resumable: false,
        });
        blobStream.on("error", (err) => {
            res.status(500).send({ message: err.message });
            reject(err);
        });
        blobStream.on("finish", async (data) => {
            const publicUrl = format(
                `https://storage.googleapis.com/${bucket.name}/${blob.name}`
            );
            try {
                await bucket.file(filename).makePublic();
                resolve(publicUrl);
            } catch (err) {
                console.log("failed to make it public");
                reject(err);
            }
        });
        blobStream.end(buffer);
    });
};

exports.upload = async (req, res) => {
    const urlList = [];
    await processFile(req, res); // multer
    for (var i = 0; i < req.files.length; i++) {
        if (!req.files[i]) {
            return res.status(400).send({ message: "Please upload a file!" });
        }
        const publicUrl = await uploadFile(req.files[i]);
        urlList.push(publicUrl);
    }
    return res.status(200).send({
        message: "Uploaded the files successfully",
        url: urlList,
    });
};
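
A note on the design choice: awaiting inside the loop uploads the files one at a time. If order does not matter, the uploads can run in parallel and be collected with Promise.all - a sketch, assuming uploadFile only rejects on failure rather than also writing to res (the res.status(500) inside its error handler should then be dropped):

// Sketch: start all uploads at once and wait for every public URL.
exports.upload = async (req, res) => {
    await processFile(req, res); // multer
    try {
        const urlList = await Promise.all(req.files.map(uploadFile));
        return res.status(200).send({
            message: 'Uploaded the files successfully',
            url: urlList
        });
    } catch (err) {
        return res.status(500).send({ message: err.message });
    }
};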

How to properly save image to s3 with nodejs that can be displayed?

I am saving an image from a URL to an S3 bucket. The image is saved, but when I open the object URL, nothing displays except a small square. The image size is not zero either. I know something is wrong, but I am not sure where or how to fix it. Any suggestion would be great.
Here is my code:
const AWS = require('aws-sdk');
const axios = require('axios');

AWS.config.update({
    accessKeyId: 'xxxxxx',
    secretAccessKey: 'xxx',
});
const S3 = new AWS.S3();
const url = '//../path-to-image';

async function uploadToS3(uri, cb) {
    const image = await axios.get(uri);
    const finalImage = await new Buffer.from(image.data);
    S3.putObject(
        {
            Bucket: 'my-images',
            Key: Date.now() + '.jpg',
            ACL: 'public-read',
            ContentType: 'image/jpeg',
            Body: finalImage,
        },
        function (err) {
            if (err) {
                console.error('Image upload to s3 failed: ', err);
            }
            console.log('Upload to s3 successfull');
        }
    );
}

const handleError = (err, d) => {
    if (err) return new Error(err.stack);
    else console.log('Operation successfull', d);
};

uploadToS3(url, handleError);
Although I have written this in Angular, the basic concept is the same.
init() {
    let tawcon: TVAwsConfig = new TVAwsConfig(); // make your own config file
    this.awsconfig = tawcon.aswcon;
    this.bucketName = this.awsconfig.bucketName;
    AWS.config.region = this.awsconfig.bucketRegion;
    AWS.config.apiVersions = {
        s3: this.awsconfig.apiVersions
    }
    this.s3Object = new AWS.S3();
}

public publishPageToS3(params: any,
    loader: LoaderService,
    callBkFunc: Function): void {
    let { foldername, compref, filename, body } = params;
    loader.showLoader = true;
    var objectParams = {
        Bucket: this.bucketName,
        Key: foldername + '/' + compref + '/' + filename, // write your own folder structure.
        Body: body
    };
    this.s3Object.upload(objectParams, function (err, data) {
        if (err) {
            loader.showLoader = false;
            callBkFunc(err, true);
            return;
        }
        if (data) {
            loader.showLoader = false;
            callBkFunc("Success", false);
        }
    });
}
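
The Angular snippet shows working upload mechanics, but it does not explain the "small square" symptom in the original code. A likely cause, though not confirmed in the thread: axios decodes the response body as a string by default, which corrupts binary data before Buffer.from ever sees it. Requesting an arraybuffer keeps the bytes intact - a minimal sketch:

// Sketch: ask axios for raw bytes so the JPEG is not mangled by string decoding.
async function uploadToS3(uri) {
    const image = await axios.get(uri, { responseType: 'arraybuffer' });
    const finalImage = Buffer.from(image.data); // Buffer.from is a static method - no `new`
    return S3.putObject({
        Bucket: 'my-images',
        Key: Date.now() + '.jpg',
        ACL: 'public-read',
        ContentType: 'image/jpeg',
        Body: finalImage,
    }).promise();
}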

response.pipe(file) throws error on Windows Server | Works fine on Ubuntu

I'm trying to download a file and move it to a specific folder. It works fine on my local Linux machine but throws an error when executed on Windows Server:
router.post('/pullDraftFile', (req, res) => {
    if (!req.query.reference && !req.query.draftFileId) {
        return res.sendStatus(502);
    }
    const reference = req.query.reference;
    const fileUrl = `${s3DraftBucketURL}${req.query.draftFileId}`;
    http.get(fileUrl, function (response) {
        new Promise((resolve, reject) => {
            const contentDisposition = response.headers['content-disposition'];
            console.log(response.headers['content-type']);
            resolve(contentDisposition && /^attachment/i.test(contentDisposition)
                ? req.query.draftFileId + '|' + contentDisposition.substring(contentDisposition.indexOf('=') + 1, contentDisposition.length)
                : req.query.draftFileId + '|');
        }).then((fileName) => {
            if (!fs.existsSync(`${watchFolders}/${reference}`)) {
                fs.mkdirSync(`${watchFolders}/${reference}`);
            }
            const downloadedFile = `downloads/${fileName}`;
            const watchFile = `${watchFolders}/${reference}/${fileName}`;
            const file = fs.createWriteStream(downloadedFile);
            var stream = response.pipe(file);
            stream.on('finish', () => {
                console.log('File Download Successfully');
                fs.rename(downloadedFile, watchFile, function (err) {
                    if (err) console.log(err);
                    console.log('File Moved Successfully!');
                });
            });
        });
    });
    res.sendStatus(200);
});
Let me know if I missed anything.
The fix turned out to be the file name: I replaced the separator from '|' to ','. Windows does not allow the '|' character in file names, so fs.createWriteStream failed there, while the same code ran fine on Linux.
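
For anyone hitting the same thing: Windows forbids several characters in file names ( < > : " / \ | ? * ), while Linux only forbids '/' and the null byte, which is why the pipe character worked on Ubuntu and failed on Windows Server. A small sketch of a sanitizer (the replacement character is an assumption; pick whatever suits the naming scheme):

// Sketch: strip characters Windows does not allow in file names.
function sanitizeFilename(name, replacement = ',') {
    return name.replace(/[<>:"/\\|?*]/g, replacement);
}

console.log(sanitizeFilename('draft123|report.pdf')); // 'draft123,report.pdf'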

Get URL of uploaded file to S3 (after the file has been uploaded)

Given a link that downloads a PDF when the user hits it,
I want to upload the file to S3 and then get a public Amazon S3 URL (I don't want the user to see the real link, which is why I'd rather upload it to S3).
Consider the code:
module.exports = class S3Uploader {
    uploadPdfFromURLToS3 = urlToUpload => {
        import aws from "aws-sdk";
        const request = require("request");
        const s3 = new aws.S3();
        const config = require("config");
        var uuidv4 = require("uuid/v4");
        var filename = uuidv4() + ".pdf";
        aws.config.update({
            accessKeyId: config.get("-------------"),
            secretAccessKey: config.get("-----------------")
        });
        var promise = new Promise((resolve, reject) => {
            return request({ url: urlToUpload, encoding: null }, function (err, res, body) {
                if (err) return reject({ status: 500, error: err });
                return resolve({ status: 200, body: body });
            });
        });
        promise.then(pdf => {
            if (pdf.status == 200) {
                s3.putObject(
                    {
                        Bucket: "-----Your-Bucket-Name",
                        Body: pdf.body,
                        Key: filename,
                        ACL: "public-read"
                    },
                    (err, data) => {
                        if (err) console.log(err);
                        else {
                            console.log("uploaded");
                            // Get the S3 Public link ????
                        }
                    }
                );
            }
        });
    };
};
How can I get the link after the file has been uploaded successfully , in the callback ?
You can build up the URL using string concatenation:
https://your-bucket-name.s3-eu-west-1.amazonaws.com/filename
Make sure you are using the correct region.
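For example (bucketName stands in for your bucket; keys with special characters should be URL-encoded):

// Sketch: virtual-hosted-style URL for a public object.
const publicUrl = `https://${bucketName}.s3-eu-west-1.amazonaws.com/${encodeURIComponent(filename)}`;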
You can try adding ${data.Location} to your console.log("uploaded") line - note it needs a backtick template literal, not quotes:
console.log(`uploaded: ${data.Location}`);
TRY THIS - the main change is in the s3.putObject() callback:
module.exports = class S3Uploader {
    uploadPdfFromURLToS3 = urlToUpload => {
        import aws from "aws-sdk";
        const request = require("request");
        const s3 = new aws.S3();
        const config = require("config");
        var uuidv4 = require("uuid/v4");
        var filename = uuidv4() + ".pdf";
        aws.config.update({
            accessKeyId: config.get("-------------"),
            secretAccessKey: config.get("-----------------")
        });
        var promise = new Promise((resolve, reject) => {
            return request({ url: urlToUpload, encoding: null }, function (err, res, body) {
                if (err) return reject({ status: 500, error: err });
                return resolve({ status: 200, body: body });
            });
        });
        promise.then(pdf => {
            if (pdf.status == 200) {
                s3.putObject(
                    {
                        Bucket: "-----Your-Bucket-Name",
                        Body: pdf.body,
                        Key: filename,
                        ACL: "public-read"
                    },
                    async (err, data) => {
                        if (err) {
                            console.log("error");
                        } else {
                            console.log(data.location); // get pdf url
                        }
                    }
                );
            }
        });
    };
};
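
A caveat on the snippet above: in the AWS SDK for JavaScript v2, the putObject() callback data contains fields like ETag but no location, so data.location will be undefined. It is s3.upload() that reports the object URL, in data.Location (capital L). A sketch of that variant:

// Sketch: s3.upload() returns the object URL in data.Location.
s3.upload(
    {
        Bucket: "-----Your-Bucket-Name",
        Body: pdf.body,
        Key: filename,
        ACL: "public-read"
    },
    (err, data) => {
        if (err) return console.log(err);
        console.log("uploaded at", data.Location); // e.g. https://bucket.s3.amazonaws.com/<uuid>.pdf
    }
);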

Decoding base64 image email attachment retrieved using imap nodejs

I'm attempting to retrieve an e-mail attachment image using node-imap, which can be found here: https://github.com/mscdex/node-imap
After retrieving the image I want to save it to a file, then save the name in the MySQL database so I can display it on the front end using EJS.
I've gotten as far as retrieving the attachment image, attempting to decode it, and saving it. Unfortunately, when the file is opened from the folder it says: "It appears that we don't support this file format".
Upon further investigation, if I convert the saved file to a base64 string using this online tool: https://www.motobit.com/util/base64-decoder-encoder.asp and then run that through a base64-to-image converter (https://codebeautify.org/base64-to-image-converter), it shows the image just fine.
I suspect my code is actually converting the image to base64 rather than decoding it, since the file size goes up from 250kb to 332kb.
I am unsure how to proceed so that the photo is properly decoded and viewable as the original .jpeg image.
var fs = require('fs'), fileStream;
var { Base64Encode } = require('base64-stream');
const Imap = require('imap'),
    inspect = require('util').inspect;

var imap = new Imap({
    user: 'gmailaccount@gmail.com',
    password: 'gmailaccount',
    host: 'imap.gmail.com',
    port: 993,
    tls: true
});

/* To Uppercase function */
function toUpper(thing) { return thing && thing.toUpperCase ? thing.toUpperCase() : thing; }

/* function to find attachments in imap email */
function findAttachmentParts(struct, attachments) {
    attachments = attachments || [];
    for (var i = 0, len = struct.length, r; i < len; ++i) {
        if (Array.isArray(struct[i])) {
            findAttachmentParts(struct[i], attachments);
        } else {
            if (struct[i].disposition && ['INLINE', 'ATTACHMENT'].indexOf(struct[i].disposition.type) > -1) {
                attachments.push(struct[i]);
            }
        }
    }
    return attachments;
}

function buildAttMessageFunction(attachment) {
    var filename = attachment.params.name;
    var encoding = attachment.encoding;
    return function (msg, seqno) {
        var prefix = '(#' + seqno + ') ';
        msg.on('body', function (stream, info) {
            // Create a write stream so that we can stream the attachment to file;
            console.log(prefix + 'Streaming this attachment to file', filename, info);
            var writeStream = fs.createWriteStream(filename);
            writeStream.on('finish', function () {
                console.log(prefix + 'Done writing to file %s', filename);
            });
            // stream.pipe(writeStream); this would write base64 data to the file.
            // so we decode during streaming using
            if (toUpper(encoding) === 'BASE64') {
                // the stream is base64 encoded, so here the stream is decoded on the fly and piped to the write stream (file)
                stream.pipe(new Base64Encode()).pipe(writeStream);
            } else {
                // here we have none or some other decoding streamed directly to the file which renders it useless probably
                stream.pipe(writeStream);
            }
        });
        msg.once('end', function () {
            console.log(prefix + 'Finished attachment %s', filename);
        });
    };
}

function openInbox(cb) {
    imap.openBox('INBOX', true, cb);
}

/* Take all unseen emails, output to console and save them to a text file */
imap.once('ready', function () {
    openInbox(function (err, box) {
        if (err) throw err;
        imap.search(['UNSEEN'], function (err, results) {
            var messages = [];
            if (err) throw err;
            var f = imap.fetch(results, { id: 1, bodies: ['HEADER.FIELDS (FROM TO SUBJECT DATE)', '1.1'], struct: true });
            f.on('message', function (msg, seqno) {
                var body = '',
                    header = '',
                    parsedMsg = {};
                var prefix = '(#' + seqno + ') ';
                msg.on('body', function (stream, info) {
                    var buffer = '', count = 0;
                    if (info.which === 'TEXT' || info.which === '1.1') {
                        stream.on('data', function (chunk) { body += chunk.toString('utf8'); });
                        stream.once('end', function () { parsedMsg.body = body; });
                    } else {
                        stream.on('data', function (chunk) { header += chunk.toString('utf-8'); });
                        stream.once('end', function () { parsedMsg.header = Imap.parseHeader(header); });
                    }
                    stream.pipe(fs.createWriteStream('msg-' + seqno + '-body.txt'));
                });
                msg.once('attributes', function (attrs) {
                    var attachments = findAttachmentParts(attrs.struct);
                    console.log(prefix + 'Has attachments: %d', attachments.length);
                    for (var i = 0, len = attachments.length; i < len; ++i) {
                        var attachment = attachments[i];
                        /* This is what each attachment looks like: {
                            partID: '2',
                            type: 'application',
                            subtype: 'octet-stream',
                            params: { name: 'file-name.ext' },
                            id: null,
                            description: null,
                            encoding: 'BASE64',
                            size: 44952,
                            md5: null,
                            disposition: { type: 'ATTACHMENT', params: { filename: 'file-name.ext' } },
                            language: null
                        } */
                        console.log(prefix + 'Fetching attachment %s', attachment.params.name);
                        var f = imap.fetch(attrs.uid, {
                            bodies: [attachment.partID],
                            struct: true
                        });
                        // build function to process attachment message
                        f.on('message', buildAttMessageFunction(attachment));
                    }
                    parsedMsg.attrs = attrs;
                    console.log(prefix + 'Attributes: %s', inspect(attrs, false, 8));
                });
                msg.once('end', function () {
                    console.log(prefix + 'Finished email');
                    messages.push(parsedMsg);
                });
            });
            f.once('error', function (err) {
                console.log('Fetch error: ' + err);
            });
            f.once('end', function () {
                console.log('Done fetching all messages!');
                for (i in messages) {
                    console.log(i + ': ' + inspect(messages[i], false, 4));
                }
                imap.end();
            });
        });
    });
});

imap.once('error', function (err) {
    console.log(err);
});
imap.once('end', function () {
    console.log('Connection ended');
});
imap.connect();
The expected output is a .jpeg image saved to the file directory that can be viewed. The actual output is an image file that, when double-clicked to view, says: "It appears that we don't support this file format."
Unfortunately, I couldn't find a way to use node-imap to properly decode and retrieve email attachments. I ended up using imap-simple instead and was able to achieve the desired result.
I used imap-simple's example code block to retrieve attachments.
var imaps = require('imap-simple');

var config = {
    imap: {
        user: 'your@email.address',
        password: 'yourpassword',
        host: 'imap.gmail.com',
        port: 993,
        tls: true,
        authTimeout: 3000
    }
};

imaps.connect(config).then(function (connection) {
    connection.openBox('INBOX').then(function () {
        // Fetch emails from the last 24h
        var delay = 24 * 3600 * 1000;
        var yesterday = new Date();
        yesterday.setTime(Date.now() - delay);
        yesterday = yesterday.toISOString();
        var searchCriteria = ['UNSEEN', ['SINCE', yesterday]];
        var fetchOptions = { bodies: ['HEADER.FIELDS (FROM TO SUBJECT DATE)'], struct: true };
        // retrieve only the headers of the messages
        return connection.search(searchCriteria, fetchOptions);
    }).then(function (messages) {
        var attachments = [];
        messages.forEach(function (message) {
            var parts = imaps.getParts(message.attributes.struct);
            attachments = attachments.concat(parts.filter(function (part) {
                return part.disposition && part.disposition.type.toUpperCase() === 'ATTACHMENT';
            }).map(function (part) {
                // retrieve the attachments only of the messages with attachments
                return connection.getPartData(message, part)
                    .then(function (partData) {
                        return {
                            filename: part.disposition.params.filename,
                            data: partData
                        };
                    });
            }));
        });
        return Promise.all(attachments);
    }).then(function (attachments) {
        console.log(attachments);
        // =>
        // [ { filename: 'cats.jpg', data: Buffer() },
        //   { filename: 'pay-stub.pdf', data: Buffer() } ]
    });
});
On both instances, rename Base64Encode to Base64Decode. The IMAP body part arrives already base64-encoded, so piping it through Base64Encode encodes it a second time (which is why the file grows by roughly a third); it needs to be decoded instead.
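
For reference, base64-stream exports both directions, so the fix is a one-word change inside buildAttMessageFunction - sketched here:

// Sketch: decode the base64-encoded IMAP body part while streaming to disk.
var { Base64Decode } = require('base64-stream');

if (toUpper(encoding) === 'BASE64') {
    // IMAP delivers the part base64-encoded; decode it on the way to the file.
    stream.pipe(new Base64Decode()).pipe(writeStream);
} else {
    stream.pipe(writeStream);
}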
