Downloading multiple files using AngularJS and storing in path - javascript

I am new to Angular,I have a scenario, where I need to download multiple files at the same time. My files are stored in GridFS. I am able to download the files, but for example a pdf is blank. The contentType stored in gridFS is "contentType": "binary/octet-stream", Am I missing out on anything?
My Jade code is
tr(ng-repeat='row in displayedCollection')
td {{ row.Name}}
td {{ row.email}}
td
button.btn.btn-info(type='button',ng-click="downloadDocuments(row.documentsSubmitted)" download) Download Documents
My controller code is
$scope.downloadDocuments = function (row) {
angular.forEach(row, function (value, key) {
var fileToDownload = value.id + "," + 'TrainingPartnerAddingTrainingCenter';
$http.get('/downloadDocuments/' + fileToDownload).success(function (result, status, headers, config) {
var _contentType = (headers('Content-Type'));
var _fileName = headers('FileName');
var blob = new Blob([ result ], { type : _contentType });
var url = (window.URL || window.webkitURL).createObjectURL(blob);
var anchor = angular.element('<a/>');
anchor.attr({
href : url,
target : '_blank',
download : _fileName
})[0].click();
});
});
};
my node.js code is as follows
exports.downloadDocument = function (req, res) {
var paramData = req.params.fileToDownload.split(',');
var role = req.session.user.role;
var conn = mongoose.connection;
var gfs = Grid(conn.db, mongoose.mongo);
routesLogger.logInfo(role, "downloadDocument", "START");
gfs.findOne({_id: paramData[0], root: paramData[1]}, function (err, file) {
if (err) {
routesLogger.logError(role, "downloadDocument", err);
return res.status(400).send(err);
}
else if (!file) {
routesLogger.logError(role, "downloadDocument", "No File Found for download");
return res.status(404).send('Error on the database looking for the file.');
}
else {
res.set('Content-Type', file.contentType);
res.set('Content-Disposition', 'attachment; filename="' + file.filename + '"');
var readstream = gfs.createReadStream({
_id: paramData[0],
root: paramData[1]
});
readstream.on("error", function (err) {
routesLogger.logError(role, "downloadDocument", err);
res.end();
});
readstream.pipe(res);
routesLogger.logInfo(role, "downloadDocument", "END");
}
});
};

So the mistake I was doing was not adding the parameter { responseType: 'arraybuffer' }. I found the answer in this link
AngularJS: Display blob (.pdf) in an angular app
$http.get('/downloadDocuments/' + fileToDownload,{ responseType: 'arraybuffer' }).success(function (result, status, headers, config) {
console.log(headers('Content-Type'));
var _contentType = (headers('Content-Type'));
var _fileName = headers('FileName');
var blob = new Blob([result], {type: _contentType });
saveAs(blob, _fileName);

Related

pdf encryption and uploading aws S3 not working

In this code i am trying to create pdf file and encrypting, i am able to create pdf encryption, in this code i am creating one Tempfile in my local machine after that i am encrypting that file and in here 'qpdf --encrypt Decorpot test 40 -- ${tempFileName} --replace-input' i am just replacing the non encrypted file with encrypted file and its storing correctly as a encrypted file in local machine,but when i am going to upload in s3 aws only non encryted file is added. i tried so many methods..please help me.. ThankYou
module.exports.generateChecklistPdf2 = function () {
return new Promise((resolve, reject) => {
let htmlFilePath = '';
htmlFilePath = path.join(__dirname, '../static/html/checkList.html');
htmlFilePath = htmlFilePath.replace(new RegExp(/\\/g), '/');
pdfFilePath = path.join(__dirname, '../uploads/' + Math.ceil(Math.random() *
810000) + '.pdf');
pdfFilePath = pdfFilePath.replace(new RegExp(/\\/g), '/');
let date = new Date();
return ejs.renderFile(htmlFilePath, { moment: moment, date }, {}, function (err,
htmlFile) {
if (err) return console.log(err);
var tempFileName = pdfFilePath;
var file = fs.createWriteStream(tempFileName);
assetsPath = path.join(__dirname, '../static/');
assetsPath = assetsPath.replace(new RegExp(/\\/g), '/');
options = {
base: 'file:///' + assetsPath,
}
setTimeout(() => {
var cmd = `qpdf --encrypt Decorpot test 40 -- ${tempFileName}
--replace-input`;
exec(cmd, function (err) {
if (err) {
console.error('Error occured: ' + err);
} else {
console.log('PDF encrypted :)', tempFileName);
}
});
}, 1000)
return pdf.create(htmlFile, options).toStream(function (err, stream) {
stream.pipe(file).on('close', () => {
formData = {
customerName: "customername",
file: fs.createReadStream(pdfFilePath)
}
let url = '';
if (process.env.NODE_ENV == 'production') {
url = 'http://13.233.26.162:5003/api/file/upload';
} else {
url = 'http://localhost:5003/api/file/upload';
}
return request.post({ url: url, formData: formData },
function (err, res) {
if (err) return console.log(err);
console.log(res.body)
let resolveObject = {};
resolveObject.pdfUrl = res.body;
resolve(resolveObject);
});
});
});
});
});
}

Decoding base64 image email attachment retrieved using imap nodejs

I'm attempting to retrieve an e-mail attachment image using Node.js imap that can be found here: https://github.com/mscdex/node-imap
After retrieving the image I want to save it to file and then save the name in the MySQL database so I can retrieve it in the front end using EJS.
I've gotten as far as retrieving the e-mail attachment image and attempting to decode it and then save it. Unfortunately when opened from the folder it says: "It appears that we don't support this file format".
Upon further investigation, if I convert it to a base64 string using this online tool: https://www.motobit.com/util/base64-decoder-encoder.asp and then go to a base64 to image converter (https://codebeautify.org/base64-to-image-converter), it shows the image just fine.
I'm skeptical that my code is actually converting the image to base64 as the file size goes up from 250kb to 332kb.
I am unsure of how to proceed to allow the photo to be properly decoded for viewing as the original .jpeg image.
var fs = require('fs'), fileStream;
var {Base64Encode} = require('base64-stream');
const Imap = require('imap'),
inspect = require('util').inspect;
var imap = new Imap({
user: 'gmailaccount#gmail.com',
password: 'gmailaccount',
host: 'imap.gmail.com',
port: 993,
tls: true
});
/* To Uppercase function */
function toUpper(thing) { return thing && thing.toUpperCase ? thing.toUpperCase() : thing;}
/* function to find attachments in imap email */
function findAttachmentParts(struct, attachments) {
attachments = attachments || [];
for (var i = 0, len = struct.length, r; i < len; ++i) {
if (Array.isArray(struct[i])) {
findAttachmentParts(struct[i], attachments);
}
else {
if (struct[i].disposition && ['INLINE', 'ATTACHMENT'].indexOf(struct[i].disposition.type) > -1) {
attachments.push(struct[i]);
}
}
}
return attachments;
}
function buildAttMessageFunction(attachment) {
var filename = attachment.params.name;
var encoding = attachment.encoding;
return function (msg, seqno) {
var prefix = '(#' + seqno + ') ';
msg.on('body', function(stream, info) {
//Create a write stream so that we can stream the attachment to file;
console.log(prefix + 'Streaming this attachment to file', filename, info);
var writeStream = fs.createWriteStream(filename);
writeStream.on('finish', function() {
console.log(prefix + 'Done writing to file %s', filename);
});
//stream.pipe(writeStream); this would write base64 data to the file.
//so we decode during streaming using
if (toUpper(encoding) === 'BASE64') {
//the stream is base64 encoded, so here the stream is decode on the fly and piped to the write stream (file)
stream.pipe(new Base64Encode()).pipe(writeStream);
} else {
//here we have none or some other decoding streamed directly to the file which renders it useless probably
stream.pipe(writeStream);
}
});
msg.once('end', function() {
console.log(prefix + 'Finished attachment %s', filename);
});
};
}
function openInbox(cb){
imap.openBox('INBOX', true, cb);
}
/* Take all unseen emails, output to console and save them to a text file */
imap.once('ready', function(){
openInbox(function(err, box){
if (err) throw err;
imap.search([ 'UNSEEN' ], function(err, results) {
var messages = [];
if (err) throw err;
var f = imap.fetch(results, { id: 1, bodies: ['HEADER.FIELDS (FROM TO SUBJECT DATE)', '1.1'], struct: true });
f.on('message', function(msg, seqno) {
var body = ''
, header = ''
, parsedMsg = {}
var prefix = '(#' + seqno + ') ';
msg.on('body', function(stream, info) {
var buffer = '', count = 0;
if(info.which === 'TEXT' || info.which === '1.1'){
stream.on('data', function(chunk) { body += chunk.toString('utf8') })
stream.once('end', function() { parsedMsg.body = body })
}
else{
stream.on('data', function(chunk) { header += chunk.toString('utf-8') })
stream.once('end', function() { parsedMsg.header = Imap.parseHeader(header) })
}
stream.pipe(fs.createWriteStream('msg-' + seqno + '-body.txt'));
});
msg.once('attributes', function(attrs) {
var attachments = findAttachmentParts(attrs.struct);
console.log(prefix + 'Has attachments: %d', attachments.length);
for(var i = 0, len = attachments.length; i < len; ++i){
var attachment = attachments[i];
/*This is how each attachment looks like {
partID: '2',
type: 'application',
subtype: 'octet-stream',
params: { name: 'file-name.ext' },
id: null,
description: null,
encoding: 'BASE64',
size: 44952,
md5: null,
disposition: { type: 'ATTACHMENT', params: { filename: 'file-name.ext' } },
language: null
}
*/
console.log(prefix + 'Fetching attachment %s', attachment.params.name);
var f = imap.fetch(attrs.uid , {
bodies: [attachment.partID],
struct: true
});
//build function to process attachment message
f.on('message', buildAttMessageFunction(attachment));
}
parsedMsg.attrs = attrs;
console.log(prefix + 'Attributes: %s', inspect(attrs, false, 8));
});
msg.once('end', function() {
console.log(prefix + 'Finished email');
messages.push( parsedMsg );
});
});
f.once('error', function(err) {
console.log('Fetch error: ' + err);
});
f.once('end', function() {
console.log('Done fetching all messages!');
for( i in messages ) {
console.log( i + ': ' + inspect( messages[i], false, 4 ) );
}
imap.end();
});
});
});
});
imap.once('error', function(err){
console.log(err);
});
imap.once('end', function(){
console.log('Connection ended');
});
imap.connect();
The expected output is a .jpeg image saved to the file directory that is able to be viewed. The actual output I am getting is an image file that when double clicked to view says: "It appears that we don't support this file format."
Unfortunately, I couldn't find a way to use node-imap to properly decode and retrieve email attachments. I ended up using imap-simple instead and was able to achieve the desired result.
I used imap-simple's example code block to retrieve attachments.
var imaps = require('imap-simple');
var config = {
imap: {
user: 'your#email.address',
password: 'yourpassword',
host: 'imap.gmail.com',
port: 993,
tls: true,
authTimeout: 3000
}
};
imaps.connect(config).then(function (connection) {
connection.openBox('INBOX').then(function () {
// Fetch emails from the last 24h
var delay = 24 * 3600 * 1000;
var yesterday = new Date();
yesterday.setTime(Date.now() - delay);
yesterday = yesterday.toISOString();
var searchCriteria = ['UNSEEN', ['SINCE', yesterday]];
var fetchOptions = { bodies: ['HEADER.FIELDS (FROM TO SUBJECT DATE)'], struct: true };
// retrieve only the headers of the messages
return connection.search(searchCriteria, fetchOptions);
}).then(function (messages) {
var attachments = [];
messages.forEach(function (message) {
var parts = imaps.getParts(message.attributes.struct);
attachments = attachments.concat(parts.filter(function (part) {
return part.disposition && part.disposition.type.toUpperCase() === 'ATTACHMENT';
}).map(function (part) {
// retrieve the attachments only of the messages with attachments
return connection.getPartData(message, part)
.then(function (partData) {
return {
filename: part.disposition.params.filename,
data: partData
};
});
}));
});
return Promise.all(attachments);
}).then(function (attachments) {
console.log(attachments);
// =>
// [ { filename: 'cats.jpg', data: Buffer() },
// { filename: 'pay-stub.pdf', data: Buffer() } ]
});
});
On both instances of Base64Encode rename it to Base64Decode.

Dynamically load subtitles into video

Here is the situation: The server receives the srt file stream. The server converts the stream into vtt format. The resulting stream is then buffered into a buffer which is then sent to client through a io.socket.
Here is the server code:
self.expressApp.post("/upload", function (req, res) {
var busboy = new Busboy({ headers: req.headers });
busboy.on('file', function(fieldname, file, filename, encoding, mimetype) {
var subchunks = [],
sub = file.pipe(srt2vtt());
sub.on('data',function(buffer){
subchunks.push(buffer);
});
sub.on('end',function(){
var buffer = Buffer.concat(subchunks);
socket.emit('Subtitles',buffer);
});
});
busboy.on('finish', function() {
res.writeHead(200, { 'Connection': 'close' });
res.end();
});
return req.pipe(busboy);
});
Here is the client code:
var subobj = new Blob([new Uint8Array(payload)],{type: "text/vtt"}),
url = (URL || webkitURL).createObjectURL(subobj),
track = document.createElement("track");
track.kind = "captions";
track.label = "English";
track.srclang = "en";
track.src = url;
track.addEventListener("load", function() {
this.mode = "showing";
videoElement.textTracks[0].mode = "showing";
});
videoElement.append(track);
Why this is not working. Any help is appreciated.
Thanks.
I'm using plyr, there are some bug for dashjs captions, so i created a blob of that webvtt and append that on video container, here is my code
fetch("url")
.then(response => response.text())
.then(result => {
var blob = new Blob([result], {
type: "text/vtt"
});
const track = document.createElement('track');
Object.assign(track, {
label: 'language',
default: true,
src: window.URL.createObjectURL(blob)
});
video.appendChild(track);
})
.catch(error => console.log('error', error));`
Here is the solution.
What was I doing wrong?
I was sending the vtt stream as binary array rather than plan text string.
Server-side Code:
self.expressApp.post("/upload", function (req, res) {
var busboy = new Busboy({ headers: req.headers });
busboy.on('file', function(fieldname, file, filename, encoding, mimetype) {
var subchunks = "",
sub = file.pipe(srt2vtt());
sub.on('data',function(buffer){
subchunks += buffer.toString('utf8');
});
sub.on('end',function(){
socket.emit('Subtitles',subchunks);
});
});
busboy.on('finish', function() {
res.writeHead(200, { 'Connection': 'close' });
res.end();
});
return req.pipe(busboy);
});
Client-side Code:
var subobj = new Blob([new Uint8Array(payload)],{type: "text/vtt"}),
url = (URL || webkitURL).createObjectURL(subobj),
track = document.createElement("track");
track.kind = "captions";
track.label = "English";
track.srclang = "en";
track.src = url;
videoElement.append(track);
track.mode = "showing";
videoElement.textTracks[0].mode = "showing";

Using an array of keys to push multiple s3 objects into 1 array

const AWS = require("aws-sdk");
const s3 = new AWS.S3();
function getAttachments(file, bucket) {
var paramsArray = [];
var attachmentArray = [];
for (var a = 0; a < file.length; a++) {
paramsArray.push({
Bucket: bucket,
Key: file[a]
});
}
paramsArray.map(x = > s3.getObject(x, function (err, data) {
if (err) {
console.log(err);
}
attachmentArray.push({
filename: x.Key,
content: data.Body
});
console.log(attachmentArray)
})
)
}
var attachments = ['a.doc', 'b.doc', 'c.doc', 'd.doc', 'e.doc']
getAttachments(attachments,
'exampleBucket')
I want to log a single array of objects, each with the key as a filename and a buffer string as the content. However, in this example I am logging multiple arrays increasing in size for each element in the attachments array.
These are in the correct format for example: [{filename: a.doc, content: 'Buffer String'}], but I only want one array with the 5 objects returned in that format.
Can anyone tell me how to do this?
here are he list of the file names I am trying to download from ascendon1 bucket
here is the AJAX request
var data = {fileNames: ['1.txt','2.txt']};
$.ajax({
type: "POST",
url: 'downloadFiles',
data: JSON.stringify(data),
processData: false,
dataType: "json",
contentType: 'application/json',
success:function(data){
alert(data);
}
});
Here is the NodeJS Code
app.post('/downloadFiles',function(req,res){
var promises =[];
var filesContent = [];
var filesToDownload = req.body.fileNames
for(var z=0; z<req.body.fileNames.length;z++){
var fileName = filesToDownload[z];
promises.push(getAttachment(fileName));
}
Promise.all(promises).then(function(data){
for(var z=0; z<req.body.fileNames.length;z++){
var contents={
filename: filesToDownload[z],
content: data[z].Body
};
filesContent.push(contents);
}
console.log(filesContent);
}).catch(function(error){
console.log(error.stack);
})
})
function getAttachment(fileName){
var params ={
Bucket: "ascendon1",
Key: fileName
}
return s3.getObject(params).promise();
}
here is the final output, we got the all the contents that we requested in a single array i.e is filesContent
Just change your code to this one:
const AWS = require("aws-sdk");
const s3 = new AWS.S3();
const attachments = ['a.doc', 'b.doc', 'c.doc', 'd.doc', 'e.doc']
getAttachments(attachments, bucket)
.then(console.log)
.catch(console.error);
function getAttachments(files, bucket) {
return files.map(filename => getAttachment(filename, bucket));
}
function getAttachment(filename, bucket){
return new Promise((resolve,reject) => {
const params = {
Bucket: bucket,
Key: filename
};
s3.getObject(params, (error, data) => {
if(error) return reject(error);
return resolve({
filename: filename,
content: data.Body
});
});
});
}
Or you can drop a getAttachments function at all.
Hope it helps.

angular js file upload using http post method

I am tring to upload a file using angular $http method to node backend
I want to upload the form with additional form fields.
This is my code
var data = new FormData();
data.append('title', 'sometitle');
data.append('uploadedFile', $scope.uploadedFile);
FileService.upload(url, data, function(data, status) {
if(status===HTTP_OK) {
$scope.uploadSuccess = true;
$scope.showUploadProgressBar = false;
} else {
// error occured
console.log(data);
}
});
FileService
FileService.upload = function(url, data, callback) {
$http({
method : 'POST',
url : url,
data : data,
headers: {'Content-Type': undefined },
transformRequest: angular.identity
}).success(function(data, status) {
callback(data, callback);
}).error(function(data, status) {
callback(data, status);
});
};
I am using node multiparty module for file upload. I am receiving the file correctly. But the field value for title is undefined.
I don't know why title value is undefined
Node.js backend file upload handler
var form;
if(options.uploads.tempDir) {
form = new multiparty.Form({uploadDir : options.root + '/' + options.uploads.tempDir});
} else {
form = new multiparty.Form();
}
form.on('file', function(name, receivedFile) {
var tmpPath = receivedFile.path,
fileName = receivedFile.originalFilename,
targetDirectory = uploadDirectory + '/' + req.params.id,
targetPath = targetDirectory + '/' + fileName,
file = {
filePath : targetPath,
tempPath : tmpPath,
fileName : fileName,
size : receivedFile.size
};
fileUploadStatus.file = file;
// move file
fse.move(tmpPath, targetPath, function(err) {
if(err) {
console.log('Error moving file [ ' + targetPath + ' ] ' + JSON.stringify(err));
}
});
});
form.on('error', function(err) {
fileUploadStatus.err = err;
req.fileUploadStatus = fileUploadStatus;
next();
});
form.on('close', function() {
req.fileUploadStatus = fileUploadStatus;
next();
});
form.on('field', function(name, value) {
console.log('field called');
console.log(name);
console.log(value);
req.body = req.body || {};
req.body[name] = value;
});
// ignoring parts. Implement any other logic here
form.on('part', function(part) {
var out = new stream.Writable();
out._write = function (chunk, encoding, done) {
done(); // Don't do anything with the data
};
part.pipe(out);
});
// parsing form
form.parse(req);

Categories