Download a PDF automatically once it's generated (Node, Express) - javascript

I am using the Node module html-pdf
to convert HTML to PDF. I have successfully converted the HTML to PDF, but I am having trouble downloading the file once it has been created.
Code to generate PDF:
var fs = require('fs');
var pdf = require('html-pdf');

var html = fs.readFileSync('./test/businesscard.html', 'utf8');
var options = { format: 'Letter' };

pdf.create(html, options).toFile('./businesscard.pdf', function (err, res) {
  if (err) return console.log(err);
  console.log(res); // { filename: '/app/businesscard.pdf' }
});
How can I either open the PDF in the browser for the user to see, or automatically download it in the browser, without the user having to go through another step?

I made some changes to your code. In this code I create a route; whenever you make a request to that route, it converts the HTML file to PDF, writes the PDF into the directory you are executing from, and at the same time serves the PDF to the browser, where it can be viewed or downloaded. Hope this helps. Here is my code:
var express = require('express');
var fs = require('fs');
var pdf = require('html-pdf');

var html = fs.readFileSync('C:/Users/nodejs/tasks/file.html', 'utf8');
var options = { format: 'Letter' };

var app = express();

app.get('/file', function (request, response) {
  pdf.create(html, options).toFile('./businesscaw.pdf', function (err, res) {
    if (err) return console.log(err);
    console.log(res); // res.filename is the absolute path of the written PDF
    // Read the freshly written PDF back and send it to the browser.
    fs.readFile(res.filename, function (err, data) {
      if (err) return response.status(500).send(err);
      response.contentType("application/pdf");
      response.send(data);
    });
  });
});

app.listen(3000, function () {
  console.log("Server listening on http://localhost:3000");
});
Output:
Open localhost:3000/file in the browser to see the result.
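If you don't need the PDF on disk at all, html-pdf also exposes a toStream method you can pipe straight to the response. A minimal sketch of that variant, reusing the html, options, and app from above (the route name and download filename are placeholders):

// Stream the generated PDF directly to the browser, skipping the temp file.
app.get('/file-stream', function (request, response) {
  pdf.create(html, options).toStream(function (err, stream) {
    if (err) return response.status(500).send(err);
    response.contentType('application/pdf');
    // 'attachment' forces a download; drop this header to view the PDF inline instead.
    response.setHeader('Content-Disposition', 'attachment; filename=businesscard.pdf');
    stream.pipe(response);
  });
});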

Related

Can't read file after writing it with fs

I want to read an image, write it to a folder, and read it again to get its base64 encoding.
I get the following error:
Error: ENOENT: no such file or directory, access 'C:\Workspace\Project\upload_storage\image.jpg'
at Object.accessSync (fs.js:192:3)
My code:
const FS = require("fs");
var multiparty = require('multiparty');
var path = require('path');
function readAndWriteFile(file, newPath){
  FS.readFileSync(file.path, (err, data) => {
    FS.writeFileSync(newPath, data, (err) => {
    });
  });
}
function base64Encode(path, filemime) {
  FS.readFileSync(path, {encoding: 'base64'}, (err, data) => {
    if (err) {
      throw err;
    }
    return `data:${filemime};base64,${data}`;
  });
}
...
var form = new multiparty.Form();
// retrieve files using multiparty form
form.parse(req, function(err, fields, files) {
  var document;
  const documents = files.file;
  for (i = 0; i < documents.length; i++) {
    document = documents[i];
    const contentType = String(document.headers["content-type"]);
    filePath = path.join(process.cwd(), '/upload_storage/', document.originalFilename);
    readAndWriteFile(document, filePath);
    // convert image to base64 encoded string
    const base64str = base64Encode(filePath, contentType);
    console.log(base64str);
  }
});
If I comment out the base64Encode function call, the files get created.
What am I doing wrong?
Don't pass callbacks to the synchronous *FileSync functions; they return their results directly. But it looks like you want copyFileSync followed by unlinkSync anyway:
const fs = require('fs');
function readAndWriteFile(file, newPath){
  fs.copyFileSync(file.path, newPath);
  fs.unlinkSync(file.path);
}
Did you try reading the documentation for fs?

Local file download is corrupted

I am trying to download a file to my Windows PC using Node.js.
I tried the following code. The problem is that the file I download from Node.js is 185 kB, while the actual original file is 113 kB (found by downloading it directly in the browser).
var fs = require('fs');
var request = require('request');

function download(url, dest, cb){
  request.head(url, function(err, res, body){
    request(url).pipe(fs.createWriteStream(dest)).on('close', function(){
      cb();
    });
  });
}
I also tried downloading the file using different code:
var fs = require('fs');
var https = require('https');

function download(url, dest, cb) {
  var file = fs.createWriteStream(dest);
  var request = https.get(url, function (response) {
    response.pipe(file);
    file.on('finish', function () {
      file.close(cb);
    });
  });
}
But the same bug happened.
The problem is that when I try to open the file in Photoshop, it fails, saying the file is corrupted. Please help.
This code (using the built-in https module) should work correctly. The stream will close automatically; there's no need to close it, since the autoClose parameter defaults to true when creating a write file stream.
See the docs at: fs.createWriteStream.
If the file is still too large, it is likely that you are not using the direct image link; try selecting "View image" / "Open image in new tab" etc. in your browser and using that link instead.
const https = require("https");
const fs = require("fs");
const url = "https://upload.wikimedia.org/wikipedia/commons/thumb/e/e1/FullMoon2010.jpg/1024px-FullMoon2010.jpg";
const fileStream = fs.createWriteStream("test.jpg");
https.get(url, response => {
  response.pipe(fileStream);
});
You can also use the request library:
const request = require("request");
const fs = require("fs");
const url = "https://upload.wikimedia.org/wikipedia/commons/thumb/e/e1/FullMoon2010.jpg/1024px-FullMoon2010.jpg";
const fileStream = fs.createWriteStream("request-test.jpg");
const req = request(url);
req.on("response", response => {
response.pipe(fileStream);
});

NodeJS Process Uploaded file with readline

I'm getting a file the client uploads, which I receive with the following route:
app.post('/upload', function(req, res){
  var file = req.files.file;
  // Process file inline
  funcs.Process(file, req).then((data) => {
    //res.setHeader('Content-Length', stat.size);
    res.setHeader('Content-Type', 'text/plain');
    res.setHeader('Content-Disposition', 'attachment; filename=output.txt');
    res.write(data, 'binary');
    res.end();
  }).catch((err) => {
    res.redirect('/?err=' + err);
  });
});
This is the code for funcs.Process:
Process: function(file, req){
  return new Promise(function(resolve, reject){
    var lineReader = require('readline').createInterface({
      input: fs.createReadStream(file)
    });
    var output = "";
    lineReader.on('line', function (line) {
      // some checks
      ProcessLine(line).then((data) => {
        output += data + "\n";
      }).catch((err) => {
        reject(`Invalid line in file [${line}].`);
      });
    });
    lineReader.on('close', () => {
      resolve(output);
    });
  });
}
However, I am getting the following error, produced by the readline input fs.createReadStream(file):
TypeError: path must be a string or Buffer
As this is an uploaded file, how do I use it with readline's createInterface?
createReadStream accepts a string path.
You're passing it req.files.file, which is the file object created by express-fileupload.
You have two options:
1. Use the mv function (req.files.file.mv('<path>', callbackFunc)) to first move the file to a known path on your server, then pass this known path to fs.createReadStream.
2. Create the read stream directly from the file data buffer exposed in req.files.file.data, using streamifier or a similar library.
var fileStream = require('streamifier').createReadStream(req.files.file.data)
This stream can then be passed to readline as input directly.
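A minimal sketch of the second option, wired into the upload route (assuming express-fileupload populates req.files and that the app object exists as in the question):

const streamifier = require('streamifier');
const readline = require('readline');

app.post('/upload', function (req, res) {
  // Build a readable stream from the in-memory upload buffer -- no temp file needed.
  const lineReader = readline.createInterface({
    input: streamifier.createReadStream(req.files.file.data)
  });
  lineReader.on('line', (line) => {
    console.log(line); // process each line here
  });
  lineReader.on('close', () => res.send('done'));
});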

Writing to file only writes last item, not all items, why?

I'm trying to write a feed to a file using Node.js. The problem is, it doesn't write all the articles, only the last one.
var fs = require('fs');
var feedParser = require('ortoo-feedparser')
var url = "http://iwnsvg.com/feed";
feedParser.parseUrl(url).on('article', function(article) {
  console.log('title; ', article.title);
  fs.writeFile("articles.json", JSON.stringify(article.title), function(err) {
    if (err) {
      console.log(err);
    }
  });
});
Why?
Just change fs.writeFile( to fs.appendFile( and you're fine.
fs.writeFile overwrites your file each time you call it whereas fs.appendFile adds to a file.
As #Robert says, you should use appendFile, but also note that that change won't write out valid JSON. I'm not sure what output you're trying to achieve; if you just want the titles, you could write out a txt file with a title on each line like so:
var fs = require('fs');
var feedParser = require('ortoo-feedparser')
var url = "http://iwnsvg.com/feed";
feedParser.parseUrl(url).on('article', function(article) {
  console.log('title; ', article.title);
  fs.appendFile("articles.txt", article.title + "\n", function(err) {
    if (err) {
      console.log(err);
    }
  });
});
To write out JSON you can do:
var fs = require('fs');
var feedParser = require('ortoo-feedparser')
var url = "http://iwnsvg.com/feed";
let titles = [];
feedParser.parseUrl(url)
  .on('article', function (article) {
    console.log('title; ', article.title);
    titles.push(article.title);
  })
  .on('end', function () {
    fs.writeFile('articles.json', JSON.stringify({ titles }), function (err) {
      if (err) {
        console.log(err);
      }
    });
  });
fs.writeFile comes with some options, like flag. The default value of flag is w (write), so your data is replaced by the new data on every call.
Use 'a' (append) instead:
{ flag: 'a' }
and you'll be fine.
But don't forget that writeFile and appendFile are higher-level helpers in the fs library which open and close the file each time you need to add data.
Preferably, use fs.createWriteStream, which returns a writable stream (a writable file handle in other languages), then use and reuse this stream whenever you need to write data to your file.
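A minimal sketch of the stream variant, reusing the feed setup from the answers above (the output filename is a placeholder):

var fs = require('fs');
var feedParser = require('ortoo-feedparser');

var url = "http://iwnsvg.com/feed";
// Open the file once; every article reuses the same stream.
var out = fs.createWriteStream("articles.txt", { flags: 'a' });

feedParser.parseUrl(url)
  .on('article', function (article) {
    out.write(article.title + "\n");
  })
  .on('end', function () {
    out.end(); // flush and close the underlying file descriptor
  });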

How to upload image to S3 using Node

I am writing an Express app that takes in a base64 encoded string that represents an image. Right now, I'm not really sure how I can take that string and upload the image to AWS S3, so I'm reading in the encoded image string data, decoding it, writing a file using fs, and then trying to upload. I have this working for an endpoint that just takes in a raw file, and all of its content is correctly uploaded to AWS S3.
Now when I try to do what I described above, I'm able to upload to S3, but the file is 0 kB and empty, and I'm not sure why. I tested just taking the string data and writing it to a test file, and that works. However, when I try uploading to S3, the file shows up but it's empty. Here is my code:
router.post('/images/tags/nutritionalInformation/image/base64encoded', function (req, res) {
  console.log(req.body.imageString);
  var base64Stream = req.body.imageString;
  var imgDecodedBuffer = decodeBase64Image(base64Stream);
  console.log(imgDecodedBuffer);

  // write to image file
  var prefix = guid().toString() + ".jpg";
  var filePath = './uploads/' + prefix;
  console.log(filePath);
  fs.writeFile(filePath, imgDecodedBuffer.data, function(err) {
    console.log(err);
  });

  var stream = fs.createReadStream(filePath);
  console.log(stream);
  return s3fsImpl.writeFile(prefix, stream).then(function () {
    fs.unlink(filePath, function (err) {
      if (err) {
        console.error(err);
      }
    });
  });
})
Here are the relevant import statements:
var fs = require('fs');
var s3fs = require('s3fs');
var multiparty = require('connect-multiparty'),
    multipartyMiddleware = multiparty();
var s3fsImpl = new s3fs('blahblah', {
  accessKeyId: 'ACCESS_KEY_ID',
  secretAccessKey: 'SECRET'
});
Any help would be greatly appreciated!
If you simply pass in the buffer, which I presume is your imgDecodedBuffer.data value, it should work.
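A minimal sketch of that suggestion, assuming s3fs's writeFile accepts a Buffer as its data argument (decodeBase64Image and guid are the question's own helpers, and router and s3fsImpl are set up as above):

router.post('/images/tags/nutritionalInformation/image/base64encoded', function (req, res) {
  var imgDecodedBuffer = decodeBase64Image(req.body.imageString);
  var prefix = guid().toString() + ".jpg";

  // Upload the decoded bytes directly; no temp file, so no race with fs.writeFile.
  s3fsImpl.writeFile(prefix, imgDecodedBuffer.data).then(function () {
    res.status(200).send('uploaded');
  }, function (err) {
    console.error(err);
    res.status(500).send(err);
  });
});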
