iFrame + PDF.js + puppeteer - good combination to generate and show PDF files? - javascript

Since Monday I have been trying to find the right way to quickly and securely generate and display PDF files with the following setup - maybe I'm just confused or too blind to see the answer:
Apache - runs the PHP scripts for my current project (port 443)
NodeJS - runs a single script for generating PDF files from HTML (port 8080)
What I need: ensure that the user is allowed to generate and view the PDF.
It is important to me that the viewer toolbar (as seen in the screenshot) is available.
There is a cookie in which a session hash is stored and with which the user authenticates on every request (for example via AJAX).
Description of the full procedure:
On one page of my project an iFrame is displayed. Inside it, a PDF viewer (from PDF.js) is loaded, with some buttons around it:
[Screenshot: state before it all begins]
Clicking one of the buttons on the left (named "Load PDF 1", ...) fires the following event:
$(document).on("click", ".reportelement", function () {
    // some data needs to be passed
    let data = "report=birthdaylist";
    // point the iFrame to a new address
    $("#pdfViewer").attr("src", "https://example.org/inc/javascript/web/viewer.html?file=https://example.org:8080?" + data);
});
At this point the iFrame reloads the viewer, which takes the GET argument and requests it:
https://example.org/inc/javascript/web/viewer.html?file=https://example.org:8080?" + data //sends the data to the NodeJS script and receives the PDF
==> ?file=https://example.org:8080 //GET... it's bad... How do I do a POST from an iFrame?!
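As a side note: the nested URL should be wrapped in encodeURIComponent, otherwise any additional & inside the inner query string is interpreted as a parameter of viewer.html itself. A minimal sketch, using the same names as above:
// encode the inner URL so the viewer receives it as one single "file" parameter
let data = "report=birthdaylist";
let pdfSource = encodeURIComponent("https://example.org:8080/?" + data);
$("#pdfViewer").attr("src", "https://example.org/inc/javascript/web/viewer.html?file=" + pdfSource);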
So, have a look at the NodeJS script (I have to say I am not very familiar with async and NodeJS):
const https = require("https");
const fs = require("fs");
const puppeteer = require('puppeteer');
const url = require("url");
var qs = require('querystring');
const request = require("request-promise");
const options = {
    key: fs.readFileSync("key.pem", "utf-8"),
    cert: fs.readFileSync("cert.pem", "utf-8"),
    passphrase: 'XXXXXXXX'
};
https.createServer(options, function (req, res) {
    (async function () {
        if (req.method == 'POST') {
            var body = '';
            req.on('data', function (data) {
                body += data;
                // Too much POST data, kill the connection!
                // 1e6 === 1 * Math.pow(10, 6) === 1 * 1000000 ~~~ 1MB
                if (body.length > 1e6)
                    req.connection.destroy();
            });
            req.on('end', function () {
                // got a self-signed certificate only, will change it soon!
                // (needs a string value and a terminating semicolon, otherwise the
                // following IIFE is parsed as a function call on the number 0)
                process.env['NODE_TLS_REJECT_UNAUTHORIZED'] = '0';
                (async function () {
                    var result = await request.post('https://example.org/index.php', {
                        // htpasswd secured at the moment
                        'auth': {
                            'user': 'user',
                            'pass': 'pass',
                            'sendImmediately': false
                        },
                        // I would like to send the cookie or the session hash here
                        // (or something else) to ensure that the user is allowed to
                        form: {
                            giveme: 'html'
                        }
                    },
                    function (error, response, body) {
                        // for debugging reasons
                        console.log("error: " + error);
                        console.log("response: " + response);
                        console.log("body: " + body);
                    });
                    const browser = await puppeteer.launch();
                    const main = async () => {
                        // generating pdf using result from request.post
                    }
                    const rendered_pdf = await main();
                    res.writeHead(200, {
                        "Access-Control-Allow-Headers": "Origin, X-Requested-With, Content-Type, Accept",
                        "Access-Control-Allow-Origin": "*",
                        'Content-Type': 'application/pdf',
                        'Content-Disposition': 'attachment; filename=mypdf.pdf',
                        'Content-Length': rendered_pdf.length
                    });
                    res.end(rendered_pdf);
                })();
            });
        } else if (req.method == 'GET') {
            console.log("we got a GET");
        } else {
            console.log("we got NOTHING");
        }
    })();
}).listen(8080);
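For context, the elided main() is just the usual Puppeteer HTML-to-PDF step. A minimal sketch, assuming result holds the HTML string returned by request.post (the real implementation may differ):
const main = async () => {
    // load the HTML returned by the PHP backend into a blank page
    const page = await browser.newPage();
    await page.setContent(result);
    // render it to a PDF buffer and close the browser again
    const pdf = await page.pdf({ format: 'A4' });
    await browser.close();
    return pdf;
};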
Everything is working fine and the PDFs are displayed well - but as I mentioned before, I don't know how to ensure that the user is allowed to generate and view the PDF.
tl;dr:
Is there a way (maybe without an iFrame) to make sure the user is permitted? It is important to me that the viewer toolbar (as seen in the screenshot) is available.
[Diagram: current procedure]

I think I found a solution.
[Diagram: new approach with token logic]
Using a token (a hash or random string) that is valid only for retrieving a single PDF file should do it.
The token does not authenticate the user itself. I think this is a safer approach?
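Roughly: the PHP backend checks the session cookie, issues a short-lived single-use token, and the NodeJS script only serves a PDF if it can verify that token. A minimal sketch of the Node side - verifyToken and the verify_token.php endpoint are hypothetical placeholders for however the token would actually be stored and checked:
const { URL } = require('url');

// hypothetical helper: ask the PHP backend whether this token is valid (and consume it)
async function verifyToken(token) {
    const answer = await request.post('https://example.org/verify_token.php', {
        form: { token: token }
    });
    return answer === 'ok';
}

https.createServer(options, async function (req, res) {
    // read ?token=... from the request URL
    const token = new URL(req.url, 'https://example.org:8080').searchParams.get('token');
    if (!token || !(await verifyToken(token))) {
        res.writeHead(403);
        return res.end('Forbidden');
    }
    // ... generate and return the PDF as before ...
}).listen(8080);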
Feel free to comment/answer :)

Related

How to stream different local videos using NodeJS and Express

I would like to stream different user-selected videos to my front-end. For this I am using NodeJS and Express. The source of the <video> element in which the video should be displayed is 'http://localhost:4201/video'.
The code I am using to stream the video looks like this:
async function loadLocalVideo(_, filePath) {
    if (!filePath) {
        console.log('No file selected');
        return;
    } else {
        fs.access(filePath, (err) => {
            if (err) {
                console.log(`File does not exist at path ${filePath}`);
                return;
            }
        });
    }
    expressApp.get('/video', function (req, res) {
        const path = filePath;
        const stat = fs.statSync(path);
        const fileSize = stat.size;
        const range = req.headers.range;
        if (range) {
            const parts = range.replace(/bytes=/, '').split('-');
            const start = parseInt(parts[0], 10);
            const end = parts[1] ? parseInt(parts[1], 10) : fileSize - 1;
            if (start >= fileSize) {
                res.status(416).send(
                    'Requested range not satisfiable\n' + start + ' >= ' + fileSize
                );
                return;
            }
            const chunksize = end - start + 1;
            const file = fs.createReadStream(path, { start, end });
            const head = {
                'Content-Range': `bytes ${start}-${end}/${fileSize}`,
                'Accept-Ranges': 'bytes',
                'Content-Length': chunksize,
                'Content-Type': 'video/mp4',
            };
            res.writeHead(206, head);
            file.pipe(res);
        } else {
            const head = {
                'Content-Length': fileSize,
                'Content-Type': 'video/mp4',
            };
            res.writeHead(200, head);
            fs.createReadStream(path).pipe(res);
        }
    });
}
However, when I want to stream a different video and call the same function again with a different filePath param, the same video keeps playing. How can I stream another video and display it in the <video> element?
I think you are saying that you are using the same file name and path but want the backend server to change the actual contents that are streamed to the client.
This approach may confuse the caching downstream of your server - i.e. the network or browser cache may not recognise that the file has changed and it may simply serve the cached versions.
Some CDNs will allow you to add edge functionality to intercept a request and then decide which of a number of options you want to return, or you could disable caching, but I suspect these approaches might be overly complex.
If all you want is to be able to display different videos in a video element on your web page, it may be easier to simply change the source attribute of the video element on the page itself and then call load() on it.
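For example, something along these lines on the page - the element id and the ?file query parameter are assumptions, and the /video route would have to read that parameter to pick the file:
// point the existing <video> element at a new source and reload it
const player = document.getElementById('player');       // hypothetical element id
player.src = 'http://localhost:4201/video?file=' + encodeURIComponent('second.mp4');
player.load();                                           // makes the browser re-fetch the new source
player.play();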

Not able to stop the spinner on page when lengthy response is coming using Angular and Node

I am facing an issue. In my application, the page makes a request to a server run by Node.js to fetch 2000 records at a time. The records come back from Node, but in the dev-tools console the response does not expand, and I also have a loader implementation that does not stop even after the response is received. I explain the whole code below.
demo.component.ts:
onFileSelect($event) {
    const file = $event.target.files[0];
    const fileName = file.name;
    const fileExtension = fileName.replace(/^.*\./, '');
    if (fileExtension === 'ubot') {
        this.loginService.startSpinner(true);
        const formData = new FormData();
        formData.append('cec', this.cec);
        formData.append('screenName', this.intFlow);
        formData.append('fileCategory', 'payload');
        formData.append('file', file);
        this.intentService.reverseFile(formData).subscribe(async (res: any) => {
            console.log('response', res);
            console.log('succ', res.status);
            if (res && res.status === 'success') {
                this.loginService.startSpinner(false);
                this.intentService.intentData = '';
                this.resettoOriginalState();
                this.cdref.detach();
                await this.repopulateDataFromFile(res.body);
                (<HTMLInputElement>document.getElementById('fileuploader')).value = "";
            } else {
                this.loginService.startSpinner(false);
                this._notifications.create(res.msg, '', this.errorNotificationType);
                (<HTMLInputElement>document.getElementById('fileuploader')).value = "";
            }
        });
    } else {
        this.loginService.startSpinner(false);
        this._notifications.create('Please choose a file', '', this.errorNotificationType);
    }
}
Here I make the request to the server through a service, which is given below.
reverseFile(value) {
    // const token = localStorage.getItem('token')
    // let headers = new HttpHeaders({
    //     Authorization: 'Bearer ' + token
    // })
    return this.http.post(this.nodeAppUrl + 'reverseFile', value, { observe: 'response' })
        .pipe(
            tap((res: any) => this.loginService.validateToken(res)),
            map((res: any) => {
                return res.body
            })
        )
}
Here Angular makes the request, the spinner starts, and after a few seconds the response comes back from Node.js, but even though we have the line this.loginService.startSpinner(false); after the success check, the spinner keeps running.
The response contains more than 2000 records in a nested array-of-objects format, and we populate them using the this.repopulateDataFromFile(res.body) method. I am attaching a screenshot of the console tool below.
Even though the status is success, I am not able to stop the spinner, and I am also not able to expand the records in the console, which shows "this value was evaluated upon first expanding...".
Can anybody please help explain why this is happening and how to resolve it?

Chaining routes in NodeJs with values after sending API response

I want to chain routes in NodeJS with values after sending the API response to the end user.
WHY: The uploaded files would be somewhat large (5-50 MB each) and require some processing, and I cannot make my API user wait/time out while my NodeJS code is working. So I need to: 1) upload the files and immediately send success to the user, 2) process the files (a few promises) and return/log success/failure for a notification system.
My individual code blocks are done and working fine (i.e. the upload service and the file-processing service both pass tests and work nicely when tested individually).
Now, with the upload API in place, I've added the following code:
router.post('/upload', upload.array('upload_data', multerMaxFiles), (req, res, next) => {
    //// some uploading and processing stuff - works nicely
    res.json({ 'message': 'File uploaded successfully.' }); // shown to the API client nicely
    console.log("what next? " + utilz.inspect(uploaded_file_paths)) // prints file names on the console
    next();
});
PROBLEM:
app.use('/api', uploadRoute); // the route shown above
// want some processing to be done here
app.use(function (req, res, next) {
    // **want those uploaded file names here**
    // tried a few response-object options, but it fails with an error
});
OR use something like:
app.use(someFunction(uploaded_file_names)); // **want those uploaded file names as params**
PS:
Any promise after the file-upload success results in 'Error: Can't set headers after they are sent.', so it is not helpful to write anything there.
Any suggestions, folks?
--
N Baua
Once you've sent a response back to the browser (to keep it from timing out during your long processing time), that http request is done. You cannot send any more data on it and trying to do so will trigger a server-side error. You cannot "chain routes" the way you were asking as you seem to want to do because you simply can't send more data over that http request once you've sent the first response.
There are two common ways to deal with this issue.
1. As part of your initial response, send back a transaction ID and then have the client poll back every few seconds with an Ajax call asking what the final status of that transaction is. The server can return "in progress" until it is finally done, and then it can return the final status.
2. You can connect a webSocket or socket.io connection from client to server. As part of your initial response to the upload, send back a transaction ID. Then, when the transaction is done server-side, the server sends a notification on the webSocket or socket.io connection for that particular client with the transaction ID and the final status. The client can then respond accordingly to that final status. You can either keep the webSocket/socket.io connection open for use with other requests or close it afterwards.
Using either technique, you could also return/send a progress value (like percent complete) that the client could use to display completion progress. This is generally very helpful on the client-side to keep an impatient user from giving up or refreshing the page. If they can see that the processing is proceeding, they won't give up thinking that maybe it stopped working.
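A rough sketch of the polling variant - processFiles, the in-memory jobs map and the /upload/status route are placeholders for illustration, not your actual code:
const crypto = require('crypto');
const jobs = {}; // transactionId -> status (in-memory, for illustration only)

router.post('/upload', upload.array('upload_data', multerMaxFiles), (req, res) => {
    const transactionId = crypto.randomBytes(8).toString('hex');
    jobs[transactionId] = { status: 'in progress' };

    // respond immediately so the client is not kept waiting
    res.json({ message: 'File uploaded successfully.', transactionId: transactionId });

    // kick off the long-running work after the response has been sent
    processFiles(req.files)   // hypothetical promise-returning helper
        .then(() => { jobs[transactionId].status = 'done'; })
        .catch((err) => { jobs[transactionId] = { status: 'failed', error: err.message }; });
});

// the client polls this every few seconds until the status is no longer "in progress"
router.get('/upload/status/:id', (req, res) => {
    res.json(jobs[req.params.id] || { status: 'unknown transaction' });
});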
This should work with res.write(), but I think it depends on your client's cache.
I tried this, but it did not work in my Firefox.
app.get('/test', function(req, res) {
    var count = 0;
    var interval = setInterval(function() {
        if (count++ === 100) {
            clearInterval(interval);
            res.end();
            return; // do not write again after the response has ended
        }
        res.write('This is line #' + count + '\n');
    }, 100);
});
After I increased the frequency and number of writes, it seemed to work.
So try:
router.post('/upload', upload.array('upload_data', multerMaxFiles), (req, res, next) => {
    //// some uploading and processing stuff - works nicely
    res.write(JSON.stringify({ 'message': 'File uploaded successfully.' })); // shown to the API client nicely
    console.log("what next? " + utilz.inspect(uploaded_file_paths)) // prints file names on the console
    next();
});
// STEP (1)
// Example: simulate uploading multiple files with chained API calls. In this example the
// parameters "idFile" and "arrayidFileExample" are the important ones.
// You should create and send this data from the view.
{
    "idFile": "04fe640f6e4w", // id of the first file
    "arrayidFileExample": ["04fe640f6e4w","03g5er4g65erg","g0er1g654er65g4er","0g4er4g654reg654re"] // simulated idFiles array from the view
}
// STEP (2)
// your upload-files API
const axios = require('axios'); // needed by postUploadFile below
app.post('/upload', function(req, res) {
    // Express headers, not important in this code
    res.header("Access-Control-Allow-Origin", "*");
    res.header("Access-Control-Allow-Headers", "Origin, X-Requested-With, Content-Type, Accept");
    let arrayIdFiles = req.body.arrayidFileExample; // create arrayIdFiles
    let isBlock = false; // flag to block the API call loop
    let countIndex = 0;
    let currentIndex = 0;
    // STEP (3)
    // If the next "arrayIdFiles" index does not exist, set isBlock to true (the flag that blocks the API call loop)
    for (let count in arrayIdFiles) {
        if (req.body.idFile == arrayIdFiles[count]) {
            console.log("current index --> ", countIndex)
            countIndex++;
            console.log("next index --> ", countIndex)
            if (arrayIdFiles[countIndex] == undefined) {
                isBlock = true;
            }
            break;
        }
        countIndex++;
        currentIndex++;
    }
    // STEP (4)
    // If isBlock is false, call the upload API with the next idFile. This simulates a "recursive API loop".
    if (isBlock == false) {
        postUploadFile(
            'http://localhost:3500/upload',
            {
                "idFile": arrayIdFiles[currentIndex + 1], // send the next idFile from arrayIdFiles
                "arrayidFileExample": arrayIdFiles // initial arrayIdFiles
            });
    }
    // STEP (6)
    // example JSON response
    const json = JSON.stringify({
        error: false,
        statusCode: 200,
        body: {
            message: 'current id file ' + req.body.idFile,
        }
    });
    res.write(json);
    return res.end();
});
// STEP (5)
// call the "/upload" API via POST
const postUploadFile = (url = '', body = {}) => {
    return new Promise((resolve, reject) => {
        axios.post(url, body).then(response => {
            return resolve(response.data);
        }).catch(error => {});
    });
};
// server listen instance
server.listen(3500, () => {
});

Http authorization with node.js

My former server.js looks like this; after running the server I could see my index.html:
var connect = require('connect');
var serveStatic = require('serve-static');
connect().use(serveStatic(__dirname)).listen(5000, '192.168.xx.xx', function() {
    console.log('Server running on 5000');
});
I want to add an HTTP login and password to secure the website, so I found information online about the http module: if I enter the right login and password, I see a congratulations message:
var http = require('http');
var server = http.createServer(function(req, res) {
    // console.log(req); // debug dump the request
    // If they pass in a basic auth credential it'll be in a header called "Authorization"
    // (note NodeJS lowercases the names of headers in its request object)
    var auth = req.headers['authorization']; // auth is base64(username:password), so we need to decode the base64
    console.log("Authorization Header is: ", auth);
    if (!auth) { // No Authorization header was passed in, so it's the first time the browser hit us
        // Sending a 401 will require authentication; we need to send the 'WWW-Authenticate' header to tell them the sort of authentication to use
        // Basic auth is quite literally the easiest and least secure; it simply gives back base64( username + ":" + password ) from the browser
        res.statusCode = 401;
        res.setHeader('WWW-Authenticate', 'Basic realm="Secure Area"');
        res.end('<html><body>Need authorization</body></html>');
    }
    else if (auth) { // The Authorization header was passed in, so now we validate it
        var tmp = auth.split(' '); // Split on a space; the original auth looks like "Basic Y2hhcmxlczoxMjM0NQ==" and we need the 2nd part
        var buf = Buffer.from(tmp[1], 'base64'); // create a buffer from the base64 data (Buffer.from replaces the deprecated new Buffer())
        var plain_auth = buf.toString(); // read it back out as a string
        console.log("Decoded Authorization ", plain_auth);
        // At this point plain_auth = "username:password"
        var creds = plain_auth.split(':'); // split on a ':'
        var username = creds[0];
        var password = creds[1];
        if ((username == 'admin') && (password == 'admin')) { // Is the username/password correct?
            res.statusCode = 200; // OK
            res.end('<html><body>Congratulations, feel free to explore!</body></html>');
        }
        else {
            res.statusCode = 401; // Force them to retry authentication
            res.setHeader('WWW-Authenticate', 'Basic realm="Secure Area"');
            // res.statusCode = 403; // or alternatively just reject them altogether with a 403 Forbidden
            res.end('<html><body>You shall not pass</body></html>');
        }
    }
});
server.listen(5000, function() { console.log("Server Listening on http://localhost:5000/"); });
I am new to NodeJS and I want to know how to combine these two scripts in order to add authorization to my site.
Could I do something to show my index page instead of the congratulations message after entering the login and password?
Thanks a lot.
In order to show the HTML page instead of the congratulations message, you can follow these steps:
Get the request path via req.url, such as / or /introduction.html.
According to that path, read the corresponding HTML file from the server's disk using fs.readFile().
Return the HTML file content to the browser if the read succeeds; otherwise, return a 404 error page.
Here is some example code for the above steps:
if ((username == 'admin') && (password == 'admin')) { // Is the username/password correct?
    res.statusCode = 200; // OK
    // res.end('<html><body>Congratulations, feel free to explore!</body></html>');
    var requestURL = req.url; // e.g. / or /a or /a.html
    var requestFilePath = getFilePathFromRequestURL(requestURL); // you need to implement this logic yourself, such as mapping "/" to "./index.html"
    fs.readFile(requestFilePath, function(error, data) {
        if (error) {
            res.statusCode = 404;
            res.write('File not found.');
        } else {
            res.statusCode = 200;
            res.write(data);
        }
        res.end();
    });
}
However, unless you want to write some low-level node.js code to better understand the platform, I highly recommend using a node.js web framework such as Express. Serving HTTP requests with low-level node.js would be tedious, especially in production code.
Also, please note that using WWW-Authenticate Basic for authentication is neither secure nor user-friendly. You may want some other way to implement authentication, such as JSON Web Tokens.
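For example, a rough sketch of the Express route: static file serving behind a small hand-rolled Basic-auth middleware (for illustration only; real applications should use a proper authentication scheme):
const express = require('express');
const app = express();

// very small Basic-auth check, for illustration only
app.use(function (req, res, next) {
    const auth = req.headers['authorization'];
    if (auth) {
        const decoded = Buffer.from(auth.split(' ')[1], 'base64').toString(); // "username:password"
        const parts = decoded.split(':');
        if (parts[0] === 'admin' && parts[1] === 'admin') {
            return next(); // authenticated - fall through to the static handler
        }
    }
    res.set('WWW-Authenticate', 'Basic realm="Secure Area"');
    res.status(401).send('Need authorization');
});

// everything below only runs for authenticated requests,
// so index.html and the other static files are protected
app.use(express.static(__dirname));

app.listen(5000, function () {
    console.log('Server running on 5000');
});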

Unable to get information from <div> Node spider with Cheerio

I'm trying to download the lat/long locations of CCTV cameras from the City of Baltimore website (a project on the surveillance state), but I can't get the console to log anything.
Here's the site (the URL is in the code below), and my code is:
const request = require('request');
const cheerio = require('cheerio');
let URL = 'https://data.baltimorecity.gov/Public-Safety/CCTV-Locations/hdyb-27ak/data'
let cameras = [];
request(URL, function(err, res, body) {
    if (!err && res.statusCode == 200) {
        let $ = cheerio.load(body);
        $('div.blist-t1-c140113793').each(function() {
            let camera = $(this);
            let location = camera.text();
            console.log(location);
            cameras.push(location);
        });
        console.log(cameras);
    }
});
I've tried setting the selector to blist-t1-c140113793 and blist-td blist-t1-c140113793, but neither has worked.
That's because the data for those divs is loaded asynchronously, after the page has rendered. JavaScript is not executed by Cheerio, or any other such library. You'll need either to analyze the network traffic and figure out which HTTP call loads this data, or use something like Selenium, which actually executes JavaScript inside a browser.
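For example, a rough sketch using Puppeteer (which, like Selenium, drives a real browser), assuming div.blist-t1-c140113793 is actually the selector the rendered page uses:
const puppeteer = require('puppeteer');

(async () => {
    const browser = await puppeteer.launch();
    const page = await browser.newPage();
    await page.goto('https://data.baltimorecity.gov/Public-Safety/CCTV-Locations/hdyb-27ak/data', {
        waitUntil: 'networkidle0' // wait until the asynchronous data calls have settled
    });

    // selector taken from the question - may need adjusting after inspecting the rendered DOM
    const cameras = await page.$$eval('div.blist-t1-c140113793', divs =>
        divs.map(div => div.textContent.trim())
    );

    console.log(cameras);
    await browser.close();
})();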
