Serverless.yml AWS Lambda Embedding EJS Template into Handler.js - javascript

I'm trying to embed an EJS template named 'ui.ejs' into handler.js.
The aim is to capture URL query parameters, then pass them to a function name 'ui.js' to capture data, that data is then passed onto the EJS UI template called 'ui.ejs'.
But I keep getting this error:
Failure: ENOENT: no such file or directory, open 'D:\Studio\Work\YellowCard\dynamo_serverless_rest\.webpack\service/views/ui.ejs'
Seems webpack is interfering with my directory paths.
The directory is under '/views/ui.ejs' NOT '.webpack\service/views/ui.ejs' like the error claims.
How do I fix it?
Thanks in advance..!
Here's the handler.js code:
// Import modules and dependencies
import {ui} from './index.js';
var ejs = require('ejs');
var fs = require('fs');
// Implementing the UI
export async function UserInterface(event, context, callback) {
// Capture data event parameters
const e = event.queryStringParameters;
// Get UI parameters
let params = await ui(e);
var htmlContent = fs.readFileSync(__dirname + '/views/' + 'ui.ejs', 'utf8');
var template = ejs.compile(htmlContent);
return {
statusCode: 200,
headers: { 'Content-type': 'text/html' },
body: JSON.stringify(template(params)),
};
};

You can use copy-webpack-plugin to copy the views folder to the destination directory.
In your webpack configuration file (webpack.config.js)
const CopyPlugin = require("copy-webpack-plugin");
// ...
plugins: [
new CopyPlugin({
patterns: [
{ from: "views", to: path.join(__dirname, '.webpack', 'service', 'views') },
],
}),
],
And also update serverless.yml file to include views directory to your lambda function
package:
include:
- views/**

The Fix:
I found a quick solution using the 'path' module.
import {ui} from './index.js';
var ejs = require('ejs');
var fs = require('fs');
const path = require('path');
// Implementing the UI
export async function UserInterface(event, context, callback) {
// Capture data event
const e = event.queryStringParameters;
// Get UI parameters
let params = await ui(e);
var htmlContent = fs.readFileSync(path.resolve(process.cwd(), './views/' + 'ui.ejs'), 'utf8');
var template = ejs.compile(htmlContent);
return {
statusCode: 200,
headers: { 'Content-type': 'text/html' },
body: JSON.stringify(template(params)),
};
};

Related

Error occurred while pinning file to IPFS: TypeError [ERR_INVALID_ARG_TYPE]: The "url" argument must be of type string

When i add the files to another project I get the following error and i can't figure out why.
When I run "node scripts1/runScript.js" error i get is:
Error occurred while pinning file to IPFS: TypeError [ERR_INVALID_ARG_TYPE]: The "url" argument must be of type string. Received undefined
at new NodeError (node:internal/errors:371:5)
at validateString (node:internal/validators:119:11)
at Url.parse (node:url:169:3)
at Object.urlParse [as parse] (node:url:156:13)
at dispatchHttpRequest (C:\Users\alaiy\Documents\NFTs\nft-mix-main\node_modules\axios\lib\adapters\http.js:118:22)
at new Promise (<anonymous>)
at httpAdapter (C:\Users\alaiy\Documents\NFTs\nft-mix-main\node_modules\axios\lib\adapters\http.js:48:10)
at dispatchRequest (C:\Users\alaiy\Documents\NFTs\nft-mix-main\node_modules\axios\lib\core\dispatchRequest.js:58:10)
at Axios.request (C:\Users\alaiy\Documents\NFTs\nft-mix-main\node_modules\axios\lib\core\Axios.js:108:15)
at wrap (C:\Users\alaiy\Documents\NFTs\nft-mix-main\node_modules\axios\lib\helpers\bind.js:9:15) {
code: 'ERR_INVALID_ARG_TYPE'
}
The runScript.js code is :
const path = require('path');
const pinFileToIPFS = require('./pinFileToIPFS');
const filePath = path.join(__dirname, '../img/Toad-Licking-Mario.jpg');
// const filePath = path.join(__dirname, '../data/metadata.json');
pinFileToIPFS(filePath);
The pinFileToIPFS.js file which contains the "url: pinataEndpoint" i think the error message is referring too:
require('dotenv').config();
const fs = require('fs');
const axios = require('axios');
const FormData = require('form-data');
const { storeDataToFile } = require('./ipfsHelper.js');
// Calls Pinata API's to pin file to IPFS
const pinFileToIPFS = async (filePath) => {
const pinataEndpoint = process.env.PINATA_ENDPOINT;
const pinataApiKey = process.env.PINATA_API_KEY;
const pinataApiSecret = process.env.PINATA_API_SECRET;
const form_data = new FormData();
try {
form_data.append('file', fs.createReadStream(filePath));
const request = {
method: 'post',
url: pinataEndpoint,
maxContentLength: 'Infinity',
headers: {
pinata_api_key: pinataApiKey,
pinata_secret_api_key: pinataApiSecret,
'Content-Type': `multipart/form-data; boundary=${form_data._boundary}`,
},
data: form_data,
};
console.log('request:', request);
const response = await axios(request);
console.log('Successfully pinned file to IPFS : ', response);
await storeDataToFile(response.data);
console.log('Successfully added IPFS response to json file');
} catch (err) {
console.log('Error occurred while pinning file to IPFS: ', err);
}
};
module.exports = pinFileToIPFS;
Lastly my .env file is:
# export PRIVATE_KEY=asafdagadd
# export WEB3_INFURA_PROJECT_ID=asdfsdf
# export ETHERSCAN_TOKEN=asdfasdfasdf
export IPFS_URL=blah blah
export UPLOAD_IPFS=true
export PINATA_API_KEY='blah blah'
export PINATA_API_SECRET='blah blah'
export PINATA_ENDPOINT="https://api.pinata.cloud/pinning/pinFileToIPFS"
I can upload the package.json file aswell if needed.
Any help is appreciated!
SOLUTION: Turned out the export syntax I was using in my .env file should not be there. export syntax is for python scripts which I was previously using.
As I am running javascript scripts, I deleted the export syntax and everything worked like a charm!
I was facing same error, the i find out that my config folder suppose to be in the folder in which I was executing it from.
This error comes when your url value is not getting the env variables.
So in my case solution was something like this-
config/
--default.json
scripts/
--server.js
So it was searching config folder inside scripts folder.
Later I run my file outside from scripts folder then it worked fine.
node scripts/server.js

(Express js) How should I use another module in my router? (I keep getting undefined reference error)

I'm making a simple pdf reader application (using the pdf.js library from Mozilla), that the user selects the file and then the website automatically goes to the /reader page, and then it shows the pdf, but I don't know how should I actually transfer that pdf file to my other page, I've tried doing it like this:
This is my server.js
import express from 'express';
const app = express();
import router from '../routes/reader.js';
const port = 5500;
const ip = '127.0.0.1';
app.set('view engine', 'ejs');
app.use('/css', express.static('css'));
app.use('/node_modules', express.static('node_modules'));
app.use('/js', express.static('js'));
app.use('/docs', express.static('docs'));
app.use('/reader', readerRouter);
app.get('/', (req, res) => {
res.render('index')
});
app.listen(port, IP);
This is where I get the file from the input and send it to the specified URL, this is in a script that I've added to my home page with script tags
uploadInput.onchange = (e) => {
let file = e.target.files[0];
let fd = new FormData();
fd.append(uploadInput, file);
$.ajax({
url: '/reader',
method: 'POST',
data: fd,
cache: false,
processData: false,
contentType: 'application/pdf'
}).done(() => {
window.location.href = '/reader';
});
}
And in my /reader router, I did this:
const express = require('express');
import showPdf from '../js/pdf.js';
const router = express.Router();
router.get('/', (req, res) => {
res.render('reader');
});
router.post('/', (req, res) => {
res.redirect('/reader');
showPdf(req.body);
});
export default router;
I couldn't easily import the pdf.js script, it would throw an error saying that I can't use import outside a module etc... I'm new in node.js and I don't know how these modules work, and how should I import and use them properly, but after doing whatever that I could do to solve the importing issues, now I'm getting an error saying that pdfjsLib is not defined in my pdf.js script
This is the pdf.js script:
pdfjsLib.GlobalWorkerOptions.workerSrc = '../node_modules/pdfjs-dist/build/pdf.worker.js';
const eventBus = new pdfjsViewer.EventBus();
const pdfLinkService = new pdfjsViewer.PDFLinkService({ eventBus });
// Get document
function showPdf(file) {
let fileReader = new FileReader();
fileReader.readAsArrayBuffer(file);
fileReader.onload = () => {
let typedArray = new Uint8Array(this.result);
pdfjsLib.getDocument(typedArray).promise.then(function (_pdfDoc) {
pdfDoc = _pdfDoc;
});
}
}
it's much larger than this, but it's how I'm using the pdfjsLib and pdfjsViewer, and I'm importing them before my pdf.js script in the HTML script tags.
Why it can't find the pdfjsLib and also the pdfjsViewer? where does the router sit on my website? is this because the router can't access the pdfjsLib global variable? how should I use a script which is dependent on some global variables like this? is this approach correct at all? I mean for transferring the selected file from input and posting it to my other page.
You are exporting a variable called router and then looking for readrouter in your import. Change the import to:
import router from '../routes/reader.js
For the pdfjsLib I do not see where you are importing that library. So doing the same thing you did for router just import the pdfjsLib library. If you are using this package its:
import pdfjsLib from 'pdfjs-dist'

Get node.js server-side object available in client-side javascript code

I played around with UiPath Orchestrator package. And the connection worked out with the installed node.js package.
Anyway now I need to implement it in my website in a way where I get access it from a simple html site.
There I struggle a bit with getting it to run. This is how I would like to use it:
index.html:
<html>
...
<button onclick="test()">Do something</button>
...
<script src="scripts.js"></script>
...
</html>
server.js: (I start with node server.js)
var http = require('http'),
fs = require('fs');
const port = 6543;
const path = require('path');
const server = http.createServer((req, res) => {
let filePath = path.join(
__dirname,
req.url === "/" ? "index.html" : req.url
);
let extName = path.extname(filePath);
let contentType = 'text/html';
switch (extName) {
case '.js':
contentType = 'text/javascript';
break;
}
console.log(`File path: ${filePath}`);
console.log(`Content-Type: ${contentType}`);
res.writeHead(200, {'Content-Type': contentType});
const readStream = fs.createReadStream(filePath);
readStream.pipe(res);
});
server.listen(port, (err) => {
...
});
scripts.js:
function test() {
...
// get the orch object here without defining it here as it contains credentials
var orchestratorInstance = new Orchestrator({
tenancyName: string (optional),
usernameOrEmailAddress: string (required),
password: string (required),
hostname: string (required),
isSecure: boolean (optional, default=true),
port: integer (optional, [1..65535], default={443,80} depending on isSecure),
invalidCertificate: boolean (optional, default=false),
connectionPool: number (optional, 0=unlimited, default=1)
});
}
This works. So the test function is fired.
But now I would like to get the Orchestrator object (like shown here https://www.npmjs.com/package/uipath-orchestrator).
How to do it in the best way?
Maybe just pass-through that object to the scripts.js file itself? But how to do that with window or global and would that be a proper solution?
I need the server-side generated object as it contains credentials that may not be delivered to client-side.
A crude example, but to give an idea, of embedding a script file into html.
Ideally you'd use express to load a webpage, but this is purely to describe usecase.
You could do the same with an end body tag, or end head tag.
const http = require('http'),
const fs = require('fs');
const html = fs.readFileSync('./file.html');
const obj = fs.readFileSync('./script.js');
const htmlWithScript = html.replace(/\<\/html\s*\>/,`<script>${obj}</script></html>`);
// const htmlWithScript = `<html><head><script>${obj}</script></head><body> my html stuff ...</body></html>`
http.createServer(function(request, response) {
response.writeHeader(200, {"Content-Type": "text/html"});
response.write(htmlWithScript);
response.end();
}).listen(8000);
It works perfectly together with UiPath-Orchestrator nodejs.
So just use:
var util = require('util');
var Orchestrator = require('uipath-orchestrator');
var orchestrator = new Orchestrator({
tenancyName: 'test', // The Orchestrator Tenancy
usernameOrEmailAddress: 'xxx',// The Orchestrator login
password: 'yyy', // The Orchestrator password
hostname: 'host.company.com', // The instance hostname
isSecure: true, // optional (defaults to true)
port: 443, // optional (defaults to 80 or 443 based on isSecure)
invalidCertificate: false, // optional (defaults to false)
connectionPool: 5 // options, 0=unlimited (defaults to 1)
});
var apiPath = '/odata/Users';
var apiQuery = {};
orchestrator.get(apiPath, apiQuery, function (err, data) {
if (err) {
console.error('Error: ' + err);
}
console.log('Data: ' + util.inspect(data));
});
and extract the orchestrator object to your node.js code.
It also works together with Orchestrator Cloud (if you don't have the on-prem), see here for more info.

ExpressJS and PDFKit - generate a PDF in memory and send to client for download

In my api router, there is a function called generatePDF which aims to use PDFKit module to generate a PDF file in memory and send to client for download instead of displaying only.
In api.js:
var express = require('express');
var router = express.Router();
const PDFDocument = require('pdfkit');
router.get('/generatePDF', async function(req, res, next) {
var myDoc = new PDFDocument({bufferPages: true});
myDoc.pipe(res);
myDoc.font('Times-Roman')
.fontSize(12)
.text(`this is a test text`);
myDoc.end();
res.writeHead(200, {
'Content-Type': 'application/pdf',
'Content-disposition': 'attachment;filename=test.pdf',
'Content-Length': 1111
});
res.send( myDoc.toString('base64'));
});
module.exports = router;
This does not work. The error message is (node:11444) UnhandledPromiseRejectionWarning: Error [ERR_HTTP_HEADERS_SENT]: Cannot set headers after they are sent to the client.
How can I go about fixing the issue and getting it work?
Also, a relevant question would be how I can separate the business logic of PDF generation from the router and chain them up?
Complete solution.
var express = require('express');
var router = express.Router();
const PDFDocument = require('pdfkit');
router.get('/generatePDF', async function(req, res, next) {
var myDoc = new PDFDocument({bufferPages: true});
let buffers = [];
myDoc.on('data', buffers.push.bind(buffers));
myDoc.on('end', () => {
let pdfData = Buffer.concat(buffers);
res.writeHead(200, {
'Content-Length': Buffer.byteLength(pdfData),
'Content-Type': 'application/pdf',
'Content-disposition': 'attachment;filename=test.pdf',})
.end(pdfData);
});
myDoc.font('Times-Roman')
.fontSize(12)
.text(`this is a test text`);
myDoc.end();
});
module.exports = router;
First I recommend to create a service for the PDF kit. And then a Controller to the route that you want.
I used get-stream to make this easier.
It also answers your question to the accepted answer:
how I can separate the business logic of PDF generation from the
router and chain them up?
This is my professional solution:
import PDFDocument from 'pdfkit';
import getStream from 'get-stream';
import fs from 'fs';
export default class PdfKitService {
/**
* Generate a PDF of the letter
*
* #returns {Buffer}
*/
async generatePdf() {
try {
const doc = new PDFDocument();
doc.fontSize(25).text('Some text with an embedded font!', 100, 100);
if (process.env.NODE_ENV === 'development') {
doc.pipe(fs.createWriteStream(`${__dirname}/../file.pdf`));
}
doc.end();
const pdfStream = await getStream.buffer(doc);
return pdfStream;
} catch (error) {
return null;
}
}
}
And then the method of the Controller:
(...)
async show(req, res) {
const pdfKitService = new PdfKitService();
const pdfStream = await pdfKitService.generatePdf();
res
.writeHead(200, {
'Content-Length': Buffer.byteLength(pdfStream),
'Content-Type': 'application/pdf',
'Content-disposition': 'attachment;filename=test.pdf',
})
.end(pdfStream);
}
And finally the route:
routes.get('/pdf', FileController.show);
For those how don't want to waste RAM on buffering PDFs and send chunks right away to the client:
const filename = `Receipt_${invoice.number}.pdf`;
const doc = new PDFDocument({ bufferPages: true });
const stream = res.writeHead(200, {
'Content-Type': 'application/pdf',
'Content-disposition': `attachment;filename=${filename}.pdf`,
});
doc.on('data', (chunk) => stream.write(chunk));
doc.on('end', () => stream.end());
doc.font('Times-Roman')
.fontSize(12)
.text(`this is a test text`);
doc.end();
You can use blob stream like this.
reference: https://pdfkit.org/index.html
const PDFDocument = require('pdfkit');
const blobStream = require('blob-stream');
// create a document the same way as above
const doc = new PDFDocument;
// pipe the document to a blob
const stream = doc.pipe(blobStream());
// add your content to the document here, as usual
doc.font('fonts/PalatinoBold.ttf')
.fontSize(25)
.text('Some text with an embedded font!', 100, 100);
// get a blob when you're done
doc.end();
stream.on('finish', function() {
// get a blob you can do whatever you like with
const blob = stream.toBlob('application/pdf');
// or get a blob URL for display in the browser
const url = stream.toBlobURL('application/pdf');
iframe.src = url;
});
pipe all your pdf data to your blob and then write it to a file or url.
or u can store the pdf directly into cloud storage like firebase storage and send download link to client.
If you want to generate pdfs dynamically then you can also try out html-pdf library in node which allows you to create a pdf from html template and add dynamic data in it. Also it is more reliable than pdfkit
https://www.npmjs.com/package/html-pdf
Also refer this link
Generate pdf file using pdfkit and send it to browser in nodejs-expressjs

Hapi showing undefined variable inside handler

I'm using Hapi.js for a project and a config variable that I'm passing to my handler is coming up as undefined when I call my route. What am I doing wrong?
server.js
var Hapi = require('hapi');
var server = new Hapi.Server('0.0.0.0', 8080);
// passing this all the way to the handler
var config = {'number': 1};
var routes = require('./routes')(config);
server.route(routes);
server.start();
routes.js
var Home = require('../controllers/home');
module.exports = function(config) {
var home = new Home(config);
var routes = [{
method: 'GET',
path: '/',
handler: home.index
}];
return routes;
}
controllers/home.js
var Home = function(config) {
this.config = config;
}
Home.prototype.index = function(request, reply) {
// localhost:8080
// I expected this to output {'number':1} but it shows undefined
console.log(this.config);
reply();
}
module.exports = Home;
The issue is with the ownership of this. The value of this within any given function call is determined by how the function is called not where the function is defined. In your case above this was referring to the global this object.
You can read more on that here: What does "this" mean?
In short the solution to the problem is to change routes.js to the following:
var Home = require('../controllers/home');
module.exports = function(config) {
var home = new Home(config);
var routes = [{
method: 'GET',
path: '/',
handler: function(request, reply){
home.index(request, reply);
}
}];
return routes;
}
I've tested this and it works as expected. On a side note, you're missing out on a lot of hapi functionality by structuring your code in this way, I generally use plugins to register routes instead of requiring all routes as modules and using server.route().
See this project, feel free to open an issue if you have further questions on this: https://github.com/johnbrett/hapi-level-sample

Categories