Unable to deploy the Node.js AWS Lambda function to Docker

I am developing a REST API with Node.js. My technology stack is AWS Lambda, API Gateway and RDS (MySQL). Below is my code.
roles.js
const mysql = require('mysql');

const con = mysql.createConnection({
  host: "*****.rds.amazonaws.com",
  user: "*****",
  password: "*****",
  port: 3306,
  database: "******"
});

exports.lambdaHandler = (event, context, callback) => {
  // return as soon as the callback fires, even though the open MySQL
  // connection keeps the event loop non-empty
  context.callbackWaitsForEmptyEventLoop = false;
  const sql = "select * from role";
  con.query(sql, function (err, result) {
    if (err) throw err;
    const response = {
      "statusCode": 200,
      "headers": {
        "Content-Type": "application/json"
      },
      "body": JSON.stringify(result),
      "isBase64Encoded": false
    };
    callback(null, response);
  });
};

exports.selectRoleByIDHandler = (event, context, callback) => {
  const { id } = event.queryStringParameters;
  console.log("id", id);
  context.callbackWaitsForEmptyEventLoop = false;
  // use a placeholder instead of string concatenation to avoid SQL injection
  const sql = "select * from role where idRole = ?";
  con.query(sql, [id], function (err, result) {
    if (err) throw err;
    const response = {
      "statusCode": 200,
      "headers": {
        "Content-Type": "application/json"
      },
      "body": JSON.stringify(result),
      "isBase64Encoded": false
    };
    callback(null, response);
  });
};
Below is my template.yaml
AWSTemplateFormatVersion: '2010-09-09'
Transform: AWS::Serverless-2016-10-31
Description: >
  node2
  Sample SAM Template for node2

# More info about Globals: https://github.com/awslabs/serverless-application-model/blob/master/docs/globals.rst
Globals:
  Function:
    Timeout: 100
    VpcConfig:
      SecurityGroupIds:
        - sg-sdsdsdsd
      SubnetIds:
        - subnet-ssdsds

Resources:
  HelloWorldFunction:
    Type: AWS::Serverless::Function # More info about Function Resource: https://github.com/awslabs/serverless-application-model/blob/master/versions/2016-10-31.md#awsserverlessfunction
    Properties:
      CodeUri: hello-world/
      Handler: app.lambdaHandler
      Runtime: nodejs14.x
      Role: !GetAtt LambdaRole.Arn
      Events:
        HelloWorld:
          Type: Api # More info about API Event Source: https://github.com/awslabs/serverless-application-model/blob/master/versions/2016-10-31.md#api
          Properties:
            Path: /hello
            Method: get
  RoleFunction:
    Type: AWS::Serverless::Function
    Properties:
      CodeUri: hello-world/
      Handler: roles.lambdaHandler
      Runtime: nodejs14.x
      Role: !GetAtt LambdaRole.Arn
      Events:
        HelloWorld:
          Type: Api
          Properties:
            Path: /roles
            Method: get
  SelectRolesByIDFunction:
    Type: AWS::Serverless::Function
    Properties:
      CodeUri: hello-world/
      Handler: roles.selectRoleByIDHandler
      Runtime: nodejs14.x
      Role: !GetAtt LambdaRole.Arn
      Events:
        HelloWorld:
          Type: Api
          Properties:
            Path: /selectRoleByIDHandler
            Method: get
  LambdaRole:
    Type: 'AWS::IAM::Role'
    Properties:
      AssumeRolePolicyDocument:
        Version: "2012-10-17"
        Statement:
          - Effect: Allow
            Principal:
              Service:
                - lambda.amazonaws.com
            Action:
              - 'sts:AssumeRole'
      Path: /
      ManagedPolicyArns:
        - arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole
      Policies:
        - PolicyName: root
          PolicyDocument:
            Version: "2012-10-17"
            Statement:
              - Effect: Allow
                Action:
                  - ec2:DescribeNetworkInterfaces
                  - ec2:CreateNetworkInterface
                  - ec2:DeleteNetworkInterface
                  - ec2:DescribeInstances
                  - ec2:AttachNetworkInterface
                Resource: '*'
When I try to execute sam local invoke, I get the following error:
Error: You must provide a function logical ID when there are more than one functions in your template. Possible options in your template: ['HelloWorldFunction', 'RoleFunction', 'SelectRolesByIDFunction']
Now I have two questions:
How do I solve this issue?
Is it bad practice to have more than one function in a single file in AWS Lambda?

Looks like you are trying to build Lambda functions via the AWS SDK for JavaScript. Have you looked at the examples in the AWS SDK for JavaScript v3 Developer Guide? There are end-to-end instructions on how to build Lambda functions using the JS API that can help you.
See this topic and its child topics in the TOC:
Cross-service examples for the AWS SDK for JavaScript

I found the issue: I had to invoke it as sam local invoke HelloWorldFunction, where HelloWorldFunction is the logical ID of the function I want to run.


NodeJS add the authorization button to Swagger Documentation

I need to add the Authorize button to the Swagger UI so that testers can set the "Bearer token" header and test the APIs.
My swagger's option definition is:
module.exports = {
  definition: {
    openapi: "3.0.3",
    info: {
      title: "APIs",
      version: "1.0.0",
    },
    servers: [
      {
        url: `http://localhost:${process.env.PORT}`
      }
    ],
    securityDefinitions: {
      bearerAuth: {
        type: 'apiKey',
        name: 'Authorization',
        scheme: 'bearer',
        in: 'header',
      },
    }
  },
  apis: ["./routes/*.js", "app.js"],
};
and my endpoint is as follows:
/**
 * @swagger
 * /api/users/test:
 *   post:
 *     security:
 *       - Bearer: []
 *     summary: test authorization
 *     tags: [User]
 *     description: use to test authorization JWT
 *     responses:
 *       '200':
 *         description: success
 *       '500':
 *         description: Internal server error
 */
router.post('/test', verifyJWT(), async (req, res) => {
  res.send('hi');
});
Are you using OAS v3? You have errors in your declarations; for example, securityDefinitions is now called securitySchemes, and it lives inside components.
Check https://swagger.io/docs/specification/authentication/
Once you fix your schema, add a security property to your path to protect it with a security scheme; then you'll get the green Authorize button.
components:
  securitySchemes:
    BearerAuth:
      type: http
      scheme: bearer

paths:
  /api/users/test:
    post:
      security:
        - BearerAuth: []
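Applied to the swagger-jsdoc options object from the question, the fix might look roughly like this (a sketch, assuming OpenAPI 3 and that the route annotations reference bearerAuth):

// Sketch of the corrected options: securityDefinitions becomes
// components.securitySchemes under OpenAPI 3
module.exports = {
  definition: {
    openapi: "3.0.3",
    info: {
      title: "APIs",
      version: "1.0.0",
    },
    servers: [
      { url: `http://localhost:${process.env.PORT}` }
    ],
    components: {
      securitySchemes: {
        bearerAuth: {
          type: 'http',        // not 'apiKey': OAS 3 models bearer tokens as http/bearer
          scheme: 'bearer',
          bearerFormat: 'JWT'  // documentation only
        }
      }
    },
    // optional: apply the scheme globally instead of per-operation
    security: [{ bearerAuth: [] }]
  },
  apis: ["./routes/*.js", "app.js"],
};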

"Error 400: The template parameters are invalid" - Running Dataflow Flex Template via Google Cloud Function

I'm trying to run a Dataflow Flex Template job via a Cloud Function which is triggered by a Pub/Sub message.
However, while the Dataflow pipeline works fine when run from gcloud, locally on the command line, or via the Flex Template API Explorer, when I try to launch it from a Cloud Function I keep running into this error:
"Problem running dataflow template, error was: { Error: The template parameters are invalid."
Note that I'm using all the same parameters as when I was testing it; I've also tried removing and reformatting them all several times, and passing them in as literal strings rather than environment variables.
I had seen that this error can be caused by incorrect formatting and/or incorrect parameter key names, possibly because I've used Python to build the pipeline and Node.js to run the Cloud Function, i.e. "temp_location" vs "tempLocation" or similar.
Or I was wondering whether having set google_cloud_options in my main.py file prevents me from specifying them as JS-friendly arguments when launching the template at runtime?
But having tried various combinations, none of this makes much difference. So my question is: does anyone know a fix for this issue, or failing that, how to even debug which parameters are wrong? Do I need to pass my GOOGLE_APPLICATION_CREDENTIALS service-account.json file as well?
Is the problem simply that the Cloud Function needs to be written in Python, like the Dataflow pipeline?
Google Cloud Function
// trigger.js
const { google } = require("googleapis");

exports.triggerFlexTemplate = (event, context) => {
  // PubSub message payloads and attributes not used.
  const pubsubMessage = event.data;
  console.log(Buffer.from(pubsubMessage, "base64").toString());
  console.log(event.attributes);
  google.auth.getApplicationDefault(function (err, authClient, projectId) {
    if (err) {
      console.error("Error occurred: " + err.toString());
      throw new Error(err);
    }
    const dataflow = google.dataflow({ version: "v1b3", auth: authClient });
    const date = new Date().toISOString().slice(0, 10);
    dataflow.projects.locations.flexTemplates.launch(
      {
        projectId: projectId,
        location: process.env.region,
        resource: {
          launchParameter: {
            jobName: `rank-stream-to-bigquery-${date}`,
            containerSpecGcsPath: `gs://${process.env.bucket}/dataflow_templates/rank_stream_to_bigquery.json`,
            parameters: {
              api_key: process.env.api_key,
              date: date,
              campaign: process.env.campaign,
              se: process.env.se,
              domain: process.env.domain,
              project: process.env.project,
              dataset: process.env.dataset,
              table_name: process.env.table_name,
              runner: process.env.runner,
              experiments: "disable_flex_template_entrypoint_override",
              staging_location: `gs://${process.env.bucket}/staging`,
              temp_location: `gs://${process.env.bucket}/temp`,
              service_account_email: process.env.service_account_email,
            }
          }
        }
      },
      function (err, response) {
        if (err) {
          console.error("Problem running dataflow template, error was: ", err);
        }
        console.log("Dataflow template response: ", response);
      }
    );
  });
};
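One thing worth checking against the code above: the flexTemplates.launch body distinguishes the parameters map (strings handed to the template's own arguments) from a runtime environment block with camelCase field names. A sketch of that shape, with placeholder values and field names as documented for the v1b3 API (worth verifying against the current docs):

// Sketch of the launchParameter shape; "my-bucket" and the "…" values are placeholders
const launchParameter = {
  jobName: "rank-stream-to-bigquery-2021-01-01",
  containerSpecGcsPath: "gs://my-bucket/dataflow_templates/rank_stream_to_bigquery.json",
  parameters: {
    // must match the pipeline's own argparse option names exactly
    api_key: "…",
    campaign: "…",
  },
  environment: {
    // infrastructure settings live here, in camelCase
    tempLocation: "gs://my-bucket/temp",
    stagingLocation: "gs://my-bucket/staging",
    serviceAccountEmail: "sa@my-project.iam.gserviceaccount.com",
    additionalExperiments: ["disable_flex_template_entrypoint_override"]
  }
};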
Dataflow Pipeline
# main.py
from __future__ import print_function, absolute_import
import argparse
import logging
import sys
import apache_beam as beam
from apache_beam.io.gcp.internal.clients import bigquery
from apache_beam.metrics.metric import Metrics
from apache_beam.options.pipeline_options import PipelineOptions, GoogleCloudOptions, StandardOptions, SetupOptions, WorkerOptions
import datetime as dt
from datetime import timedelta, date
import time
import re

logging.getLogger().setLevel(logging.INFO)

SCHEMA = {'fields': [{'name': 'date', 'type': 'DATE', 'mode': 'REQUIRED'},
                     {'name': 'url', 'type': 'STRING', 'mode': 'NULLABLE'},
                     {'name': 'lp', 'type': 'STRING', 'mode': 'NULLABLE'},
                     {'name': 'keyword', 'type': 'STRING', 'mode': 'REQUIRED'},
                     {'name': 'se', 'type': 'STRING', 'mode': 'REQUIRED'},
                     {'name': 'se_name', 'type': 'STRING', 'mode': 'REQUIRED'},
                     {'name': 'search_volume', 'type': 'INTEGER', 'mode': 'NULLABLE'},
                     {'name': 'rank', 'type': 'INTEGER', 'mode': 'NULLABLE'}]}

class GetAPI():
    def __init__(self, data={}):
        self.num_api_errors = Metrics.counter(self.__class__, 'num_api_errors')
        self.data = data

    def get_job(self):
        import requests
        params = dict(
            key=self.data.api_key,
            date=self.data.date,
            campaign_id=self.data.campaign,
            se_id=self.data.se,
            domain=self.data.domain,
            output='json'
        )
        endpoint = "https://www.rankranger.com/api/v2/?rank_stats"
        logging.info("Endpoint: {}".format(str(endpoint)))
        try:
            res = requests.get(endpoint, params)
            if res.status_code == 200:
                json_data = res.json()
                if 'result' in json_data:
                    response = json_data.get('result')
                    filtered = [{k: v for k, v in d.items() if v != '-' and len(v) != 0} for d in response]
                    return filtered
        except Exception as e:
            self.num_api_errors.inc()
            logging.error('Exception: {}'.format(e))
            logging.error('Extract error on "%s"', 'Rank API')

def format_dates(api):
    import datetime as dt
    api['date'] = dt.datetime.strptime(api['date'], "%m/%d/%Y").strftime("%Y-%m-%d")
    return api

def run(argv=None):
    parser = argparse.ArgumentParser()
    parser.add_argument('--api_key', type=str, help='API key for Rank API.')
    parser.add_argument('--date', type=str, help='Run date in YYYY-MM-DD format.')
    parser.add_argument('--campaign', type=str, help='Campaign ID for Rank API')
    parser.add_argument('--se', type=str, help='Search Engine ID for Rank API')
    parser.add_argument('--domain', type=str, help='Domain for Rank API')
    parser.add_argument('--project', type=str, help='Your GCS project.')
    parser.add_argument('--dataset', type=str, help='BigQuery Dataset to write tables to. Must already exist.')
    parser.add_argument('--table_name', type=str, help='The BigQuery table name. Should not already exist.')
    parser.add_argument('--bucket', type=str, help='GCS Bucket name to save the temp + staging folders to')
    parser.add_argument('--runner', type=str, help='Type of DataFlow runner.')
    args, pipeline_args = parser.parse_known_args(argv)

    options = PipelineOptions(pipeline_args)
    wo = options.view_as(WorkerOptions)  # type: WorkerOptions
    wo.worker_zone = "europe-west1-b"
    google_cloud_options = options.view_as(GoogleCloudOptions)
    google_cloud_options.project = args.project
    google_cloud_options.staging_location = "gs://{0}/staging".format(args.bucket)
    google_cloud_options.temp_location = "gs://{0}/temp".format(args.bucket)
    google_cloud_options.region = 'europe-west2'
    options.view_as(StandardOptions).runner = args.runner

    p = beam.Pipeline(options=options)

    api = (
        p
        | 'create' >> beam.Create(GetAPI(data=args).get_job())
        | 'format dates' >> beam.Map(format_dates)
    )

    BQ = (api | "WriteToBigQuery" >> beam.io.WriteToBigQuery(
        table=args.table_name,
        dataset=args.dataset,
        project=args.project,
        schema=SCHEMA,
        create_disposition=beam.io.BigQueryDisposition.CREATE_IF_NEEDED,
        write_disposition=beam.io.BigQueryDisposition.WRITE_APPEND))

    p.run()

if __name__ == '__main__':
    run()
Any help would be hugely appreciated!

Delete Item from DynamoDB not deleting

I can't for the life of me figure out why this isn't actually deleting the item from the DB. It returns a success as if it had deleted the item, but it's not actually performing the delete.
I'm using the JavaScript SDK in a Lambda with the Serverless Framework.
import * as dynamoDbLib from "./libs/dynamodb-lib";
import { success, failure } from "./libs/response-lib";
import { isEmpty } from 'lodash';

export function main(event, context, callback) {
  const params = {
    TableName: process.env.tableName,
    // 'Key' defines the partition key and sort key of the item to be removed
    // - 'tagId': path parameter
    Key: {
      tagId: event.pathParameters.id
    }
  };
  try {
    dynamoDbLib.call("delete", params).then((error) => {
      if (!isEmpty(error)) {
        console.log(error);
        callback(null, failure({ status: false, error }));
      } else {
        callback(null, success({ status: 204 }));
      }
    });
  } catch (e) {
    console.log(e);
    callback(null, failure({ status: false }));
  }
}
The dynamodb-lib file looks like this
import AWS from "aws-sdk";

AWS.config.update({ region: "us-east-1" });

export function call(action, params) {
  const dynamoDb = new AWS.DynamoDB.DocumentClient();
  return dynamoDb[action](params).promise();
}
Every other call so far, put and scan (to list all), works just fine; delete is the only one that's not actually doing anything.
EDIT: This is my table structure by the way
Resources:
  NametagsTable:
    Type: AWS::DynamoDB::Table
    Properties:
      # Generate a name based on the stage
      TableName: ${self:custom.stage}-nametags
      AttributeDefinitions:
        - AttributeName: tagId
          AttributeType: S
      KeySchema:
        - AttributeName: tagId
          KeyType: HASH
      # Set the capacity based on the stage
      ProvisionedThroughput:
        ReadCapacityUnits: ${self:custom.tableThroughput}
        WriteCapacityUnits: ${self:custom.tableThroughput}
Here is the function in serverless.yml
delete:
  # Defines an HTTP API endpoint that calls the main function in delete.js
  # - path: url path is /tags/{id}
  # - method: DELETE request
  handler: delete.main
  events:
    - http:
        path: tags/{id}
        method: delete
        cors: true
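One detail that stands out in the handler above: DocumentClient promises resolve with the response data (an empty object for a successful delete unless ReturnValues is set) and reject on error, so the .then((error) => ...) branch never actually sees errors. A sketch of promise handling that surfaces them, reusing the helper names from the question:

// Sketch: the resolve value carries data, the reject value carries the error,
// so errors need a .catch rather than an isEmpty check in .then
dynamoDbLib.call("delete", params)
  .then(() => callback(null, success({ status: 204 })))
  .catch((e) => {
    console.log(e);
    callback(null, failure({ status: false, error: e.message }));
  });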

Loopback and Stripe Webhooks

I currently have a LoopBack project set up, and I am trying to receive webhooks from Stripe.
My current remote method looks like the following:
Stripeconnect.stripeWebhook = function(msg, cb) {
  cb(null, msg);
};

Stripeconnect.remoteMethod(
  'stripeWebhook', {
    description: 'This will insert the description',
    http: {
      path: '/stripeWebhook',
      verb: 'get'
    },
    accepts: {arg: 'msg', type: 'any'},
    returns: {
      arg: 'status',
      type: 'any'
    }
  }
);
But the response I receive from Stripe is:
undefined [Function: callback]
I am unable to find any documentation online regarding Loopback and Stripe webhooks.
Would anybody be able to help, or point me in the right direction?
I have set up Stripe to point at this endpoint of the API.
Thanks in advance. If you need any more info, please let me know.
OK, so I was able to get this working by reading the payload from the request body:
/**
 * Receiving Webhook
 * @desc Webhook EP
 * @param {data} Object from Stripe.
 * @return {response} response code
 */
Stripeconnect.stripeWebhook = function(request, cb) {
  console.log(request.type, request.data);
};

Stripeconnect.remoteMethod(
  'stripeWebhook', {
    accepts: { arg: 'data', type: 'object', http: { source: 'body' } },
    returns: [
      { arg: 'response', type: 'any', root: true }
    ]
  });
Which you can see from:
accepts: { arg: 'data', type: 'object', http: { source: 'body' } },
Hopefully this helps anybody else who is having this or a similar issue.
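One further note: Stripe delivers webhooks as HTTP POSTs, so if you keep an explicit http mapping it should use verb 'post', and calling the callback lets Stripe record a 2xx delivery. A sketch combining that with the body-source accepts above (the response shape is an assumption, not required by Stripe):

Stripeconnect.stripeWebhook = function(data, cb) {
  console.log(data.type, data.data);
  cb(null, { received: true }); // respond so Stripe sees a 2xx
};

Stripeconnect.remoteMethod('stripeWebhook', {
  accepts: { arg: 'data', type: 'object', http: { source: 'body' } },
  returns: { arg: 'response', type: 'any', root: true },
  http: { path: '/stripeWebhook', verb: 'post' } // Stripe webhooks are POSTs
});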

Cross-domain Update from Sencha Touch 2 to external server

Trying to create a Sencha Touch 2 app syncing to a Node.js server; code below.
The server uses another port on the same IP, so this is cross-domain.
(The server uses Mongoose to talk to a MongoDB back-end, not shown.)
Using a JSONP proxy as shown can read data from the server but breaks when writing:
"JSONP proxies can only be used to read data".
I guess the JSONP proxy's writer config only serializes the query and isn't used to write on sync (save).
The Sencha docs state an Ajax proxy can't go cross-domain, even though a cross-domain Ext.Ajax/Ext.data.Connection is discussed in the Sencha forums:
http://www.sencha.com/forum/showthread.php?17691-Cross-domain-Ext.Ajax-Ext.data.Connection
I have found several ways to do a (cross-domain) JSON POST (e.g. Mobile Application Using Sencha Touch - JSON Request Generates Syntax Error) but don't know how to integrate this as a writer in a proxy that syncs my store.
"Sencha Touch: ScriptTagProxy url for create/update functionality" seems to offer pointers, but this is Ajax and apparently unsuited for cross-domain use.
I've been reading this forum and elsewhere for a couple of days, but I seem to be stuck. Any help would be much appreciated.
Node.js and restify server
var server = restify.createServer({
  name: 'Server',
  key: fs.readFileSync(root + '/' + 'privatekey.pem'),
  certificate: fs.readFileSync(root + '/' + 'certificate.pem')
});
server.use(restify.bodyParser());
server.use(restify.queryParser());

function getMessages(req, res, next) {
  Model.find(function (err, data) {
    res.setHeader('Content-Type', 'text/javascript;charset=UTF-8');
    res.send(req.query["callback"] + '({"records":' + JSON.stringify(data) + '});');
  });
}

function postMessage(req, res, next) { // not yet tested
  var obj = new Model();
  obj.name = req.params.name;
  obj.description = req.params.description;
  obj.date = new Date();
  obj.save(function (err) {
    if (err) throw err;
    console.log('Saved.');
    res.send('Saved.');
  });
}

server.post(/^\/atapp/, postMessage);
server.get(/^\/atapp/, getMessages);

server.listen(port, ipaddr, function() {
  console.log('%s: secure Node server started on %s:%d ...', Date(Date.now()), ipaddr, port);
});
Sencha Touch 2
Model
Ext.define('ATApp.model.User', {
  extend: 'Ext.data.Model',
  config: {
    fields: [
      { name: 'name', type: 'string' },
      { name: 'description', type: 'string' },
      { name: 'date', type: 'date' },
      { name: '_id' }
...
Store
Ext.define('ATApp.store.Data', {
  extend: 'Ext.data.Store',
  requires: [
    'ATApp.model.User',
    'Ext.data.proxy.JsonP'
  ],
  config: {
    autoLoad: true,
    model: 'ATApp.model.User',
    storeId: 'Data',
    proxy: {
      type: 'jsonp',
      model: 'ATApp.model.User',
      url: 'https://192.168.2.45:13017/atapp',
      reader: {
        type: 'json',
        idProperty: '_id',
        rootProperty: 'records',
        useSimpleAccessors: true
      },
      writer: {
        type: 'json',
        allowSingle: false,
        encode: true,
        idProperty: '_id',
        rootProperty: 'records'
...
Controller
onNewDataRecord: function (view) {
  console.log('newDataRecord');
  var now = new Date();
  var record = Ext.create('ATApp.model.User', {
    date: now,
    name: '..',
    description: '..'
  });
  var store = Ext.data.StoreManager.lookup('Data');
  record.setProxy(store.getProxy());
  store.add(record);
  this.activateEditor(record);
},
...
In Sencha Touch 2 apps, the browser prohibits cross-domain AJAX calls (they violate the same-origin security policy). This applies to different domains, different IP addresses, and even different ports on the same IP address. JSONP partly circumvents this by fetching/reading data encapsulated in a script tag in a newly initiated HTTP GET request. In this way the Sencha Touch 2 JSONP proxy can load (fetch/read) a store from a (cross-domain) server. However, the JSONP proxy cannot write data. In 1 and 2 an approach is described which I have adapted.
My solution uses the JSONP proxy to fetch data, but not to store it (which it can't). Instead, new records, and records to be saved or deleted, are communicated with the server in a newly initiated HTTP GET request. Even though only HTTP GET is used, the server accepts the messages get (described in the question above), put, del and new. Get is used by the JSONP store/proxy load().
Node.js Server
// routes
server.get(/^\/atapp\/put/, putMessage);
server.get(/^\/atapp\/get/, getMessages);
server.get(/^\/atapp\/del/, delMessage);
server.get(/^\/atapp\/new/, newMessage);

function newMessage(req, res, next) {
  var obj = new Model(); // Mongoose: create new MongoDB object
  obj.save(function (err, data) {
    var x = err || data;
    res.setHeader('Content-Type', 'text/javascript;charset=UTF-8');
    res.send(req.query["callback"] + '({"payload":' + JSON.stringify(x) + '});');
  }); // send reply to the Sencha Touch 2 callback
}

function putMessage(req, res, next) {
  var q = JSON.parse(req.query.data); // no reply: add error recovery separately
  var obj = Model.findByIdAndUpdate(q.key, q.val);
}

function delMessage(req, res, next) {
  var key = JSON.parse(req.query.data);
  Model.findByIdAndRemove(key); // no reply: add error recovery separately
}
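The put and del handlers above deliberately skip the reply; if the client should be able to detect failures, they could answer through the JSONP callback the same way getMessages does. A sketch for putMessage, reusing the names above:

function putMessage(req, res, next) {
  var q = JSON.parse(req.query.data);
  Model.findByIdAndUpdate(q.key, q.val, function (err, doc) {
    // reply through the JSONP callback so the client's success handler fires
    res.setHeader('Content-Type', 'text/javascript;charset=UTF-8');
    res.send(req.query["callback"] + '({"payload":' + JSON.stringify(err || doc) + '});');
  });
}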
Sencha Controller
New
onNewDataRecord: function (view) {
  var control = this;
  Ext.Ajax.Crossdomain.request({
    url: 'https://192.168.2.45:13017/atapp/new',
    rootProperty: 'payload',
    scriptTag: true, // see [1]: http://code.google.com/p/extjsdyntran/source/browse/trunk/extjsdyntran/WebContent/js/3rdparty/Ext.lib.Ajax.js?r=203
    success: function(r) { // process synchronously after response
      var obj = r.payload;
      var store = Ext.data.StoreManager.lookup('Data');
      var key = obj._id; // MongoDB document id
      store.load(function(records, operation, success) { // add new record to store
        var ind = store.findBy(function(rec, id) {
          return rec.raw._id == key; // identify record in store
        });
        var record = store.getAt(ind);
        control.onEditDataRecord(view, record);
      }, this);
    }
  });
},
Save
onSaveDataRecord: function (view, record) {
  var rec = {key: record.data.id, val: {}}; // save template
  var i;
  for (i in record.modified) rec.val[i] = record.data[i];
  var delta = Ext.encode(rec); // only save modified fields
  Ext.Ajax.Crossdomain.request({
    url: 'https://192.168.2.45:13017/atapp/put',
    params: {
      data: delta
    },
    rootProperty: 'payload',
    scriptTag: true // use script tag transport
  });
},
Delete
onDelDataRecord: function (view, record) {
  var key = record.data.id;
  Ext.Ajax.Crossdomain.request({ // delete document in db
    url: 'https://192.168.2.45:13017/atapp/del',
    params: {
      data: Ext.encode(key),
      format: 'json'
    },
    rootProperty: 'payload',
    scriptTag: true // use script tag transport
  });
  record.destroy(); // delete record from store
},
