Lambda function does not publish to SNS or update dynamodb - javascript

I am trying to update dynamodb and send SNS notification from Lambda function using the following code. Even though function execute successfully SNS notification does not happen. SNS log does not show any entry either. SNS from console to the same ARN works. Checked to update dynamodb that does not work either. Required IAM role policies for Dynamodb and SNS publish are created. Any pointers or way to debug will help.
'use strict';
console.log('Loading function');
var AWS = require("aws-sdk");
var sns = new AWS.SNS();
AWS.config.update({
region: "us-east-1"
});
var docClient = new AWS.DynamoDB.DocumentClient();
exports.handler = (event, context, callback) => {
context.callbackWaitsForEmptyEventLoop = false;
console.log('Received event: ', JSON.stringify(event, null, 2));
console.log('Received context: ', JSON.stringify(context, null, 2));
// console.log('Cognito id: ', context.identity.cognitoIdentityId);
+console.log("Start .................." + new Date().toString());+
console.log("Notifying....:");
sns.publish({
TargetArn: "arn:aws:sns:us-east-1:796915632304:endpoint/GCM/JaybMe/b65794ea-d30d-38a8-8450-e0ff3b877bfc",
Message: "Simple notification message "
}, function(err, data) {
if(err) {
console.error('error publishing to SNS');
context.callbackWaitsForEmptyEventLoop = false;
callback(err);
//context.fail(err);
} else {
console.info('Message published to SNS');
context.callbackWaitsForEmptyEventLoop = false;
callback(null, "SUCCESS");
}
});
console.log('Finished function');
context.callbackWaitsForEmptyEventLoop = false;
callback(null, "SUCCESS");
}
Tested with run time 4.3 and 6.10 but same behaviour
thanks
Sajeev

It reminds me of two possible cases;
First, the Lambda function may be terminating before the SNS client finishes sending the message — you call `callback(null, "SUCCESS")` immediately after `sns.publish(...)` instead of waiting for its completion handler. Use `await` (or the SDK's `.promise()` method) so the handler waits for the publish call to finish.
Second, maybe you have put your lambda in a VPC. look at a similar case here.

Related

How would I set the status of an agent from "Missed" to "Available" in Amazon Connect?

we are using Amazon Connect and we are running into an issue.
On the service-desk, we have a "standby shift", this means the user that is in that queue/shift is the only on-call. However, if he/she misses said call. They are stuck in the "Missed" state within the Connect CCP. I already tried the custom CCP method, however this does not fit our requirements yet.
Our second option is making a lambda that gets executed on a missed call (Right before the disconnect of a user) to remove the "Missed" state and set it on "Available". This is what I tried to do so:
var AWS = require('aws-sdk');
exports.handler = async (event) => {
var connect = new AWS.Connect();
let agentARN = event['Details']['Parameters']['agent'];
var params = {
AgentStatusId: 'Available',
InstanceId: 'arn:aws:connect:eu-central-1:ID:ARN',
UserId: agentARN
};
let errors;
connect.putUserStatus(params, function(err, data) {
if (err) errors = err.stack; // an error occurred
else errors = data; // successful response
});
const response = {
statusCode: 200,
body: errors
};
return response;
};
Any ideas?

Is there any error in my aws Lambda function?

Can any one just tell me what's wrong with the below code.
I am getting a timeout error for this function. Even though I increase the timeout in the AWS basic settings, it shows the same error.
from decrypting.js i am decrypting the value and using that in index.js await decryptSecret("S3_SECRET").
Is this the proper way?
can anyone help me with the best solution for this issue
index.js
const aws = require('aws-sdk');
require('dotenv').config();
const path = require("path")
const fs = require("fs")
const { decryptSecret } = require('decrypting.js');
exports.handler = function () {
try {
const directoryPath = path.resolve(__dirname, process.env.LocalPath);
fs.readdir(directoryPath, async function (error, files) {
if (error) {
console.log("Error getting directory information");
} else {
console.log("Loading lambda Function...")
let cloudStorageFiles = [];
aws.config.setPromisesDependency();
aws.config.update({
accessKeyId: process.env.S3_ACCESS_KEY,
secretAccessKey: await decryptSecret("S3_SECRET"),
// process.env.S3_SECRET,
region: process.env.S3_REGION
})
const s3 = new aws.S3();
const response = await s3.listObjectsV2({
Bucket: 'xxxxx',
Prefix: 'xxxxx'
}, function (err, data) {
if (err) {
console.log(err, err.stack);
} else {
var contents = data.Contents;
contents.forEach((content) => {
cloudStorageFiles.push(content.Key);
});
}
}).promise();
console.log('First-Cloud-File-Storage:', cloudStorageFiles)
// return cloudStorageFiles
};
console.log("Lambda function ended")
});
// return `Lambda function successfully completed`
} catch (error) {
console.log("Ooops...Error!", error)
};
};
decrypting.js
const aws = require('aws-sdk');
aws.config.update({ region: 'us-east-1' });
const kms = new aws.KMS();
const decrypted = {};
exports.decryptSecret = async function (secretName) {
if (decrypted[secretName]) {
console.log('returning cached secret-name:' + secretName);
return decrypted[secretName]
}
console.log('decrypting:' + secretName);
try {
const req = { CiphertextBlob: Buffer.from(process.env[secretName], 'base64') };
const data = await kms.decrypt(req).promise();
const decryptedVal = data.Plaintext.toString('ascii');
decrypted[secretName] = decryptedVal;
console.log('decryptedVal:', decryptedVal)
return decryptedVal;
} catch (error) {
console.log('decrypt error:', error);
throw error;
}
};
Error Message:
{
"errorMessage": "2021-02-10T06:48:52.723Z 5dec4413-f8db-49bd-8075-661ccf6ef1a4 Task timed out after 50.02 seconds"
}
loged output:
INFO Loading lambda Function...
INFO decryptingS3_SECRET
Your function is timing out because it does not have access to the internet. Since it is running inside of a VPC, it must be placed in a private subnet to have outbound internet access. This includes S3.
A private subnet in this case is a subnet where the default route (0.0.0.0/0) points to a NAT gateway and not an internet gateway.
Your function times out, because a Lambda function associated with a VPC has no internet access by default. From docs:
When you connect a function to a VPC in your account, the function can't access the internet unless your VPC provides access.
Subsequently, your function can't connect to the public endpoints of the S3 and KMS. To rectify this, there are two options:
place your function in private subnet (public will not work), setup NAT gateway in a public subnet and configure route tables so that your function can access internet using NAT. The process is explained here.
setup VPC endpoints for KMS and S3. This will allow your function to privately access these services without the need for internet access.

How to create a lambda function to update a new record on amplify?

How to create a lambda function to update a new record on amplify project using GraphQL ?
I have an amplify project using api GraphQL as api (also dynamoDB).
My app is a simple todo list, just to study, so I'm saving todos on my dynamoDB, that's my schema:
type Todo #model #auth(rules: [{ allow: owner }]) {
id: ID!
title: String!
status: String
}
When I create a new Todo I only set the title, not the status, I want to update the status with a default value using a lambda trigger function (I know I don't have to do that way, but I'm trying that to learn more lambda functions on amplify).
So I followed the steps from amplify docs, but now I can't figure it out what's the next step (I'm not familiar with aws lambda functions).
What I want to do something like:
for each new record
newRecord.status = 'To do'
newRecord.update()
When you first create the Todo item, it is stored as an item in a DynamodDB table. To update the todo item, you need to update the item in the DynamoDB table.
When you're updating an item in DynamoDB you need to use an AWS SDK to handle this. The simplest sdk to use is to use the AWS DynamoDB Document Client if you are using nodejs in the lambda function.
// Load the AWS SDK for Node.js
var AWS = require('aws-sdk');
// Set the region
AWS.config.update({region: 'REGION'}); //replace Region with the region you are using ('EU-WEST-1')
// Create DynamoDB document client
var docClient = new AWS.DynamoDB.DocumentClient({apiVersion: '2012-08-10'});

// put the following code in the exports.handler in your lambda function
var params = {
    TableName: 'Todo-SOMERANDOMSTRING-ENV', // the name of your dynamodb table
    Key: {
        'id': 'asasd123-asdsa1-12sdasads-12', // the id of your todo item
    },
    // 'status' is a DynamoDB reserved word; it must be aliased through
    // ExpressionAttributeNames or the update fails with a
    // ValidationException ("reserved keyword: status").
    UpdateExpression: 'set #status = :s',
    ExpressionAttributeNames: {
        '#status': 'status'
    },
    ExpressionAttributeValues: {
        ':s': 'To do' // what the new status should be
    }
};
// and run the update function
docClient.update(params, function (err, data) {
    if (err) {
        console.log("Error", err);
    } else {
        console.log("Success", data);
    }
});
Edit
Based on your comment I think you might be on the DynamoDB Lambda Triggers section (?) and your boilerplate lambda function might looks like this:
exports.handler = function (event, context) {
console.log(JSON.stringify(event, null, 2));
event.Records.forEach((record) => {
console.log(record.eventID);
console.log(record.eventName);
console.log('DynamoDB Record: %j', record.dynamodb);
});
context.done(null, 'Successfully processed DynamoDB record');
};
I haven't done this type of trigger myself before, so I'm not completely sure how the data in each record is structured, but I think it might be accessible like this:
record.data.id
//or
record.Item.id
You can find this out by going to the lambda console, find and open your lambda function, go to "monitoring" and then open "View logs in CloudWatch" in AWS and check the CloudWatch logs after you have created an item.
Assuming it is record.Item.id your lambda code could look like this (untested):
var AWS = require('aws-sdk');
AWS.config.update({region: 'REGION'});
var docClient = new AWS.DynamoDB.DocumentClient({apiVersion: '2012-08-10'});
exports.handler = function (event, context) {
console.log(JSON.stringify(event, null, 2));
event.Records.forEach((record) => {
var params = {
TableName: 'YOUR-DB-TABLE-NAME',
Key: {
'id' : record.Item.id,
},
UpdateExpression: 'set status = :status',
ExpressionAttributeValues: {
':status' : 'To do' // what the new status should be
}
};
docClient.update(params, function(err, data) {
if (err) {
console.log("Error", err);
} else {
console.log("Success", data);
}
});
});
context.done(null, 'Successfully processed DynamoDB record');
};
I don't think this code is entirely complete, and you might have to change how/when the "context.done" function works (because I think it will run before the code finishes updating the item), but it might get you somewhat in the right direction.

Push Notification Using Amazon SNS (Node)

I am trying to implement push notification using Amazon SNS in Node. I have created a topic and published a message using below code
Create Topic
// Create the topic (idempotent: returns the existing ARN if it already
// exists) and log the ARN once the call settles.
var createTopicPromise = new AWS.SNS({apiVersion: '2010-03-31'})
    .createTopic({Name: "TOPIC_NAME"})
    .promise();

createTopicPromise
    .then(function (data) {
        console.log("Topic ARN is " + data.TopicArn);
    })
    .catch(function (err) {
        console.error(err, err.stack);
    });
I got a TopicArn something like this arn:aws:sns:us-east-1:xxxxxxxxxx:TOPIC_NAME
Publish
// Load the AWS SDK for Node.js
var AWS = require('aws-sdk');
// Set region
AWS.config.update({region: 'REGION'});

// Create publish parameters
var params = {
    Message: 'You Got Message!! Hello....', /* required */
    TopicArn: 'arn:aws:sns:us-east-1:xxxxxxxxxx:TOPIC_NAME'
};

// Create promise and SNS service object
var publishTextPromise = new AWS.SNS({apiVersion: '2010-03-31'}).publish(params).promise();

// Handle promise's fulfilled/rejected states
publishTextPromise.then(
    function (data) {
        // Bug fix: ${...} only interpolates inside backtick template
        // literals — in the original double-quoted string it was printed
        // verbatim.
        console.log(`Message ${params.Message} sent to the topic ${params.TopicArn}`);
        console.log("MessageID is " + data.MessageId);
    }).catch(
    function (err) {
        console.error(err, err.stack);
    });
Now the message have been published now I need to see this on my mobile. So I used subscribe code like this
var params = {
    Protocol: 'application', /* required */
    TopicArn: 'arn:aws:sns:us-east-1:xxxxxxxxxx:TOPIC_NAME', /* required */
    // The platform endpoint ARN returned by createPlatformEndpoint when
    // the mobile device was registered with SNS.
    Endpoint: 'MOBILE_ENDPOINT_ARN'
};

// Create promise and SNS service object
// (the stray `req;` / `res;` statements and the unmatched closing brace
// in the original were syntax noise and have been removed)
var subscribePromise = new AWS.SNS({ apiVersion: '2010-03-31' }).subscribe(params).promise();

// Handle promise's fulfilled/rejected states
subscribePromise.then(
    function (data) {
        console.log("Subscription ARN is " + data.SubscriptionArn);
    }).catch(
    function (err) {
        console.error(err, err.stack);
    });
My question is what is Endpoint in Subscribe params. Where should I get this? and So far Am I right ? Please help me out.
Endpoint here is the ARN of your mobile application that you need to register with AWS. Here is the snippet from the official documentation
For Amazon SNS to send notification messages to mobile endpoints,
whether it is direct or with subscriptions to a topic, you first need
to register the app with AWS.
Source : https://docs.aws.amazon.com/sns/latest/dg/mobile-push-send-register.html

Azure + Node Js Service bus persistent subscription of topic

Using node js i want to create persistent subscription for Azure service bus service topic. right now it is execute only once. Please guide me I am new to this. Thanks in advance. I am using following code to subscribe topic.
var azure = require('azure');
var azureConnection = "Endpoint=sb:My connection string";

// Retry transient failures with exponential backoff.
var retryOperations = new azure.ExponentialRetryPolicyFilter();
var serviceBusService = azure
    .createServiceBusService(azureConnection)
    .withFilter(retryOperations);

// Pull one message from the subscription; without isPeekLock the message
// is deleted from the subscription as soon as it is received.
serviceBusService.receiveSubscriptionMessage('mytopic01', 'mytopicsub', function (error, receivedMessage) {
    if (error) {
        return;
    }
    // Message received and deleted
    console.log(receivedMessage);
});
Also I don't want to use setInterval function. I want to solution if message publish to the topic it should automatically trigger subscription.
Actually, if your client application is an independent Node.js application, the usual approach is to set up a program that polls Service Bus for messages in a loop.
E.G.
var azure = require('azure');
var sbService = azure.createServiceBusService(<connection_string>);
function checkForMessages(sbService, queueName, callback) {
sbService.receiveSubscriptionMessage(queueName, { isPeekLock: true }, function (err, lockedMessage) {
if (err) {
if (err === 'No messages to receive') {
console.log('No messages');
} else {
callback(err);
}
} else {
callback(null, lockedMessage);
}
});
}
function processMessage(sbService, err, lockedMsg) {
if (err) {
console.log('Error on Rx: ', err);
} else {
console.log('Rx: ', lockedMsg);
sbService.deleteMessage(lockedMsg, function(err2) {
if (err2) {
console.log('Failed to delete message: ', err2);
} else {
console.log('Deleted message.');
}
})
}
}
setInterval(checkForMessages.bind(null, sbService, queueName, processMessage.bind(null, sbService)), 5000);
You can refer to the code sample in the similar scenario at GitHub provided by Azure Team.
Any further concern, please feel free to let me know.

Categories