Is there any error in my AWS Lambda function? - javascript

Can anyone tell me what's wrong with the code below?
I am getting a timeout error for this function; even though I increase the timeout in the AWS basic settings, it shows the same error.
In decrypting.js I decrypt the value and use it in index.js via await decryptSecret("S3_SECRET").
Is this the proper way?
Can anyone help me with the best solution for this issue?
index.js
const aws = require('aws-sdk');
require('dotenv').config();
const path = require("path");
const fs = require("fs");
const { decryptSecret } = require('./decrypting.js');

exports.handler = function () {
  try {
    const directoryPath = path.resolve(__dirname, process.env.LocalPath);
    fs.readdir(directoryPath, async function (error, files) {
      if (error) {
        console.log("Error getting directory information");
      } else {
        console.log("Loading lambda Function...");
        let cloudStorageFiles = [];
        aws.config.setPromisesDependency();
        aws.config.update({
          accessKeyId: process.env.S3_ACCESS_KEY,
          secretAccessKey: await decryptSecret("S3_SECRET"),
          // process.env.S3_SECRET,
          region: process.env.S3_REGION
        });
        const s3 = new aws.S3();
        const response = await s3.listObjectsV2({
          Bucket: 'xxxxx',
          Prefix: 'xxxxx'
        }, function (err, data) {
          if (err) {
            console.log(err, err.stack);
          } else {
            var contents = data.Contents;
            contents.forEach((content) => {
              cloudStorageFiles.push(content.Key);
            });
          }
        }).promise();
        console.log('First-Cloud-File-Storage:', cloudStorageFiles);
        // return cloudStorageFiles
      }
      console.log("Lambda function ended");
    });
    // return `Lambda function successfully completed`
  } catch (error) {
    console.log("Ooops...Error!", error);
  }
};
decrypting.js
const aws = require('aws-sdk');
aws.config.update({ region: 'us-east-1' });
const kms = new aws.KMS();
const decrypted = {};

exports.decryptSecret = async function (secretName) {
  if (decrypted[secretName]) {
    console.log('returning cached secret-name:' + secretName);
    return decrypted[secretName];
  }
  console.log('decrypting:' + secretName);
  try {
    const req = { CiphertextBlob: Buffer.from(process.env[secretName], 'base64') };
    const data = await kms.decrypt(req).promise();
    const decryptedVal = data.Plaintext.toString('ascii');
    decrypted[secretName] = decryptedVal;
    console.log('decryptedVal:', decryptedVal);
    return decryptedVal;
  } catch (error) {
    console.log('decrypt error:', error);
    throw error;
  }
};
Error Message:
{
  "errorMessage": "2021-02-10T06:48:52.723Z 5dec4413-f8db-49bd-8075-661ccf6ef1a4 Task timed out after 50.02 seconds"
}
Logged output:
INFO Loading lambda Function...
INFO decrypting:S3_SECRET

Your function is timing out because it does not have access to the internet. Since it is running inside a VPC, it must be placed in a private subnet to have outbound internet access; this applies to reaching S3 as well.
A private subnet in this case is a subnet whose default route (0.0.0.0/0) points to a NAT gateway and not an internet gateway.

Your function times out because a Lambda function associated with a VPC has no internet access by default. From the docs:
"When you connect a function to a VPC in your account, the function can't access the internet unless your VPC provides access."
Consequently, your function can't connect to the public endpoints of S3 and KMS. To rectify this, there are two options:
- Place your function in a private subnet (a public one will not work), set up a NAT gateway in a public subnet, and configure the route tables so that your function can reach the internet through the NAT. The process is explained in the AWS documentation.
- Set up VPC endpoints for KMS and S3, as sketched below. This allows your function to access these services privately, without the need for internet access.
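For the VPC endpoint option, here is a minimal sketch using the AWS CDK (aws-cdk-lib, JavaScript). The construct names and the freshly created VPC are illustrative assumptions; in practice you would reference the existing VPC the function is attached to.

// Hypothetical AWS CDK v2 sketch: private access to S3 and KMS from a VPC.
const cdk = require('aws-cdk-lib');
const ec2 = require('aws-cdk-lib/aws-ec2');

const app = new cdk.App();
const stack = new cdk.Stack(app, 'LambdaVpcEndpoints'); // assumed stack name
const vpc = new ec2.Vpc(stack, 'Vpc'); // assumption: replace with your existing VPC

// Gateway endpoint: S3 traffic stays on the AWS network, no internet path needed.
vpc.addGatewayEndpoint('S3Endpoint', {
  service: ec2.GatewayVpcEndpointAwsService.S3,
});

// Interface endpoint: a private ENI inside the VPC for KMS API calls.
vpc.addInterfaceEndpoint('KmsEndpoint', {
  service: ec2.InterfaceVpcEndpointAwsService.KMS,
});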

Related

How do I use JavaScript to call the AWS Textract service to upload a local photo for identification (without S3)

I want to call the AWS Textract service to identify the numbers in a local photo in JavaScript (without S3), and I get an error:
TypeError: Cannot read property 'byteLength' of undefined in Client.send(command)
I tried to find a suitable sample in the AWS SDK for JavaScript v3 official documentation, but couldn't find one.
I want to know how to modify the code to call this service.
This is my code:
const {
  TextractClient,
  AnalyzeDocumentCommand
} = require("@aws-sdk/client-textract");

// Set the AWS region
const REGION = "us-east-2"; // The AWS Region. For example, "us-east-1".
var fs = require("fs");
var res;
var imagedata = fs.readFileSync('./1.png');
res = imagedata.toString('base64');
console.log("res2");
console.log(typeof (res));
// console.log(res)

const client = new TextractClient({ region: REGION });
const params = {
  Document: {
    Bytes: res
  }
};
console.log("params");
console.log(typeof (params));
// console.log(params)

const command = new AnalyzeDocumentCommand(params);
console.log("command");
console.log(typeof (command));

const run = async () => {
  // async/await.
  try {
    const data = await client.send(command);
    console.log(data);
    // process data.
  } catch (error) {
    console.log("Error");
    console.log(error);
    // error handling.
  } finally {
    // finally.
  }
};
run();
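The byteLength error typically appears when Document.Bytes receives a base64 string; the v3 SDK expects raw bytes (a Buffer or Uint8Array). A minimal sketch under that assumption, using DetectDocumentTextCommand (which, unlike AnalyzeDocument, does not require a FeatureTypes parameter):

// Sketch: pass the raw Buffer instead of a base64 string.
const {
  TextractClient,
  DetectDocumentTextCommand
} = require("@aws-sdk/client-textract");
const fs = require("fs");

const client = new TextractClient({ region: "us-east-2" });

const run = async () => {
  const imageBytes = fs.readFileSync("./1.png"); // Buffer, not .toString('base64')
  const command = new DetectDocumentTextCommand({
    Document: { Bytes: imageBytes }
  });
  const data = await client.send(command);
  // Detected lines and words come back as Blocks.
  console.log(data.Blocks);
};
run().catch(console.error);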

Microsoft Graph API - Problem to access oneDrive in route '/me/drives'

I'm trying to use the Microsoft Graph API to access OneDrive folders and files in node.js. The authentication goes fine, and it returns the calendar as well, but I have used this:
var graph = require('@microsoft/microsoft-graph-client');
require('isomorphic-fetch');

module.exports = {
  getUserDetails: async function (accessToken) {
    const client = getAuthenticatedClient(accessToken);
    const user = await client.api('/me').get();
    return user;
  },
  // GetEventsSnippet
  getEvents: async function (accessToken) {
    const client = getAuthenticatedClient(accessToken);
    const events = await client
      .api('/me/events')
      .select('subject,organizer,start,end')
      .orderby('createdDateTime DESC')
      .get();
    return events;
  },
  getDrives: async function (accessToken) {
    const client = getAuthenticatedClient(accessToken);
    try {
      const drive = await client
        .api('/me/drive/root/children')
        .get();
      console.log(drive);
      return drive;
    } catch (e) {
      console.log(e);
    }
  }
};

function getAuthenticatedClient(accessToken) {
  // Initialize Graph client
  const client = graph.Client.init({
    // Use the provided access token to authenticate requests
    authProvider: (done) => {
      done(null, accessToken);
    }
  });
  console.log(client);
  return client;
}
And it doesn't return anything, but says that I'm unauthenticated.
I am following this documentation: https://learn.microsoft.com/en-us/onedrive/developer/rest-api/api/drive_list?view=odsp-graph-online
How can I make this work?
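One common cause of this pattern, where /me and calendar calls succeed but drive calls come back unauthenticated, is an access token that was acquired without a Files scope. A hypothetical sketch (the scope list is the assumption here; the question does not show how the token is requested):

// Hypothetical: wherever the access token is requested, include a Files scope.
// These are standard Microsoft Graph delegated permission names.
const OAUTH_SCOPES = [
  'user.read',
  'calendars.read',
  'files.read' // needed for /me/drive and /me/drives
];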

AWS Kendra sdk call will not return results

I have been following the AWS-Kendra react-search app example you can find here:
https://docs.aws.amazon.com/kendra/latest/dg/deploying.html
After importing the Kendra client with:
const kendra = require('aws-sdk/clients/kendra');
const kendraClient = new kendra({apiVersion: '2019-02-03', region: 'us-east-1'});
Any call on kendraClient to any of the Kendra services returns null. I have been executing queries with:
const results = kendraClient.query({ IndexId: INDEX_ID, QueryText: queryText });
which returns a request object with null data and error fields.
I have calls to S3 that execute correctly in the same file, so I do not believe it to be an authentication problem. If I had to guess, it's some issue with how I created the Kendra object and client; the usual
kendra = new AWS.Kendra();
doesn't work, because Kendra is not part of the browser version of the SDK.
Are you trying to run JS from the browser directly? Here is a sample Node.js snippet:
var kendra = require("aws-sdk/clients/kendra");
var kendraClient = new kendra({ apiVersion: "2019-02-03", region: "us-west-2" });

exports.handler = function (event) {
  try {
    console.log("Starting....");
    var params = {
      IndexId: "<<Enter your indexId here>>",
      QueryText: "<<Enter your queryText here>>",
      QueryResultTypeFilter: "DOCUMENT",
      PageNumber: 1
    };
    var kendraResponse = kendraClient.query(params, function (err, data) {
      if (err) console.log(err, err.stack); // an error occurred
      else console.log("Kendra result is", data); // successful response
    });
    const response = {
      "dialogAction": {
        "fulfillmentState": "Fulfilled",
        "type": "Close",
        "message": {
          "contentType": "PlainText"
        }
      }
    };
    return response;
  } catch (error) {
    console.log(error);
  }
};
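If you would rather await the result than read the request object, the SDK v2 .promise() helper works with Kendra as well; a short sketch with the same placeholder values:

// Sketch: resolve the query as a promise instead of passing a callback.
var kendra = require("aws-sdk/clients/kendra");
var kendraClient = new kendra({ apiVersion: "2019-02-03", region: "us-west-2" });

async function runQuery() {
  const params = {
    IndexId: "<<Enter your indexId here>>",
    QueryText: "<<Enter your queryText here>>"
  };
  const data = await kendraClient.query(params).promise();
  console.log("Kendra result is", data);
}
runQuery().catch(console.error);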

Sharing SFTP Connection in Azure Functions

I have an Azure Function that sends a file to an SFTP server. It may be called multiple times, so to save the time of connecting to the SFTP server I would like to reuse the SFTP connection.
const Client = require("ssh2-sftp-client");
const sftp = new Client();
let sftpConnected = false;

const sendToSFTP = async (data, location, context) => {
  await setConn(context);
  try {
    await sftp.put(data, location);
  } catch (err) {
    context.log.error('sftp put error: ' + err);
  }
};

const setConn = async (context) => {
  if (sftpConnected) return;
  try {
    await sftp.connect({
      host: 'myserver',
      username: 'user',
      passphrase: 'pwd',
    });
    sftpConnected = true;
  } catch (err) {
    context.log.error('sftp connect error: ' + err);
  }
};

sftp.on('close', () => {
  sftpConnected = false;
  sftp.end();
});
sftp.on('end', () => {
  sftpConnected = false;
  sftp.end();
});

exports.sendToSFTP = sendToSFTP;
This works when testing locally, but when deployed to Azure the close and end listeners do not seem to be called when the Azure Function goes idle. When I try to use the SFTP connection after a period of time, there is no connection there and the call to the SFTP server times out.
Has anyone solved this issue?
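One mitigation to consider (an assumption, not a confirmed fix for this question): don't trust the cached flag across idle periods; probe the connection with a cheap call and reconnect when it fails. A sketch against the ssh2-sftp-client API:

// Sketch: validate the cached connection before each use.
const setConn = async (context) => {
  if (sftpConnected) {
    try {
      // cwd() is a lightweight round trip; if the idle connection was
      // silently dropped, it throws and we fall through to reconnect.
      await sftp.cwd();
      return;
    } catch (err) {
      sftpConnected = false;
    }
  }
  try {
    await sftp.connect({ host: 'myserver', username: 'user', passphrase: 'pwd' });
    sftpConnected = true;
  } catch (err) {
    context.log.error('sftp connect error: ' + err);
  }
};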

How to upload multiple files to GoogleDrive API using for/loop (forEach)

I use the Google Drive API to upload multiple files.
I ran into a problem with running out of RAM while uploading multiple files. I tried to use forEach (a for loop) so my code would avoid uploading multiple files at the same time, but it doesn't work the way I expected: it always loops through the entire list of files and uploads them all at once.
I tried to use async/await syntax to block the loop, but it didn't work the way I expected either.
Here is my code:
const fs = require("fs");
const readline = require("readline");
const { google } = require("googleapis");

let files = ["file1.mp4", "file2.mp4"];
const SCOPES = ["https://www.googleapis.com/auth/drive.metadata.readonly"];
const TOKEN_PATH = "token.json";

fs.readFile("credentials.json", (err, content) => {
  if (err) return console.log("Error loading client secret file:", err);
  // Authorize a client with credentials, then call the Google Drive API.
  authorize(JSON.parse(content), uploadFiles);
});

function authorize(credentials, callback) {
  const { client_secret, client_id, redirect_uris } = credentials.installed;
  const oAuth2Client = new google.auth.OAuth2(
    client_id,
    client_secret,
    redirect_uris[0]
  );
  // Check if we have previously stored a token.
  fs.readFile(TOKEN_PATH, (err, token) => {
    if (err) return getAccessToken(oAuth2Client, callback);
    oAuth2Client.setCredentials(JSON.parse(token));
    callback(oAuth2Client);
  });
}

function getAccessToken(oAuth2Client, callback) {
  const authUrl = oAuth2Client.generateAuthUrl({
    access_type: "offline",
    scope: SCOPES
  });
  console.log("Authorize this app by visiting this url:", authUrl);
  const rl = readline.createInterface({
    input: process.stdin,
    output: process.stdout
  });
  rl.question("Enter the code from that page here: ", code => {
    rl.close();
    oAuth2Client.getToken(code, (err, token) => {
      if (err) return console.error("Error retrieving access token", err);
      oAuth2Client.setCredentials(token);
      // Store the token to disk for later program executions
      fs.writeFile(TOKEN_PATH, JSON.stringify(token), err => {
        if (err) console.error(err);
        console.log("Token stored to", TOKEN_PATH);
      });
      callback(oAuth2Client);
    });
  });
}

async function uploadFiles(auth) {
  for (const file of files) {
    var fileMetadata = {
      name: file
    };
    var media = {
      body: fs.createReadStream("test/" + file)
    };
    const drive = google.drive({ version: "v3", auth });
    const result = await drive.files.create(
      {
        resource: fileMetadata,
        media: media,
        fields: "id"
      },
      function (err, fileid) {
        if (err) {
          // Handle error
          console.error(err);
        } else {
          console.log("File Id: ", fileid.data.id);
          console.log("Uploaded..:" + file);
        }
      }
    );
    console.log("Uploading file..:" + file);
  }
}
I just want to ask: why doesn't the loop upload the files one at a time?

"I try to use forEach (for loop) for my code to avoid uploading multiple files at the same time"
You can't this way, because the call is asynchronous: you passed a callback as an argument to drive.files.create, so the returned value is not a promise that settles when the upload finishes, and await does not block the loop.
By the way, if you want to use async/await, you should wrap your call in a promisified function:
function myCreateFunc(fileInfos) {
  return new Promise((resolve, reject) => {
    drive.files.create(fileInfos, function callback(err, fileId) {
      if (err) {
        reject(err);
        return;
      }
      resolve(fileId);
    });
  });
}
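With that wrapper, awaiting inside a plain for...of loop serializes the uploads. A sketch reusing myCreateFunc and the files array from the snippets above:

async function uploadSequentially(files) {
  for (const file of files) {
    // The loop does not advance until this upload settles.
    const fileId = await myCreateFunc({
      resource: { name: file },
      media: { body: fs.createReadStream("test/" + file) },
      fields: "id"
    });
    console.log("Uploaded:", file);
  }
}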
Even after a long time, I will post my answer because I had the same problem.
Requirements:
- Enable APIs and Services for your project.
- Create a service account and download the key.
- If you need to upload your files into a folder, you need the folder id.
- Install googleapis using npm install googleapis.
Follow these steps to get your folder id:
- To find the folder id, you need to grant permission to the service account user. Share the folder with edit access and get the link to it.
- You will see something like this: https://drive.google.com/drive/u/1/folders/xxxxxXXXXXXxxxxxxxXXXXXXX
- In this case the folder id is xxxxxXXXXXXxxxxxxxXXXXXXX
const { google } = require("googleapis");
var fs = require("fs");

const KEYFILEPATH = "path to your keyfile.json";
const SCOPES = ["https://www.googleapis.com/auth/drive"];

const auth = new google.auth.GoogleAuth({
  keyFile: KEYFILEPATH,
  scopes: SCOPES,
});

const uploadFiles = async (auth) => {
  const driveService = google.drive({ version: "v3", auth });
  let count = 0;
  for (const file of fs.readdirSync(
    "full file path to your images/files folder"
  )) {
    // Log the file name.
    console.log(JSON.stringify(file));
    let fileMetadata = {
      name: file,
      parents: ["folder id"], // Optional
    };
    let media = {
      mimeType: "image/jpeg",
      body: fs.createReadStream(
        `full file path to your images/files folder/${file}`
      ),
    };
    const task = driveService.files.create({
      resource: fileMetadata,
      media: media,
      fields: "id",
    });
    try {
      await task;
      count = count + 1;
    } catch (e) {
      console.log(e);
      return null;
    }
  }
  // Log the total count of uploaded files.
  console.log("Count :", count);
  return;
};

uploadFiles(auth).catch(console.error);
