I have an Azure Function that sends a file to an SFTP server. It may be called multiple times, so to save the time spent connecting to the SFTP server I would like to reuse the SFTP connection.
const Client = require("ssh2-sftp-client");
const sftp = new Client();
let sftpConnected = false;

const sendToSFTP = async (data, location, context) => {
  await setConn(context);
  try {
    await sftp.put(data, location);
  } catch (err) {
    context.log.error('sftp put error: ' + err);
  }
}

const setConn = async (context) => {
  if (sftpConnected) return;
  try {
    await sftp.connect({
      host: 'myserver',
      username: 'user',
      passphrase: 'pwd',
    });
    sftpConnected = true;
  } catch (err) {
    context.log.error('sftp connect error: ' + err);
  }
}

sftp.on('close', () => {
  sftpConnected = false;
  sftp.end();
});

sftp.on('end', () => {
  sftpConnected = false;
  sftp.end();
});

exports.sendToSFTP = sendToSFTP;
This works when testing locally, but when deployed to Azure the close and end listeners do not seem to be called when the Azure Function goes idle. When I try to use the SFTP connection after a period of time, the connection is no longer there and the call to the SFTP server times out.
Has anyone solved this issue?
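One way to work around the unreliable close/end events is to stop trusting the cached flag on its own and probe the connection before each use, reconnecting when the probe fails. A minimal sketch, assuming ssh2-sftp-client's cwd() as a cheap liveness check (any lightweight call would do):

const Client = require('ssh2-sftp-client');
const sftp = new Client();
let sftpConnected = false;

// Re-validate the cached connection instead of relying on close/end events,
// which may not fire when the Functions host idles the worker.
const ensureConn = async (context) => {
  if (sftpConnected) {
    try {
      await sftp.cwd(); // cheap liveness probe
      return;
    } catch (err) {
      context.log.warn('cached sftp connection is stale, reconnecting');
      sftpConnected = false;
    }
  }
  await sftp.connect({ host: 'myserver', username: 'user', passphrase: 'pwd' });
  sftpConnected = true;
};

sendToSFTP would then call ensureConn(context) in place of setConn(context).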
My Node.js server receives a stream of data from an external API.
Currently I serve my client only after receiving the data completely, like this:
async function getFile(req, res) {
  const { id } = req.body;
  const file = await get(process.env.FILE_API_URL + id);
  res.send(file);
}
But instead of waiting to receive the whole stream, I would like to start streaming it to the client as soon as I have some data. Kind of like this:
function getFile(req, res) {
  const { id } = req.body;
  const stream = get(process.env.FILE_API_URL + id);
  stream.on('data', (data) => {
    res.write(data);
  });
  stream.on('end', () => res.end());
}
How can I implement this?
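For reference, a minimal sketch of the streaming version using Node's built-in https module (assuming FILE_API_URL is an https URL and res is an Express-style response); pipe() forwards each chunk to the client as it arrives and handles backpressure for you:

const https = require('https');

function getFile(req, res) {
  const { id } = req.body;
  https.get(process.env.FILE_API_URL + id, (upstream) => {
    res.status(upstream.statusCode || 200);
    upstream.pipe(res); // starts writing as soon as the first chunk arrives
  }).on('error', (err) => {
    console.error(err);
    res.status(502).end();
  });
}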
Can anyone tell me what's wrong with the code below?
I am getting a timeout error for this function, even though I increased the timeout in the AWS basic settings; it still shows the same error.
In decrypting.js I decrypt the secret value and use it in index.js via await decryptSecret("S3_SECRET"). Is this the proper way?
Can anyone help me with the best solution for this issue?
index.js
const aws = require('aws-sdk');
require('dotenv').config();
const path = require("path");
const fs = require("fs");
const { decryptSecret } = require('./decrypting.js');

exports.handler = function () {
  try {
    const directoryPath = path.resolve(__dirname, process.env.LocalPath);
    fs.readdir(directoryPath, async function (error, files) {
      if (error) {
        console.log("Error getting directory information");
      } else {
        console.log("Loading lambda Function...");
        let cloudStorageFiles = [];
        aws.config.setPromisesDependency();
        aws.config.update({
          accessKeyId: process.env.S3_ACCESS_KEY,
          secretAccessKey: await decryptSecret("S3_SECRET"),
          // process.env.S3_SECRET,
          region: process.env.S3_REGION
        });
        const s3 = new aws.S3();
        const response = await s3.listObjectsV2({
          Bucket: 'xxxxx',
          Prefix: 'xxxxx'
        }, function (err, data) {
          if (err) {
            console.log(err, err.stack);
          } else {
            var contents = data.Contents;
            contents.forEach((content) => {
              cloudStorageFiles.push(content.Key);
            });
          }
        }).promise();
        console.log('First-Cloud-File-Storage:', cloudStorageFiles);
        // return cloudStorageFiles
      }
      console.log("Lambda function ended");
    });
    // return `Lambda function successfully completed`
  } catch (error) {
    console.log("Ooops...Error!", error);
  }
};
decrypting.js
const aws = require('aws-sdk');
aws.config.update({ region: 'us-east-1' });
const kms = new aws.KMS();
const decrypted = {};

exports.decryptSecret = async function (secretName) {
  if (decrypted[secretName]) {
    console.log('returning cached secret-name:' + secretName);
    return decrypted[secretName];
  }
  console.log('decrypting:' + secretName);
  try {
    const req = { CiphertextBlob: Buffer.from(process.env[secretName], 'base64') };
    const data = await kms.decrypt(req).promise();
    const decryptedVal = data.Plaintext.toString('ascii');
    decrypted[secretName] = decryptedVal;
    console.log('decryptedVal:', decryptedVal);
    return decryptedVal;
  } catch (error) {
    console.log('decrypt error:', error);
    throw error;
  }
};
Error message:
{
  "errorMessage": "2021-02-10T06:48:52.723Z 5dec4413-f8db-49bd-8075-661ccf6ef1a4 Task timed out after 50.02 seconds"
}
Logged output:
INFO Loading lambda Function...
INFO decryptingS3_SECRET
Your function is timing out because it does not have access to the internet. Since it is running inside a VPC, it must be placed in a private subnet to have outbound internet access; this applies to calls to S3 as well.
A private subnet in this case is a subnet whose default route (0.0.0.0/0) points to a NAT gateway and not an internet gateway.
Your function times out because a Lambda function associated with a VPC has no internet access by default. From the docs:
When you connect a function to a VPC in your account, the function can't access the internet unless your VPC provides access.
Consequently, your function can't connect to the public endpoints of S3 and KMS. To rectify this, there are two options (a code sketch of both follows the list):
1. Place your function in a private subnet (a public one will not work), set up a NAT gateway in a public subnet, and configure the route tables so that your function can reach the internet through the NAT. The process is explained here.
2. Set up VPC endpoints for KMS and S3. This allows your function to reach these services privately, without any internet access.
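For illustration only, here is roughly what both options look like in AWS CDK v2 JavaScript (the construct IDs are hypothetical, and the code assumes it runs inside a CDK Stack constructor):

const ec2 = require('aws-cdk-lib/aws-ec2');

// Option 1: private subnets whose default route points at a NAT gateway.
// CDK creates the route tables automatically when natGateways > 0.
const vpc = new ec2.Vpc(this, 'LambdaVpc', { natGateways: 1 });

// Option 2: VPC endpoints, so S3/KMS traffic never needs the internet.
vpc.addGatewayEndpoint('S3Endpoint', {
  service: ec2.GatewayVpcEndpointAwsService.S3,
});
vpc.addInterfaceEndpoint('KmsEndpoint', {
  service: ec2.InterfaceVpcEndpointAwsService.KMS,
});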
I'm trying to use the Microsoft Graph API to access OneDrive folders and files in Node.js. The authentication goes fine, and it returns the calendar as well, but I have used this:
var graph = require('@microsoft/microsoft-graph-client');
require('isomorphic-fetch');

module.exports = {
  getUserDetails: async function(accessToken) {
    const client = getAuthenticatedClient(accessToken);
    const user = await client.api('/me').get();
    return user;
  },

  // GetEventsSnippet
  getEvents: async function(accessToken) {
    const client = getAuthenticatedClient(accessToken);
    const events = await client
      .api('/me/events')
      .select('subject,organizer,start,end')
      .orderby('createdDateTime DESC')
      .get();
    return events;
  },

  getDrives: async function(accessToken) {
    const client = getAuthenticatedClient(accessToken);
    try {
      const drive = await client
        .api('/me/drive/root/children')
        .get();
      console.log(drive);
      return drive;
    } catch (e) {
      console.log(e);
    }
  }
};

function getAuthenticatedClient(accessToken) {
  // Initialize Graph client
  const client = graph.Client.init({
    // Use the provided access token to authenticate requests
    authProvider: (done) => {
      done(null, accessToken);
    }
  });
  console.log(client);
  return client;
}
And it doesn't return anything; it just says that I'm unauthenticated.
I am following this documentation: https://learn.microsoft.com/en-us/onedrive/developer/rest-api/api/drive_list?view=odsp-graph-online
How can I make this work?
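One thing worth checking (an assumption, since the token-acquisition code isn't shown): the /me/drive endpoints require a Files permission on the access token. If the token was requested with only User.Read and Calendars.Read, Graph rejects drive calls as unauthenticated even though /me and /me/events work. The scope list would need something like:

// Hypothetical OAuth scope list; 'files.read' (the Files.Read delegated
// permission) is what /me/drive/root/children needs.
const scopes = [
  'user.read',
  'calendars.read',
  'files.read'
];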
I'm trying to make a test bot that, upon being chatted to, responds with a (meaningless) string fetched from a JSON object through another API.
Code:
var restify = require('restify');
var builder = require('botbuilder');
var request = require('request-promise');

// Setup Restify server
var server = restify.createServer();
server.listen(process.env.port || process.env.PORT || 3978, function () {
  console.log('%s listening to %s', server.name, server.url);
});

// Create chat connector for communicating with the Bot Framework Service
var connector = new builder.ChatConnector({
  appId: process.env.MicrosoftAppId,
  appPassword: process.env.MicrosoftAppPassword
});

// Listen for messages from users
server.post('/api/messages', connector.listen());

// Receive messages from the user and respond with the string fetched from the API
var bot = new builder.UniversalBot(connector, function (session) {
  var text = await MyRequest()
  session.send("%s", text);
});

async function MyRequest() {
  var options = {
    uri: "https://jsonplaceholder.typicode.com/posts/1",
    method: "GET",
    json: true
  }
  try {
    var result = await request(options);
    return result;
  } catch (err) {
    console.error(err);
  }
}
The problem is that the callback passed to UniversalBot isn't an async function, so I can't put await in it. If I remove the await, the bot replies with [object Promise]. I'm fairly inexperienced in JS overall, so can I get any pointers?
Edit: The request part works great; I've tested it alone in a different JS program.
Have you tried this? If you are using an ES6-compatible Node environment, this should work:
var bot = new builder.UniversalBot(connector, async function (session) {
  // Use JSON.stringify() if the MyRequest promise resolves to an object
  var text = await MyRequest();
  session.send("%s", text);
});
If async/await isn't possible, how about returning a promise, like below:
function MyRequest() {
  var options = {
    uri: "https://jsonplaceholder.typicode.com/posts/1",
    method: "GET",
    json: true
  };
  return request(options);
}
And use Promise.then to act on the result, like so:
var bot = new builder.UniversalBot(connector, function (session) {
  MyRequest().then(function(text) {
    session.send("%s", text);
  }).catch(function(error) {
    session.send("%s", error);
  });
});
I have a function that triggers on a Firebase database onWrite event. The function body uses two Google Cloud APIs (DNS and Storage).
While the function runs and mostly works as expected, the issue is that I get "socket hang up" errors more often than I'd like (about 50% of the time).
My questions are:
Is this similar to what other testers have experienced? Is it a known outstanding issue, or expected behavior?
The example code is as follows:
const functions = require('firebase-functions');
const admin = require('firebase-admin');
const {credentials} = functions.config().auth;
credentials.private_key = credentials.private_key.replace(/\\n/g, '\n');
const config = Object.assign({}, functions.config().firebase, {credentials});
admin.initializeApp(config);
const gcs = require('@google-cloud/storage')({credentials});
const dns = require('@google-cloud/dns')({credentials});
const zoneName = 'applambda';
const zone = dns.zone(zoneName);

exports.createDeleteDNSAndStorage = functions.database.ref('/apps/{uid}/{appid}/name')
  .onWrite(event => {
    // Only edit data when it is first created.
    const {uid, appid} = event.params;
    const name = event.data.val();
    const dbRef = admin.database().ref(`/apps/${uid}/${appid}`);
    if (event.data.previous.exists()) {
      console.log(`already exists ${uid}/${appid}`);
      return;
    }
    // Exit when the data is deleted.
    if (!event.data.exists()) {
      console.log(`data is being deleted ${uid}/${appid}`);
      return;
    }
    const url = `${name}.${zoneName}.com`;
    console.log(`data: ${uid}/${appid}/${name}\nsetting up: ${url}`);
    setupDNS({url, dbRef});
    setupStorage({url, dbRef});
    return;
  });

function setupDNS({url, dbRef}) {
  // Create a CNAME record.
  let cnameRecord = zone.record('cname', {
    name: `${url}.`,
    data: 'c.storage.googleapis.com.',
    ttl: 3000
  });
  zone.addRecords(cnameRecord).then(function() {
    console.log(`done setting up zonerecord for ${url}`);
    dbRef.update({dns: url}).then(res => console.log(res)).catch(err => console.log(err));
  }).catch(function(err) {
    console.error(`error setting up zonerecord for ${url}`);
    console.error(err);
  });
}

function setupStorage({url, dbRef}) {
  console.log(`setting up storage bucket for ${url}`);
  gcs.createBucket(url, {
    website: {
      mainPageSuffix: `https://${url}`,
      notFoundPage: `https://${url}/404.html`
    }
  }).then(function(res) {
    let bucket = res[0];
    console.log(`created bucket ${url}, setting it as public`);
    dbRef.update({storage: url}).then(function() {
      console.log(`done setting up bucket for ${url}`);
    }).catch(function(err) {
      console.error(`db update for storage failed ${url}`);
      console.error(err);
    });
    bucket.makePublic().then(function() {
      console.log(`bucket set as public for ${url}`);
    }).catch(function(err) {
      console.error(`setting public for storage failed ${url}`);
      console.error(err);
    });
  }).catch(function(err) {
    console.error(`creating bucket failed ${url}`);
    console.error(err);
  });
}
I'm thinking your function needs to return a promise so that all the async work has time to complete before the function shuts down. As it's written now, your function simply returns immediately, without waiting for the work to complete.
I don't know the cloud APIs you're using very well, but I'd guess that you should make setupDNS() and setupStorage() return the promises from the async work they're doing, then return Promise.all() with those two promises, to let Cloud Functions know it should wait until all that work is complete before cleaning up the container that's running the function.
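A minimal sketch of that suggestion, reusing the question's own names (only the returns change; the promise chains are condensed from the original):

// Return the promise chains so Cloud Functions waits for them to settle.
function setupDNS({url, dbRef}) {
  const cnameRecord = zone.record('cname', {
    name: `${url}.`,
    data: 'c.storage.googleapis.com.',
    ttl: 3000
  });
  return zone.addRecords(cnameRecord)
    .then(() => dbRef.update({dns: url}));
}

function setupStorage({url, dbRef}) {
  return gcs.createBucket(url, {
    website: {
      mainPageSuffix: `https://${url}`,
      notFoundPage: `https://${url}/404.html`
    }
  }).then(([bucket]) => Promise.all([
    dbRef.update({storage: url}),
    bucket.makePublic()
  ]));
}

// ...and in the onWrite handler, instead of calling them and returning undefined:
// return Promise.all([setupDNS({url, dbRef}), setupStorage({url, dbRef})]);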