I'm trying to call Redis from a Twilio Function (serverless), but I don't see any incoming connections in my Redis log.
Is this setup viable?
Sample code follows:
const Redis = require('ioredis');

exports.handler = function (context, event, callback) {
  // Redis settings live in the private config.json asset
  const config = JSON.parse(Runtime.getAssets()['config.json'].open());
  const redisClientConfig = config.redisConfig;
  const contactCacheTime = config.contactCacheTime;

  if (!redisClientConfig) {
    throw new Error('Redis config not set.');
  }

  const redisClient = new Redis(redisClientConfig);
  redisClient.on('error', (err) => {
    console.error(`Cannot connect to redis, reason: ${err.message || err}`);
  });

  const key = 'mhn-twilio-bot-contact:'.concat(event.contactKey);

  // GETEX reads the value and refreshes its TTL; SETEX writes it with a TTL
  redisClient.getex(key, 'EX', contactCacheTime)
    .then((res) => {
      if (!res) {
        redisClient.setex(key, contactCacheTime, '<CACHED-VALUE>');
      }
      callback(null, { cached: res ? true : false });
    })
    .catch((err) => {
      callback(null, { cached: false });
    });
};
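A side note, offered only as a sketch under the assumption that the variables from the snippet above are unchanged, not as a confirmed fix: the client is never closed, so each invocation leaves a connection behind. In a short-lived serverless handler it is usually worth quitting the client before signalling completion, which also makes it easier to reason about what should show up in the Redis log. The snippet below would replace the GETEX/SETEX block inside the handler:

  // Sketch: same GETEX/SETEX flow, but the client is closed before returning.
  redisClient.getex(key, 'EX', contactCacheTime)
    .then(async (res) => {
      if (!res) {
        await redisClient.setex(key, contactCacheTime, '<CACHED-VALUE>');
      }
      await redisClient.quit(); // close the connection cleanly
      callback(null, { cached: Boolean(res) });
    })
    .catch(async (err) => {
      console.error(err.message || err);
      await redisClient.quit().catch(() => {}); // best-effort cleanup
      callback(null, { cached: false });
    });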
Whenever streamedItem.eventName === "INSERT" for my table, I would like to POST streamedItem.dynamodb.NewImage (I think) to Zapier so I can automate social media posting.
Below is my current file; in CloudWatch I am getting the error ERROR connect ECONNREFUSED 127.0.0.1:3000.
/* Amplify Params - DO NOT EDIT
	ENV
	REGION
	ZAPIER_URL
Amplify Params - DO NOT EDIT */

/**
 * @type {import('@types/aws-lambda').APIGatewayProxyHandler}
 */
const http = require("http"); // or https

const defaultOptions = {
  host: "localhost",
  port: 3000, // or 443 for https
  headers: {
    "Content-Type": "application/json",
  },
};

const post = (path, payload) =>
  new Promise((resolve, reject) => {
    const options = { ...defaultOptions, path, method: "POST" };
    console.log("OPTIONS", options);
    const req = http.request(options, (res) => {
      let buffer = "";
      res.on("data", (chunk) => (buffer += chunk));
      console.log("buffer", buffer);
      res.on("end", () => resolve(JSON.parse(buffer)));
    });
    req.on("error", (e) => reject(e.message));
    req.write(JSON.stringify(payload));
    req.end();
  });

const zapierURL = "https://hooks.zapier.com/hooks/catch/xxxxxx/bljks0k/";

exports.handler = async (event, context) => {
  for (const streamedItem of event.Records) {
    if (streamedItem.eventName === "INSERT") {
      console.log(
        "streamedItem.dynamodb.NewImage",
        streamedItem.dynamodb.NewImage
      );
      try {
        await post(zapierURL, streamedItem.dynamodb.NewImage);
      } catch (err) {
        console.log("ERROR", err);
      }
    }
  }
};
I would like the environment variables set up in a way that lets me target localhost, the development server, or the production website.
I am using AWS Amplify, Next.js, and JavaScript.
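For reference, here is a minimal sketch (not the code above; it assumes ZAPIER_URL holds the full hook URL) of the kind of request that avoids ECONNREFUSED 127.0.0.1:3000: the POST has to target the webhook's own host over HTTPS rather than the localhost:3000 defaults, e.g. by deriving host and path from the URL:

const https = require("https");

// Hypothetical replacement for post(): derive host/path from the hook URL.
const postJson = (urlString, payload) =>
  new Promise((resolve, reject) => {
    const url = new URL(urlString); // e.g. process.env.ZAPIER_URL
    const body = JSON.stringify(payload);
    const req = https.request(
      {
        hostname: url.hostname,
        path: url.pathname + url.search,
        method: "POST",
        headers: {
          "Content-Type": "application/json",
          "Content-Length": Buffer.byteLength(body),
        },
      },
      (res) => {
        let buffer = "";
        res.on("data", (chunk) => (buffer += chunk));
        res.on("end", () => resolve(buffer));
      }
    );
    req.on("error", reject);
    req.write(body);
    req.end();
  });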
Full-stack web application using React, Node.js, and WebSockets. My project is based on React with an Express server. When trying to connect to Socket.IO from Chrome, I receive a "WebSocket is closed before the connection is established" message.
"editorpage.js"
useEffect(() => {
  const init = async () => {
    socketRef.current = await initSocket();
    socketRef.current.on('connect_error', (err) => handleErrors(err));
    socketRef.current.on('connect_failed', (err) => handleErrors(err));

    function handleErrors(e) {
      console.log('Socket Error', e);
      toast.error('Socket Connection Failed, Try Again Later.');
      reactNavigator('/');
    }

    socketRef.current.emit(ACTIONS.JOIN, {
      roomId,
      username: location.state?.username,
    });

    // Listening for joined event
    socketRef.current.on(
      ACTIONS.JOINED,
      ({ clients, username, socketId }) => {
        if (username !== location.state?.username) {
          toast.success(`${username} joined the room.`);
          console.log(`${username} joined`);
        }
        setClients(clients);
        socketRef.current.emit(ACTIONS.SYNC_CODE, {
          code: codeRef.current,
          socketId,
        });
      }
    );

    // Listening for disconnected
    socketRef.current.on(
      ACTIONS.DISCONNECTED,
      ({ socketId, username }) => {
        toast.success(`${username} left the room.`);
        setClients((prev) => {
          return prev.filter(
            (client) => client.socketId !== socketId
          );
        });
      }
    );
  };
  init();

  return () => {
    socketRef.current?.disconnect();
    socketRef.current?.off(ACTIONS.JOINED);
    socketRef.current?.off(ACTIONS.DISCONNECTED);
  };
  // eslint-disable-next-line react-hooks/exhaustive-deps
}, []);
This is the error I get when running in Google Chrome.
"socket.js"
import { io } from 'socket.io-client';

export const initSocket = async () => {
  const options = {
    'force new connection': true,
    reconnectionAttempt: 'Infinity',
    timeout: 10000,
    transports: ['websocket'],
  };
  return io(process.env.REACT_APP_BACKEND_URL, options);
};
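As an aside, and only as a sketch against the documented socket.io-client options (not a confirmed cause of the error): reconnectionAttempt: 'Infinity' is not a documented option name; the documented names are reconnectionAttempts (a number) and forceNew ('force new connection' is its legacy alias), so an equivalent options object would look like this:

import { io } from 'socket.io-client';

export const initSocket = async () => {
  const options = {
    forceNew: true,
    reconnectionAttempts: Infinity, // numeric, not the string 'Infinity'
    timeout: 10000,
    transports: ['websocket'],
  };
  return io(process.env.REACT_APP_BACKEND_URL, options);
};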
"server.js"
const express = require('express');
const app = express();
const http = require('http');
const { Server } = require('socket.io');
const ACTIONS = require('./src/Actions');

const server = http.createServer(app);
const io = new Server(server);

const userSocketMap = {};

function getAllConnectedClients(roomId) {
  // Map
  return Array.from(io.sockets.adapter.rooms.get(roomId) || []).map(
    (socketId) => {
      return {
        socketId,
        username: userSocketMap[socketId],
      };
    }
  );
}

io.on('connection', (socket) => {
  console.log('socket connected', socket.id);

  socket.on(ACTIONS.JOIN, ({ roomId, username }) => {
    userSocketMap[socket.id] = username;
    socket.join(roomId);
    const clients = getAllConnectedClients(roomId);
    clients.forEach(({ socketId }) => {
      io.to(socketId).emit(ACTIONS.JOINED, {
        clients,
        username,
        socketId: socket.id,
      });
    });
  });

  socket.on(ACTIONS.CODE_CHANGE, ({ roomId, code }) => {
    socket.in(roomId).emit(ACTIONS.CODE_CHANGE, { code });
  });

  socket.on(ACTIONS.SYNC_CODE, ({ socketId, code }) => {
    io.to(socketId).emit(ACTIONS.CODE_CHANGE, { code });
  });

  socket.on('disconnecting', () => {
    const rooms = [...socket.rooms];
    rooms.forEach((roomId) => {
      socket.in(roomId).emit(ACTIONS.DISCONNECTED, {
        socketId: socket.id,
        username: userSocketMap[socket.id],
      });
    });
    delete userSocketMap[socket.id];
    socket.leave();
  });
});

const PORT = process.env.PORT || 5001;
server.listen(PORT, () => console.log(`Listening on port ${PORT}`));
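One pattern that commonly produces exactly this warning in development (offered as a hedged sketch, not a confirmed diagnosis of this project): under React 18's StrictMode every effect is mounted, cleaned up, and mounted again, so the cleanup in editorpage.js calls disconnect() while the first socket's handshake is still in flight. Having each effect run tear down only the socket it created keeps that race contained:

useEffect(() => {
  let socket; // the instance owned by this effect run

  const init = async () => {
    socket = await initSocket();
    socketRef.current = socket;
    // ...attach the same listeners and emits as in editorpage.js above...
  };
  init();

  return () => {
    socket?.off(ACTIONS.JOINED);
    socket?.off(ACTIONS.DISCONNECTED);
    socket?.disconnect();
  };
  // eslint-disable-next-line react-hooks/exhaustive-deps
}, []);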
I've been reading about async and await in JS and tried to implement them in my code (which I totally messed up).
Here is my JS.
var express = require('express');
var router = express.Router();
var jsforce = require('jsforce');

const SEC_TOKEN = 'SEC_TOKEN';
const USER_ID = 'USER_ID';
const PASSWORD = 'PWD';
const { default: axios } = require('axios');

router.get("/", async (req, res, next) => {
  await initConnect;
  await soqlData;
  await slackPostTest;
});

initConnect = async () => {
  var conn = new jsforce.Connection({
    loginUrl: 'https://login.salesforce.com'
  });
  await conn.login(USER_ID, PASSWORD + SEC_TOKEN, (err, userInfo) => {
    if (err)
      console.log(err);
    else {
      console.log(userInfo.Id);
    }
  });
}

soqlData = async () => {
  await conn.query('Select Id, Name from Account LIMIT 1', (err, data) => {
    if (err)
      console.log(err);
    else
      return data.records[0];
  })
}

slackPostTest = async () => {
  await axios.post('SLACK_WEBHOOK', {
    "text": "soqlRes"
  })
}

module.exports = router;
What am I trying to achieve?
Initialize my connection by passing SEC_TOKEN, USER_ID, and PASSWORD to my initConnect function; this will give me a connection (conn).
Use this conn to query my Salesforce instance and get some other data.
Post some message (currently irrelevant, but I will hook it up to the above response later) to my Slack endpoint.
Also, can someone please give me a somewhat detailed explanation of the solution (in terms of async/await)?
Thanks
Assuming everything else about your JsForce API usage was correct (I have no experience with it, so I can't say), here's how to promisify those callback-based APIs and call them.
var express = require("express");
var router = express.Router();
var jsforce = require("jsforce");
const SEC_TOKEN = "SEC_TOKEN";
const USER_ID = "USER_ID";
const PASSWORD = "PWD";
const { default: axios } = require("axios");
router.get("/", async (req, res, next) => {
const { conn, userInfo } = await initConnect();
const data = await soqlData(
conn,
"Select Id, Name from Account LIMIT 1",
);
await slackPostTest(data.records[0]);
});
function initConnect() {
const conn = new jsforce.Connection({
loginUrl: "https://login.salesforce.com",
});
return new Promise((resolve, reject) => {
conn.login(USER_ID, PASSWORD + SEC_TOKEN, (err, userInfo) => {
if (err) return reject(err);
resolve({ conn, userInfo });
});
});
}
function soqlData(conn, query) {
return new Promise((resolve, reject) => {
conn.query(query, (err, data) => {
if (err) return reject(err);
resolve(data);
});
});
}
function slackPostTest(soqlRes) {
return axios.post("SLACK_WEBHOOK", {
text: soqlRes,
});
}
module.exports = router;
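A small addition worth considering (a sketch, not part of the answer above): since each helper now returns a promise, the route can wrap the awaits in try/catch and actually respond to the HTTP caller, which the snippet above never does:

router.get("/", async (req, res, next) => {
  try {
    const { conn } = await initConnect(); // resolves once login succeeds
    const data = await soqlData(conn, "Select Id, Name from Account LIMIT 1");
    await slackPostTest(data.records[0]);
    res.json(data.records[0]); // reply to the HTTP caller
  } catch (err) {
    next(err); // hand the error to Express error handling
  }
});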
I want to use pg-promise with Feathers and Vue.js to display the results of a query run on a Postgres DB. My sql.js looks as follows:
sql.js
const express = require('express');
const jsonexport = require('jsonexport');
const dbgeo = require('dbgeo');

const router = express.Router();

function dbGeoParse(data) {
  return new Promise((resolve, reject) => {
    dbgeo.parse(
      data,
      {
        outputFormat: 'geojson',
      },
      (err, result) => {
        if (err) {
          reject(err);
        } else {
          resolve(result);
        }
      },
    );
  });
}

// expose sql endpoint, grab query as URL parameter and send it to the database
router.get('/sql', (req, res) => {
  const { app } = req;
  const sql = req.query.q;
  const format = req.query.format || 'topojson';
  console.log(`Executing SQL: ${sql}`, format); // eslint-disable-line
  // query using pg-promise
  app.db
    .any(sql)
    .then((data) => {
      console.log('data', data);
      // use dbgeo to convert WKB from PostGIS into topojson
      return dbGeoParse(data).then((geojson) => {
        res.setHeader('Content-disposition', 'attachment; filename=query.geojson');
        res.setHeader('Content-Type', 'application/json');
        return geojson;
      });
    })
    .then((data) => {
      res.send(data);
    })
    .catch((err) => {
      // send the error message if the query didn't work
      const msg = err.message || err;
      console.log('ERROR:', msg); // eslint-disable-line
      res.send({
        error: msg,
      });
    });
});

module.exports = router;
I have configured my app.js to use sql.js as a route by adding the following lines to it:
const pgp = require('pg-promise')({
  query(e) {
    console.log(e.query); // eslint-disable-line
  },
});

const connectionString = 'postgres://abc:abc123@localhost:5678/gisdb';

// initialize database connection
const app = express(feathers());
app.db = pgp(connectionString);
app.use('/sql', require('sql'));
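One detail worth double-checking here (an assumption about the file layout on my part, not something the post confirms): require('sql') asks Node for an installed package named sql, whereas a local router file would normally be pulled in with a relative path, e.g.:

// assuming sql.js sits next to app.js
app.use('/sql', require('./sql'));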
But when I call the handleSubmit function shown below from my Vue component,
handleSubmit() {
  const SQL = "select * from spatial_search(searchmethod := 'id',sale_price :=10000,tax_ogcid := 84678,distance := 0.5)";
  const queryType = 'sql';
  fetch(`/${queryType}?q=${encodeURIComponent(SQL)}`)
    .then(res => { console.log('res', res.json()); });
}
I get the following error
Uncaught (in promise) SyntaxError: Unexpected token < in JSON at position 0
I think I might be doing something wrong here. Can someone help me?
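As a general note, not a diagnosis of this particular setup: Uncaught (in promise) SyntaxError: Unexpected token < in JSON at position 0 means the response body starts with <, i.e. the server returned an HTML page (typically a 404 or error page) instead of JSON, and res.json() then fails to parse it. A sketch that surfaces the real response before parsing:

fetch(`/${queryType}?q=${encodeURIComponent(SQL)}`)
  .then(async (res) => {
    if (!res.ok) {
      // likely an HTML error page rather than JSON
      throw new Error(`HTTP ${res.status}: ${await res.text()}`);
    }
    const data = await res.json();
    console.log('res', data);
  })
  .catch((err) => console.error(err));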
I am trying to set up a local testing environment for my Firebase Cloud Functions. However, I run into problems when trying to make a fake call to one of my HTTP functions.
The reason for my error seems to be that I am using the cors package (npm). When I remove cors and run the function "test" seen below with just a response.status(200), everything works. But when I wrap it in cors(req, res), my test fails with TypeError: Cannot read property 'origin' of undefined.
What am I doing wrong here?
In index.js -->
const functions = require('firebase-functions');
const cors = require('cors')({ origin: true });

exports.test = functions.https.onRequest((request, response) => {
  cors(request, response, () => {
    response.status(200);
    response.send("test ok");
  });
});
In my test.js
describe('Cloud Functions', () => {
  // [START stubConfig]
  var myFunctions, configStub, adminInitStub, functions, admin, cors;

  before(() => {
    // Since index.js makes calls to functions.config and admin.initializeApp at the top of the file,
    // we need to stub both of these functions before requiring index.js. This is because the
    // functions will be executed as a part of the require process.
    // Here we stub admin.initializeApp to be a dummy function that doesn't do anything.
    admin = require('firebase-admin');
    cors = require('cors')({
      origin: true
    });
    adminInitStub = sinon.stub(admin, 'initializeApp');
    // Next we stub functions.config(). Normally config values are loaded from Cloud Runtime Config;
    // here we'll just provide some fake values for firebase.databaseURL and firebase.storageBucket
    // so that an error is not thrown during admin.initializeApp's parameter check
    functions = require('firebase-functions');
    configStub = sinon.stub(functions, 'config').returns({
      firebase: {
        databaseURL: 'https://not-a-project.firebaseio.com',
        storageBucket: 'not-a-project.appspot.com',
      }
      // You can stub any other config values needed by your functions here, for example:
      // foo: 'bar'
    });
    // Now we can require index.js and save the exports inside a namespace called myFunctions.
    // This includes our cloud functions, which can now be accessed at myFunctions.makeUppercase
    // and myFunctions.addMessage
    myFunctions = require('../index');
  });

  after(() => {
    // Restoring our stubs to the original methods.
    configStub.restore();
    adminInitStub.restore();
  });
  // [END stubConfig]

  describe('test', () => {
    it('should return status code 200', (done) => {
      // [START invokeHTTPS]
      // A fake request object, with req.query.text set to 'input'
      const req = {};
      // A fake response object, with a stubbed redirect function which asserts that it is called
      // with parameters 303, 'new_ref'.
      const res = {
        status: (status) => {
          assert.equal(status, 200);
          done();
        }
      };
      // Invoke addMessage with our fake request and response objects. This will cause the
      // assertions in the response object to be evaluated.
      myFunctions.test(req, res);
      // [END invokeHTTPS]
    })
  })
})
Try this:
Instead of using const req = {}; use:
const req = {
  headers: { origin: true },
};
Here's how I got around the cors errors, with sinon
const res = {};
Object.assign(res, {
  status: sinon.stub().returns(res),
  end: sinon.stub().returns(res),
  json: sinon.stub().returns(res),
  setHeader: sinon.stub(),
  getHeader: sinon.stub(),
});

beforeEach(() => {
  Object.values(res).forEach(stub => stub.resetHistory());
});
Then, in your test, you can test your responses:
cloudFunctions.testFunction({ query: { text: 'foo' } }, res);
response = res.json.lastCall.args[0];
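For example, with Mocha/Chai-style assert (and a hypothetical expected payload), the sinon stubs above can be asserted on directly:

assert.ok(res.status.calledWith(200));        // the function set a 200 status
assert.deepEqual(response, { text: 'FOO' });  // hypothetical payload shape returned by testFunction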
Finally, the unit test with a Firebase Function and cors is working.
You have to add setHeader and getHeader to the response:
setHeader: (key, value) => {},
getHeader: (value) => {},
The test
const test = require('firebase-functions-test')()

let myFunctions

beforeAll(() => {
  myFunctions = require('../src/index')
})

afterAll(() => {
  test.cleanup()
})

it('get', async () => {
  let request = { headers: { origin: true } }
  const response = {
    setHeader: (key, value) => {},
    getHeader: (value) => {},
    status: (code) => {
      expect(code).toBe(200)
      return {
        send: (body) => {
          expect(body).toBe('ok')
        }
      }
    }
  }
  myFunctions.addLink(request, response)
})
The Index
const functions = require('firebase-functions')
const cors = require('cors')({
  origin: true
})

exports.addLink = functions.https.onRequest((req, res) => {
  return cors(req, res, () => {
    res.status(200).send('ok')
  })
})
Add headers: { origin: '*' } to your request and a setHeader() {} stub to your response:
const req = {
  headers: { origin: '*' },
};
const res = {
  setHeader: (key, value) => {},
  status: (status) => {
    assert.equal(status, 200);
    done();
  }
};