Can this somehow, down the line, break the flow it belongs to?
I have a Lambda that works behind an API Gateway WebSocket endpoint.
It simply takes a clientId and a message payload, queries DynamoDB for all connections registered for that clientId (a multi-device realtime dashboard frontend), and updates all interested users.
It works fine when tested through wscat on the command line, but it is buggy in a real-world browser using the JavaScript WebSocket API or the C# WebSocket API.
Does this exception have anything to do with it?
const AWS = require("aws-sdk");
let dynamo = new AWS.DynamoDB.DocumentClient();
require("aws-sdk/clients/apigatewaymanagementapi");
const ORDERS_TABLE = "ordersTable";
const successfullResponse = {
statusCode: 200,
body: "everything is alright"
};
module.exports.sendMessageHandler = (event, context, callback) => {
console.log(event);
sendMessageToAllConnectedClientDevices(event)
.then(data => {
console.log("sucesso", data);
callback(null, successfullResponse);
})
.catch(err => {
console.log("erro: ", err);
callback(null, JSON.stringify(err));
});
};
const sendMessageToAllConnectedClientDevices = async event => {
try {
const body = JSON.parse(event.body);
const { clientId } = body;
console.log(
"handler.sendMessageToAllConnectedClientDevices.clientId: ",
clientId
);
const connectionIds = await getConnectionIds(clientId);
return await Promise.all(
connectionIds.Items.map(connectionId => {
send(event, connectionId.connectionId);
})
);
} catch (error) {
console.log("erro sendMessageToAllConnectedClientDevices");
return error;
}
};
const getConnectionIds = async clientId => {
console.log("handler.getConnectionIds.clientId: ", clientId);
const params = {
TableName: ORDERS_TABLE,
// IndexName: "client_gsi",
FilterExpression: "clientId = :cliend_id",
// KeyConditionExpression: "clientId = :cliend_id",
ProjectionExpression: "connectionId",
ExpressionAttributeValues: {
":cliend_id": clientId
}
};
console.log("handler.getConnectionIds.params: ", JSON.stringify(params));
const data = await dynamo.scan(params).promise();
return data;
};
const send = async (event, connectionId) => {
const body = JSON.parse(event.body);
const postData = body.data;
const endpoint =
event.requestContext.domainName + "/" + event.requestContext.stage;
const apigwManagementApi = new AWS.ApiGatewayManagementApi({
apiVersion: "2018-11-29",
endpoint: endpoint
});
const params = {
ConnectionId: connectionId,
Data: postData
};
return await apigwManagementApi.postToConnection(params).promise();
};
ERROR Unhandled Promise Rejection
I think the problem is with API Gateway. Check how you are handling the information passed through to the Lambda function, because a browser sends some extra information compared to a command-line call.
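To make that comparison concrete, here is a minimal sketch (not from the answer) of how one might log and guard the incoming payload before querying DynamoDB; it assumes the clientId/data field names from the question's code, and the helper name parseBody is made up for illustration:
// Hypothetical helper: log the raw event body and fail fast on malformed input,
// so any difference between wscat and browser payloads shows up in CloudWatch.
const parseBody = event => {
  console.log("raw event.body:", event.body);
  let body;
  try {
    body = JSON.parse(event.body);
  } catch (e) {
    throw new Error("event.body is not valid JSON: " + event.body);
  }
  if (!body.clientId || !body.data) {
    throw new Error("payload is missing clientId or data: " + JSON.stringify(body));
  }
  return body;
};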
I am attempting an axios POST using Node.js.
Making the call to the API takes two steps: first a POST to get an access token, then a GET with that access token.
I have accomplished the call in C#:
var baseUri = new Uri("www.example.com");
var requestToken = new HttpRequestMessage
{
Method = HttpMethod.Post,
RequestUri = new Uri(baseUri, "oauth/token"),
Content = new StringContent("grant_type=client_credentials&client_id=someIDnumHere&client_secret=somePassword")
};
using (HttpClient client = new HttpClient())
{
using (HttpResponseMessage response = client.SendAsync(requestToken).Result)
{
using (HttpContent content = response.Content)
{
var json = content.ReadAsStringAsync().Result;
dynamic jsonData = JsonConvert.DeserializeObject<dynamic>(json);
var accessy = jsonData.access_token;
accessTokens.Add(accessy.ToString());
}
}
}
and even in the Firefox REST Client,
but I have failed (Bad Request) in Node.js.
This is what I tried:
let urly = 'https://example.com';
const newPost = {
body: 'grant_type=client_credentials&client_id=someIDHere&client_secret=somePasswordHere'
};
const sendPostRequest = async () => {
try {
const resp = await axios.post(urly, newPost);
console.log(resp.data);
} catch (err) {
// Handle Error Here
console.error(err);
}
};
sendPostRequest();
Any help is appreciated.
Thanks to @zemaj:
let urly = "https://example.com";
const sendPostRequest = async () => {
try {
const resp = await axios.post(
urly,
"grant_type=client_credentials&client_id=someIDHereclient_secret=somePasswordHere"
);
console.log(resp.data);
} catch (err) {
// Handle Error Here
console.error(err);
}
};
sendPostRequest();
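For context on why this works: by default axios serializes a plain object into JSON, so the original { body: '...' } argument was sent as a JSON document the token endpoint could not parse, while passing the raw form-encoded string (or a URLSearchParams instance) goes out as application/x-www-form-urlencoded. Here is a sketch of the whole two-step flow under that assumption; the URLs, credential values, and the Bearer header on the GET are placeholders, not taken from the question:
const axios = require("axios");

// Step 1: POST the client credentials as form data to get an access token.
const getToken = async () => {
  const params = new URLSearchParams({
    grant_type: "client_credentials",
    client_id: "someIDHere",
    client_secret: "somePasswordHere",
  });
  // axios sends URLSearchParams as application/x-www-form-urlencoded
  const resp = await axios.post("https://example.com/oauth/token", params);
  return resp.data.access_token;
};

// Step 2: GET the protected resource using that token.
const getResource = async () => {
  const token = await getToken();
  const resp = await axios.get("https://example.com/some/resource", {
    headers: { Authorization: `Bearer ${token}` },
  });
  return resp.data;
};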
I am somewhat new to coding and recently created this script to pull data from Zoom and push it to Google Drive via API. I am trying to run this as a Google Cloud Function, but when I run it there and console log each step in the process, it seems like the uploadFile function, specifically the drive.files.create method, is being skipped. Every other step is console logged, but neither the err nor the res is logged after the drive.files.create method. Google Cloud Functions does not show errors; instead it shows OK and that the function took 1500ms to execute. It works fine on my local machine; I am only having issues in Cloud Functions. Any suggestions on how to get this to act right would be super helpful. Thank you!
const axios = require("axios");
require("dotenv").config();
const stream = require("stream");
const request = require("request");
const { google } = require("googleapis");
const KEYFILEPATH = "./credentials.json";
const SCOPES = ["https://www.googleapis.com/auth/drive"];
const auth = new google.auth.GoogleAuth({
keyFile: KEYFILEPATH,
scopes: SCOPES,
});
let today = new Date().toISOString();
let zoomAccessToken;
let zoomDownloadUrl;
///////////////////////////////////////////////////////////////// Searching for latest Town Hall recording in Google.
const searchFile = async (auth) => {
const service = google.drive({ version: "v3", auth });
const files = [];
try {
const res = await service.files.list({
corpora: "drive",
includeItemsFromAllDrives: true,
supportsAllDrives: true,
driveId: "XXXXXXXXXXXXXXXX",
q: '"XXXXXXXXXXXXXXX" in parents',
fields: "nextPageToken, files(id, name)",
spaces: "drive",
});
Array.prototype.push.apply(files, res.files);
const filesArray = res.data.files;
const filesName = filesArray.map((x) => x.name).sort().reverse()[0];
console.log(filesName);
return filesName;
} catch (err) {
throw err;
}
};
///////////////////////////////////////////////////////////////// Get Zoom OAuth access token.
const getAccessToken = async () => {
return axios({
method: "post",
url: `https://zoom.us/oauth/token?grant_type=account_credentials&account_id=${process.env.ZOOM_ACCOUNT_ID}`,
headers: {
Authorization: "Basic" +new Buffer.from(process.env.ZOOM_CLIENT_ID + ":" + process.env.ZOOM_CLIENT_SECRET).toString("base64"),
},
});
};
///////////////////////////////////////////////////////////////// Get the latest Town Hall recording's data.
const getRecordingData = async () => {
const token = await getAccessToken();
zoomAccessToken = await token.data.access_token;
return axios({
method: "get",
url: "https://api.zoom.us/v2/meetings/XXXXXXXXX/recordings",
headers: {
"Content-Type": "application/json",
Authorization: `Bearer ${zoomAccessToken}`,
},
});
};
///////////////////////////////////////////////////////////////// Get the latest Town Hall recording's date.
const getRecordingDate = async () => {
const recording = await getRecordingData();
const lastRecordingDate = await recording.data.start_time;
const recordingDateFormatted = `${lastRecordingDate.substring(0,4)}.${lastRecordingDate.substring(5, 7)}.${lastRecordingDate.substring(8,10)} - Town Hall.mp4`;
return recordingDateFormatted;
};
///////////////////////////////////////////////////////////////// Get the latest Town Hall recording's download link.
const zoomDownloadLink = async () => {
const recording = await getRecordingData();
zoomDownloadUrl = `${recording.data.recording_files[0].download_url}?access_token=${zoomAccessToken}`;
return zoomDownloadUrl;
};
///////////////////////////////////////////////////////////////// Upload data from latest Town Hall recording's download link to Google Drive.
const uploadFile = async (auth) => {
const buffer = await zoomDownloadLink();
const bs = new stream.PassThrough();
request(buffer).pipe(bs);
const drive = google.drive({ version: "v3", auth });
var fileMetadata = {
name: `${today.substring(0, 4)}.${today.substring(5, 7)}.${today.substring(8,10)} - Town Hall.mp4`,
parents: ["XXXXXXXXXXXXXXXXX"],
};
var media = {
mimeType: "video/mp4",
body: bs,
};
drive.files.create(
{
resource: fileMetadata,
media: media,
fields: "id",
uploadType: "resumable",
supportsAllDrives: true,
},
function (err, res) {
if (err) {
console.log(err);
} else {
console.log("File Id: ", res.data.id);
}
}
);
};
///////////////////////////////////////////////////////////////// Compares Town Hall files in Google Drive and Zoom. If different, run uploadFile function.
exports.townHall = async () => {
const townHallFile = await searchFile(auth);
const lastRecordingDate = await getRecordingDate();
if (townHallFile != lastRecordingDate) {
await uploadFile(auth);
} else {
console.log("No Recording Today");
}
};
You are calling an API inside a Cloud Function: uploadFile is an async function, but it does not return anything, so the Cloud Function only starts the work and does not wait for the response while the drive.files.create call is still running.
To fix that, you just need to await the result of the API call. Add a
return await statement on the API call,
like:
const uploadFile = async (auth) => {
const buffer = await zoomDownloadLink();
const bs = new stream.PassThrough();
request(buffer).pipe(bs);
const drive = google.drive({ version: "v3", auth });
var fileMetadata = {
name: `${today.substring(0, 4)}.${today.substring(5, 7)}.${today.substring(8,10)} - Town Hall.mp4`,
parents: ["XXXXXXXXXXXXXXXXX"],
};
var media = {
mimeType: "video/mp4",
body: bs,
};
// Note: with googleapis, passing a callback means no promise is returned,
// so drop the callback and await the call directly.
const res = await drive.files.create({
resource: fileMetadata,
media: media,
fields: "id",
uploadType: "resumable",
supportsAllDrives: true,
});
console.log("File Id: ", res.data.id);
return res;
};
Also, something important when you are calling APIs inside Cloud Functions is the timeout. Check that your Cloud Function's timeout is long enough to wait for the API call to respond.
Also, you can wrap the call in Promise.all to force waiting for the response:
const result = uploadFile(auth);
const _response = await Promise.all([result]);
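The underlying point is that the Cloud Function has to keep a returned promise alive until the upload finishes. A minimal illustration of the difference, where doSlowThing is a hypothetical stand-in for the Drive upload and not part of the question's code:
// Not from the answer: awaited vs. fire-and-forget async work in a Cloud Function.
const doSlowThing = () => new Promise(resolve => setTimeout(resolve, 2000)); // hypothetical stand-in

exports.fireAndForget = async () => {
  doSlowThing(); // not awaited or returned: the function can finish (and be frozen) before the work completes
};

exports.waitsForWork = async () => {
  return doSlowThing(); // returned: the function stays alive until the promise settles
};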
I am working on a solution with which I can send desktop push notifications to subscribed clients.
I have created a basic solution where, whenever the user clicks a button, I ask whether they want to allow notifications for my app.
I am getting a "Registration failed - permission denied" error whenever I click the button for the first time.
Because of that, I am not able to get the required endpoint to save on the backend.
Here is my code:
index.html
<html>
<head>
<title>PUSH NOT</title>
<script src="index.js"></script>
</head>
<body>
<button onclick="main()">Ask Permission</button>
</body>
</html>
index.js
const check = () => {
if (!("serviceWorker" in navigator)) {
throw new Error("No Service Worker support!");
} else {
console.log("service worker supported")
}
if (!("PushManager" in window)) {
throw new Error("No Push API Support!");
} else {
console.log("PushManager worker supported")
}
};
const registerServiceWorker = async () => {
const swRegistration = await navigator.serviceWorker.register("/service.js?"+Math.random());
return swRegistration;
};
const requestNotificationPermission = async () => {
const permission = await window.Notification.requestPermission();
// value of permission can be 'granted', 'default', 'denied'
// granted: user has accepted the request
// default: user has dismissed the notification permission popup by clicking on x
// denied: user has denied the request.
if (permission !== "granted") {
throw new Error("Permission not granted for Notification");
}
};
const main = async () => {
check();
const swRegistration = await registerServiceWorker();
const permission = await requestNotificationPermission();
};
// main(); we will not call main in the beginning.
service.js
// urlB64ToUint8Array is a magic function that will encode the base64 public key
// to Array buffer which is needed by the subscription option
const urlB64ToUint8Array = base64String => {
const padding = "=".repeat((4 - (base64String.length % 4)) % 4);
const base64 = (base64String + padding)
.replace(/\-/g, "+")
.replace(/_/g, "/");
const rawData = atob(base64);
const outputArray = new Uint8Array(rawData.length);
for (let i = 0; i < rawData.length; ++i) {
outputArray[i] = rawData.charCodeAt(i);
}
return outputArray;
};
const saveSubscription = async subscription => {
console.log("Save Sub")
const SERVER_URL = "http://localhost:4000/save-subscription";
const response = await fetch(SERVER_URL, {
method: "post",
headers: {
"Content-Type": "application/json"
},
body: JSON.stringify(subscription)
});
return response.json();
};
self.addEventListener("activate", async () => {
try {
const applicationServerKey = urlB64ToUint8Array(
"BFPtpIVOcn2y25il322-bHQIqXXm-OACBtFLdo0EnzGfs-jIGXgAzjY6vNapPb4MM1Z1WuTBUo0wcIpQznLhVGM"
);
const options = { applicationServerKey, userVisibleOnly: true };
const subscription = await self.registration.pushManager.subscribe(options);
console.log(JSON.stringify(subscription))
const response = await saveSubscription(subscription);
} catch (err) {
console.log(err.code)
console.log(err.message)
console.log(err.name)
console.log('Error', err)
}
});
self.addEventListener("push", function(event) {
if (event.data) {
console.log("Push event!! ", event.data.text());
} else {
console.log("Push event but no data");
}
});
I have also created a bit of a backend:
const express = require("express");
const cors = require("cors");
const bodyParser = require("body-parser");
const webpush = require('web-push')
const app = express();
app.use(cors());
app.use(bodyParser.json());
const port = 4000;
app.get("/", (req, res) => res.send("Hello World!"));
const dummyDb = { subscription: null }; //dummy in memory store
const saveToDatabase = async subscription => {
// Since this is a demo app, I am going to save this in a dummy in memory store. Do not do this in your apps.
// Here you should be writing your db logic to save it.
dummyDb.subscription = subscription;
};
// The new /save-subscription endpoint
app.post("/save-subscription", async (req, res) => {
const subscription = req.body;
await saveToDatabase(subscription); //Method to save the subscription to Database
res.json({ message: "success" });
});
const vapidKeys = {
publicKey:
'BFPtpIVOcn2y25il322-bHQIqXXm-OACBtFLdo0EnzGfs-jIGXgAzjY6vNapPb4MM1Z1WuTBUo0wcIpQznLhVGM',
privateKey: 'mHSKS-uwqAiaiOgt4NMbzYUb7bseXydmKObi4v4bN6U',
}
webpush.setVapidDetails(
'mailto:janakprajapati90@email.com',
vapidKeys.publicKey,
vapidKeys.privateKey
)
const sendNotification = (subscription, dataToSend='') => {
webpush.sendNotification(subscription, dataToSend)
}
app.get('/send-notification', (req, res) => {
const subscription = {endpoint:"https://fcm.googleapis.com/fcm/send/dLjyDYvI8yo:APA91bErM4sn_wRIW6xCievhRZeJcIxTiH4r_oa58JG9PHUaHwX7hQlhMqp32xEKUrMFJpBTi14DeOlECrTsYduvHTTnb8lHVUv3DkS1FOT41hMK6zwMvlRvgWU_QDDS_GBYIMRbzjhg",expirationTime:null,keys:{"p256dh":"BE6kUQ4WTx6v8H-wtChgKAxh3hTiZhpfi4DqACBgNRoJHt44XymOWFkQTvRPnS_S9kmcOoDSgOVD4Wo8qDQzsS0",auth:"CfO4rOsisyA6axdxeFgI_g"}} //get subscription from your databse here.
const message = 'Hello World'
sendNotification(subscription, message)
res.json({ message: 'message sent' })
})
app.listen(port, () => console.log(`Example app listening on port ${port}!`));
Please help me
Try the following code:
index.js
const check = () => {
if (!("serviceWorker" in navigator)) {
throw new Error("No Service Worker support!");
} else {
console.log("service worker supported")
}
if (!("PushManager" in window)) {
throw new Error("No Push API Support!");
} else {
console.log("PushManager worker supported")
}
};
const saveSubscription = async subscription => {
console.log("Save Sub")
const SERVER_URL = "http://localhost:4000/save-subscription";
const response = await fetch(SERVER_URL, {
method: "post",
headers: {
"Content-Type": "application/json"
},
body: JSON.stringify(subscription)
});
return response.json();
};
const urlB64ToUint8Array = base64String => {
const padding = "=".repeat((4 - (base64String.length % 4)) % 4);
const base64 = (base64String + padding)
.replace(/\-/g, "+")
.replace(/_/g, "/");
const rawData = atob(base64);
const outputArray = new Uint8Array(rawData.length);
for (let i = 0; i < rawData.length; ++i) {
outputArray[i] = rawData.charCodeAt(i);
}
return outputArray;
};
const registerServiceWorker = async () => {
return navigator.serviceWorker.register("service.js?"+Math.random()).then((swRegistration) => {
console.log(swRegistration);
return swRegistration;
});
};
const requestNotificationPermission = async (swRegistration) => {
return window.Notification.requestPermission().then(() => {
const applicationServerKey = urlB64ToUint8Array(
"BFPtpIVOcn2y25il322-bHQIqXXm-OACBtFLdo0EnzGfs-jIGXgAzjY6vNapPb4MM1Z1WuTBUo0wcIpQznLhVGM"
);
const options = { applicationServerKey, userVisibleOnly: true };
return swRegistration.pushManager.subscribe(options).then((pushSubscription) => {
console.log(pushSubscription);
return pushSubscription;
});
});
};
const main = async () => {
check();
const swRegistration = await registerServiceWorker();
const subscription = await requestNotificationPermission(swRegistration);
// saveSubscription(subscription);
};
service.js
self.addEventListener("push", function(event) {
if (event.data) {
console.log("Push event!! ", event.data.text());
} else {
console.log("Push event but no data");
}
});
I can think of three reasons that the permission is denied:
1) Your site is not on HTTPS (including localhost that is not on HTTPS). Chrome's default behaviour, as far as I know, is to block notifications on HTTP sites. If that's the case, click the info icon near the URL, then click Site settings, then change Notifications to Ask.
2) If you are on Safari: Safari uses the deprecated interface of requestPermission, that is to say the value is not returned through a promise but through a callback (see the feature-detection sketch after this list), so instead of
Notification.requestPermission().then(res => console.log(res))
it is
Notification.requestPermission(res => console.log(res))
3) Your browser settings are blocking notification requests globally. To make sure that this is not your problem, run the following code in the console (on a secure HTTPS site):
Notification.requestPermission().then(res => console.log(res))
If you receive the permission prompt, then the problem is something else; if you don't, make sure that the browser is not blocking notification requests.
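For the Safari case mentioned in point 2, here is a small sketch (not part of the original answer) that works with both the promise-based and the older callback-based forms of Notification.requestPermission:
const askPermission = () =>
  new Promise((resolve, reject) => {
    // Older Safari resolves via the callback; modern browsers return a promise.
    const maybePromise = Notification.requestPermission(result => resolve(result));
    if (maybePromise) {
      maybePromise.then(resolve, reject);
    }
  });

// Usage:
// askPermission().then(result => console.log("permission:", result));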
I'm trying to use nock in my tests to intercept the request calls I'm making from the native https module in Node.js. I'm using Promise.all to make two requests to the external server. I want my tests to intercept the calls and check some of the form fields to make sure they're filled in as I want.
I have my class set up below (I've kept the most relevant parts of the code):
const archiver = require('archiver');
const { generateKeyPairSync } = require('crypto');
const FormData = require('form-data');
const fs = require('fs');
const https = require('https');
class Platform {
constructor() {
this.FILESTORE_USERNAME = process.env.FILESTORE_USERNAME;
this.FILESTORE_PASSWORD = process.env.FILESTORE_PASSWORD;
}
store(serviceName) {
const { publicKey, privateKey } = this._generateKeys();
return Promise.all([this._postKey(publicKey), this._postKey(privateKey)])
.then(() => {
return this._zipKeys(publicKey, privateKey, serviceName);
})
.catch((err) => {
throw err;
});
}
_postKey(key) {
const options = this._getOptions();
const keyName = (key.search(/(PUBLIC)/) !== -1) ? 'publicKey' : 'privateKey';
const form = new FormData();
form.append('file', key);
form.append('Name', keyName);
form.append('MimeMajor', 'application');
form.append('MimeMinor', 'x-pem-file');
form.append('Extension', (keyName == 'publicKey') ? 'pub' : '');
form.append('FileClass', 'MFS::File');
options.headers = form.getHeaders();
options.headers.Authorization = 'Basic ' + Buffer.from(this.FILESTORE_USERNAME + ':' + this.FILESTORE_PASSWORD).toString('base64');
return new Promise((resolve, reject) => {
let post = https.request(options, (res) => {
let data = '';
if (res.statusCode < 200 || res.statusCode > 299) {
reject(new Error('File Storage API returned a status code outside of acceptable range: ' + res.statusCode));
} else {
res.setEncoding('utf8');
res.on('data', (chunk) => {
data += chunk;
});
res.on('error', (err) => {
reject(err);
});
res.on('end', () => {
if (data) {
resolve(JSON.parse(data));
} else {
resolve();
}
});
}
});
post.on('error', (err) => {
reject(err);
});
form.pipe(post);
post.end();
});
}
_getOptions() {
return {
hostname: 'api.example.com',
path: '/media/files/',
method: 'POST',
};
}
}
module.exports = Platform;
And then, my testing code looks like this. I'm using mocha, sinon, chai, sinon-chai and nock.
const Platform = require('/usr/src/app/api/Services/Platform');
const crypto = require('crypto');
const fs = require('fs');
const nock = require('nock');
const yauzl = require('yauzl');
describe('Platform', function() {
let platform;
beforeEach(() => {
platform = new Platform();
});
afterEach(() => {
const list = fs.readdirSync('/usr/src/app/api/Services/data/');
list.forEach((file) => {
fs.unlink('/usr/src/app/api/Services/data/' + file, (err) => {
if (err) throw err;
});
});
nock.cleanAll();
});
after(() => {
nock.restore();
});
describe('store', function() {
it('should post each generated key to an external storage place', async function() {
this.timeout(5000);
// const stub = sinon.stub(platform, '_postKey').resolves();
const scope = nock('https://api.example.com')
.persist()
.post('/media/files/', (body) => {
// console.log(body);
})
.reply(200);
let serviceName = 'test';
let actual = await platform.store(serviceName)
.catch((err) => {
(() => { throw err; }).should.not.throw();
});
console.log(scope);
// expect(stub.callCount).to.equal(2);
expect(actual).to.be.a('string');
expect(actual).to.include(serviceName + '.zip');
// stub.reset();
});
});
});
The problem I am coming across is this error that is thrown when running my tests:
AssertionError: expected [Function] to not throw an error but 'Error:
Nock: No match for request {\n "method": "POST",\n "url":
"https://api.example.com/media/files/",\n "headers": {\n
"content-type": "multipart/form-data;
boundary=--------------------------363749230271182821931703",\n
"authorization": "Basic abcdef1224u38454857483hfjdhjgtuyrwyt="\n },\n
"body":
"----------------------------363749230271182821931703\r\nContent-Disposition: form-data; name=\"file\"\r\n\r\n-----BEGIN PUBLIC
KEY-----\nMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAq+QnVOYVjbrHIlAEsEoF\nZ4sTvqiB3sJGwecNhmgrUp9U8oqgoB50aW6VMsL71ATRyq9b3vMQKpjbU3R2RcOF\na6mlaBtBjxDGu2nEpGX++mtPCdD9HV7idvWgJ3XS0vGaCM//8ukY+VLBc1IB8CHC\nVj+8YOD5Y9TbdpwXR+0zCaiHwwd8MHIo1kBmQulIL7Avtjh55OmQZZtjO525lbqa\nWUZ24quDp38he2GjLDeTzHm9z1RjYJG6hS+Ui0s2xRUs6VAr7KFtiJmmjxPS9/vZ\nwQyFcz/R7AJKoEH8p7NE7nn/onbybJy+SWRxjXVH8afHkVoC65BiNoMiEzk1rIsx\ns92woHnq227JzYwFYcLD0W+TYjtGCB8+ks+QRIiV0pFJ3ja5VFIxjn9MxLntWcf2\nhsiYrmfJlqmpW1DMfZrtt41cJUFQwt7CpN72aix7btmd/q0syh6VVlQEHq/0nDky\nItv7dqyqZc9NNOMqK9/kXWhbq5cwS21mm+kTGas5KSdeIR0LH7uVtivB+LKum14e\nRDGascZcXZIVTbOeCxA6BD7LyaJPzXmlMy4spXlhGoDYyVRhpvv2K03Mg7ybiB4X\nEL1oJtiCFkRX5LtRJv0PCRJjaa3UvfyIuz8bHK4ANxIZqcwZwER+g02gw8iqNfMa\nDWXpfMJUU8TQuLGWZQaGJc8CAwEAAQ==\n-----END
PUBLIC
KEY-----\n\r\n----------------------------363749230271182821931703\r\nContent-Disposition: form-data;
name=\"Name\"\r\n\r\npublicKey\r\n----------------------------363749230271182821931703\r\nContent-Disposition: form-data;
name=\"MimeMajor\"\r\n\r\napplication\r\n----------------------------363749230271182821931703\r\nContent-Disposition: form-data;
name=\"MimeMinor\"\r\n\r\nx-pem-file\r\n----------------------------363749230271182821931703\r\nContent-Disposition: form-data;
name=\"Extension\"\r\n\r\npub\r\n----------------------------363749230271182821931703\r\nContent-Disposition: form-data;
name=\"FileClass\"\r\n\r\nMFS::File\r\n----------------------------363749230271182821931703--\r\n"\n}'
was thrown
I take it it's because nock expects me to fake out the body for the request to get a correct match? Is there a way of just looking for requests made to that address, regardless of the body, so I can do my own tests or whatever.
When the post method of a Nock Scope is passed a second argument, it is used to match against the body of the request.
Docs for specifying the request body
In your test, you're passing a function as the second argument, but not returning true, so Nock does not consider it a match.
From the docs:
Function: nock will evaluate the function providing the request body
object as first argument. Return true if it should be considered a
match
Since your goal is to assert form fields on the request, your best approach would be to leave the function there, do your assertions where the // console.log(body); line is, but add return true; to the end of the function.
You could also return true or false depending on if your form fields match your assertions, but in my experience it makes the error output from the test convoluted. My preference is to use standard chai expect() calls and let the assertions bubble errors before Nock continues with request matching.
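For example, here is a sketch of that approach applied to the test in the question (the asserted field names come from the question's form, the body arrives as the raw multipart string, and chai's expect is assumed to be in scope as in the rest of the test file):
const scope = nock('https://api.example.com')
  .persist()
  .post('/media/files/', (body) => {
    // body is the raw multipart/form-data string; assert on the parts you care about
    expect(body).to.include('name="Name"');
    expect(body).to.include('name="MimeMajor"');
    expect(body).to.include('application');
    // Returning true tells Nock to treat the request as a match
    return true;
  })
  .reply(200);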
For my magazine app, I am using Firebase. One function of this Android app is that whenever a new article is published, a notification about the new article is sent to all devices.
I am saving all the device tokens in the database like this:
FCMToken
{
userid:deviceToken
}
So whenever a new node is added under the "published" key in the Firebase DB, the FCM function is triggered and a message is sent to all devices.
Below is my JavaScript code for the FCM function:
'use strict'
const functions = require('firebase-functions');
const admin = require('firebase-admin');
admin.initializeApp(functions.config().firebase);
exports.sendNotification = functions.database.ref('/published/{msg_id}').onWrite(event => {
const snapshot = event.data;
// Only send a notification when a new message has been created.
if (snapshot.previous.val()) {
return;
}
const msg_id = event.params.msg_id;
const msg_val=admin.database().ref(`messages/${msg_id}`).once('value');
return msg_val.then(msgResult =>{
const msg_title=msgResult.val().title;
const user_id=msgResult.val().userId;
console.log('msg title is',msg_title);
console.log('We have a new article : ', msg_id);
const payload={
data : {
title:"New Article",
body: msg_title,
msgid : msg_id,
userid : user_id
}
};
// const deviceToken = admin.database().ref('/FCMToken/{user_id}').once('value');
admin.database().ref('/FCMToken').on("value", function(dbsnapshot)
{
dbsnapshot.forEach(function(childSnapshot) {
//var childKey = childSnapshot.key;
const childData = childSnapshot.val();
const deviceToken=console.log("device token" + childSnapshot.val());
return admin.messaging().sendToDevice(childData,payload).then(response=>{
console.log("This was notification feature")
console.log("response: ", response);
})
.catch(function(error)
{
console.log("error sending message",error)
});
});
});
});
});
For some reason, the notification is sent to only one device (the first token in the FCMToken node).
Update:
I have updated my code and am using promises, but for some reason it is still not working; it just sends the notification to the first device token.
'use strict'
const functions = require('firebase-functions');
const admin = require('firebase-admin');
admin.initializeApp(functions.config().firebase);
exports.sendNotification = functions.database.ref('/published/{msg_id}').onWrite(event => {
const snapshot = event.data;
// Only send a notification when a new message has been created.
if (snapshot.previous.val()) {
return;
}
const msg_id = event.params.msg_id;
const msg_val=admin.database().ref(`messages/${msg_id}`).once('value');
return msg_val.then(msgResult =>{
const msg_title=msgResult.val().title;
const user_id=msgResult.val().userId;
console.log('msg title is',msg_title);
console.log('We have a new article : ', msg_id);
const payload={
data : {
title:"New Article",
body: msg_title,
msgid : msg_id,
userid : user_id
}
};
const promises=[];
// const deviceToken = admin.database().ref('/FCMToken/{user_id}').once('value');
admin.database().ref('/FCMToken').once('value').then(function(dbsnapshot)
{
dbsnapshot.forEach(function(childSnapshot) {
//var childKey = childSnapshot.key;
const childData = childSnapshot.val();
const deviceToken=console.log("device token" + childSnapshot.val());
const promise = admin.messaging().sendToDevice(childData,payload).then(response=>{
promises.push(promise)
console.log("This was notification feature")
console.log("response: ", response);
})
return Promise.all(promises)
.catch(function(error)
{
console.log("error sending message",error)
});
});
});
});
});
The response object gives this output:
response: { results: [ { error: [Object] } ],
canonicalRegistrationTokenCount: 0,
failureCount: 1,
successCount: 0,
multicastId: 6411440389982586000 }
You're not using promises correctly throughout your function. There are two things wrong.
First, you should be querying the database using once() instead of on(), and using the promise returned from it in order to proceed to the next item of work:
admin.database().ref('/FCMToken').once("value")
.then(result => /* continue your work here */)
Also, you can't return a promise out of the forEach loop. Instead, you need to return a promise at the top level of the function, as the very last step in the function. This promise needs to resolve when all of the work is done in this function. For your function, this means when all of the messages are sent. You'll have to collect all the promises for all of the messages in an array, then return a single promise that resolves when they all resolve. The general form of that looks like this:
const promises = []
dbsnapshot.forEach(function(childSnapshot) {
// remember each promise for each message sent
const promise = admin.messaging().sendToDevice(...)
promises.push(promise)
})
// return a single promise that resolves when everything is done
return Promise.all(promises)
Please take care to learn how promises work in JavaScript. You won't be able to write effective functions without dealing with promises correctly.
So I figured out another method to get values.
const tokens= Object.keys(tokensSnapshot.val()).map(e => tokensSnapshot.val()[e]);
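As an aside, this is the same thing Object.values does (which the commented-out object.values polyfill in the code below hints at). A tiny illustration with placeholder data:
// Two equivalent ways to turn the { userId: deviceToken } map into an array of tokens
const snapshotValue = { user1: "tokenA", user2: "tokenB" }; // placeholder data
const tokensA = Object.keys(snapshotValue).map(e => snapshotValue[e]);
const tokensB = Object.values(snapshotValue); // same result where Object.values is available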
Below is my complete method:
'use strict'
const functions = require('firebase-functions');
const admin = require('firebase-admin');
//Object.values = require('object.values');
admin.initializeApp(functions.config().firebase);
exports.sendNotification = functions.database.ref('/published/{msg_id}').onWrite(event => {
const snapshot = event.data;
// Only send a notification when a new message has been created.
if (snapshot.previous.val()) {
return;
}
const msg_id = event.params.msg_id;
const msg_val=admin.database().ref(`messages/${msg_id}`).once('value');
return msg_val.then(msgResult =>{
const msg_title=msgResult.val().title;
const user_id=msgResult.val().userId;
console.log('msg title is',msg_title);
console.log('We have a new article : ', msg_id);
const payload={
data : {
title:"New Article",
body: msg_title,
msgid : msg_id,
userid : user_id
}
};
const getDeviceTokensPromise = admin.database().ref('/FCMToken').once('value');
return Promise.all([getDeviceTokensPromise, msg_title]).then(results => {
const tokensSnapshot = results[0];
const msgi = results[1];
if (!tokensSnapshot.hasChildren()) {
return console.log('There are no notification tokens to send to.');
}
console.log('There are', tokensSnapshot.numChildren(), 'tokens to send notifications to.');
console.log("tokenslist",tokensSnapshot.val());
const tokens= Object.keys(tokensSnapshot.val()).map(e => tokensSnapshot.val()[e]);
//var values = Object.keys(o).map(e => obj[e])
return admin.messaging().sendToDevice(tokens, payload).then(response => {
// For each message check if there was an error.
const tokensToRemove = [];
response.results.forEach((result, index) => {
const error = result.error;
if (error) {
console.error('Failure sending notification to', tokens[index], error);
// Cleanup the tokens who are not registered anymore.
}
});
return Promise.all(tokensToRemove);
});
});
});
});