Unable to establish WebRTC connection with Node JS server as a peer - javascript

I am trying to send images captured from a canvas to my NodeJS backend server using the WebRTC data channel. That is I am trying to make my server a peer. But for some reason, I am unable to establish a connection.
Client Side
// Client-side WebRTC setup from the question.
// KNOWN BUG (explained in the accepted answer below): the data channel is
// created AFTER peer.createOffer(), so the offer contains no data-channel
// media section, negotiation never completes, the connection state stays
// "new" forever, and neither callback ever fires.
async function initChannel()
{
// Create an offer and install it as the local description.
const offer = await peer.createOffer();
await peer.setLocalDescription(offer);
// POST the offer to the signaling endpoint and read the SDP answer back.
const response = await fetch("/connect", {
headers: {
'Content-Type': 'application/json',
},
method: 'post',
body: JSON.stringify({ sdp: offer, id: Math.random() })
}).then((res) => res.json());
// NOTE(review): not awaited — any failure here is silently dropped.
peer.setRemoteDescription(response.sdp);
// Unreliable, low-latency channel (unordered; packets expire after 100 ms).
// BUG: created only after the offer was already generated (see note above).
const imageChannel = peer.createDataChannel("imageChannel", { ordered: false, maxPacketLifeTime: 100 });
// Debug logging of ICE progress.
peer.addEventListener("icecandidate", console.log);
peer.addEventListener("icegatheringstatechange",console.log);
// drawCanvas function draws images got from the server.
imageChannel.addEventListener("message", message => drawCanvas(remoteCanvasCtx, message.data, imageChannel));
// captureImage function captures and sends image to server using imageChannel.send()
imageChannel.addEventListener("open", () => captureImage(recordCanvasCtx, recordCanvas, imageChannel));
}
// Public STUN server used for ICE candidate discovery.
const peer = new RTCPeerConnection({ iceServers: [{ urls: "stun:stun.stunprotocol.org:3478" }] });
initChannel();
Here both captureImage and drawCanvas are not being invoked.
Server Side
import webrtc from "wrtc"; // The wrtc module ( npm i wrtc )
// Logs the label of an incoming data channel.
// (As described in the question, this handler is never invoked because
// the client's offer never announced a data channel.)
function handleChannel(channel)
{
const { label } = channel;
console.log(label);
}
// Serve static assets and parse JSON / URL-encoded request bodies.
app.use(express.static(resolve(__dirname, "public")))
.use(bodyParser.json())
.use(bodyParser.urlencoded({ extended: true }));
// Signaling endpoint: accepts the client's SDP offer, replies with an answer.
app.post("/connect", async ({ body }, res) =>
{
console.log("Connecting to client...");
// NOTE(review): `id` is read but never used; `answer` is assigned inline below.
let answer, id = body.id;
const peer = new webrtc.RTCPeerConnection({ iceServers: [{ urls: "stun:stun.stunprotocol.org:3478" }] });
// Apply the client's offer, then create and apply the local answer.
await peer.setRemoteDescription(new webrtc.RTCSessionDescription(body.sdp));
await peer.setLocalDescription(answer = await peer.createAnswer());
// Never fires here — the client's offer contained no data channel
// (see the accepted answer below for the root cause).
peer.addEventListener("datachannel",handleChannel)
return res.json({ sdp: answer });
});
app.listen(process.env.PORT || 2000);
Here the post request is handled fine but handleChannel is never called.
When I run this I don't get any errors but when I check the connection status it shows "new" forever. I console logged remote and local description and they seem to be all set.
What am I doing wrong here?
I am pretty new to WebRTC and I am not even sure if this is the correct approach to continuously send images (frames of user's webcam feed) to and back from the server, if anyone can tell me a better way please do.
And one more thing, how can I send image blobs ( got from canvas.toBlob() ) via the data channel with low latency.

I finally figured this out with the help of a friend of mine. The problem was that I have to create the DataChannel before calling peer.createOffer(). The peer.onnegotiationneeded callback is only called once a channel is created. Usually this happens when you create a media channel (either audio or video) by passing a stream to WebRTC, but since I am not using them here, I have to do it this way.
Client Side
// Working client-side code: the data channel is created FIRST, which makes
// the browser fire "negotiationneeded"; only then is the offer generated,
// so the offer actually advertises the channel to the server.
const peer = new RTCPeerConnection({ iceServers: [{ urls: "stun:stun.l.google.com:19302" }] });
const imageChannel = peer.createDataChannel("imageChannel");
imageChannel.onmessage = ({ data }) =>
{
// Do something with received data.
};
imageChannel.onopen = () => imageChannel.send(imageData);// Data channel opened, start sending data.
// Creating the channel above triggers negotiation, which runs initChannel.
peer.onnegotiationneeded = initChannel
// Generates an offer, exchanges SDP with the server over HTTP, applies the answer.
async function initChannel()
{
const offer = await peer.createOffer();
await peer.setLocalDescription(offer);
// Send offer and fetch answer from the server
const { sdp } = await fetch("/connect", {
headers: {
"Content-Type": "application/json",
},
method: "post",
body: JSON.stringify({ sdp: peer.localDescription }),
})
.then(res => res.json());
// NOTE(review): not awaited — a rejection here would go unhandled.
peer.setRemoteDescription(new RTCSessionDescription(sdp));
}
Server
Receive offer from client sent via post request. Create an answer for it and send as response.
// Signaling endpoint (working version): register the datachannel handler,
// apply the client's offer, and reply with the locally generated answer.
app.post('/connect', async ({ body }, res) =>
{
const peer = new webrtc.RTCPeerConnection({
iceServers: [{ urls: 'stun:stun.l.google.com:19302' }],
});
console.log('Connecting to client...');
// Attach the handler before negotiation so the channel event is not missed.
peer.ondatachannel = handleChannel;
await peer.setRemoteDescription(new webrtc.RTCSessionDescription(body.sdp));
await peer.setLocalDescription(await peer.createAnswer());
return res.json({ sdp: peer.localDescription });
});
The function to handle data channel.
/**
 * Called once the client's data channel is ready on this peer.
 *
 * @param {{ type: 'datachannel', channel: RTCDataChannel }} event
 */
function handleChannel({ channel })
{
// FIX: a destructured arrow-function parameter must be parenthesized —
// the original `{data} =>` form is a syntax error.
channel.addEventListener("message", ({ data }) =>
{
// Do something with data received from client.
});
// Can use the channel to send data to client.
channel.send("Hi from server");
}
So here is what happens :
Client creates a Data-Channel.
Once data channel is created onnegotiationneeded callback is called.
The client creates an offer and sends it to the server (as post request).
Server receives the offer and creates an answer.
Server sends the answer back to the client (as post response).
Client completes the initialization using the received answer.
ondatachannel callback gets called on the server and the client.
I have used post request here to exchange offer and answer but it should be fairly easy to do the same using Web Socket if that is what you prefer.

Related

How to trigger an API call when a message is sent to Azure service bus topic?

I am working on an Azure Service Bus topic. Following the documentation, I created sender and receiver code.
This is the sender code i am having,
// FIX: the npm package name is "@azure/service-bus" — the leading "#" in the
// original was a Markdown mangling of "@" and would make require() throw
// MODULE_NOT_FOUND.
const { ServiceBusClient } = require("@azure/service-bus");
// Placeholders: fill in the namespace connection string and topic name.
const connectionString = "<SERVICE BUS NAMESPACE CONNECTION STRING>"
const topicName = "<TOPIC NAME>";
// Demo payloads — each element becomes the body of one Service Bus message.
const messages = [
{ body: "Albert Einstein" },
{ body: "Werner Heisenberg" },
{ body: "Marie Curie" },
{ body: "Steven Hawking" },
{ body: "Isaac Newton" },
{ body: "Niels Bohr" },
{ body: "Michael Faraday" },
{ body: "Galileo Galilei" },
{ body: "Johannes Kepler" },
{ body: "Nikolaus Kopernikus" }
];
// Sends the demo messages to the topic in one or more batches.
async function main() {
// create a Service Bus client using the connection string to the Service Bus namespace
const sbClient = new ServiceBusClient(connectionString);
// createSender() can also be used to create a sender for a queue.
const sender = sbClient.createSender(topicName);
try {
// create a batch object
let batch = await sender.createMessageBatch();
for (let i = 0; i < messages.length; i++) {
// for each message in the array, try to add the message to the batch
if (!batch.tryAddMessage(messages[i])) {
// the current batch is full — send it as it is
await sender.sendMessages(batch);
// then, create a new batch
batch = await sender.createMessageBatch();
// now, add the message that failed to be added to the previous batch
if (!batch.tryAddMessage(messages[i])) {
// if it still can't be added, the message is too big to fit in any batch
throw new Error("Message too big to fit in a batch");
}
}
}
// Send the last created batch of messages to the topic
await sender.sendMessages(batch);
console.log(`Sent a batch of messages to the topic: ${topicName}`);
} finally {
// FIX: close the sender even when sending throws — the original only
// closed it on success, leaking the AMQP link on failure.
await sender.close();
await sbClient.close();
}
}
// call the main function
main().catch((err) => {
console.log("Error occurred: ", err);
process.exit(1);
});
This code is working fine, but instead of sending a batch of dummy data to the service bus topic i want to implement my use case here.
My use case is I will be using this sender code in a react front end application, where there is a node API call happening at the end of a form submission. So at the end of form submission, i will send that unique form ID to the topic and i need to somehow trigger the api call for that form id.
I am unable to connect the dots. How to do this?
Added receiver-side code.
// FIX: the package name is "@azure/service-bus" — the leading "#" in the
// original was a Markdown mangling of "@" and would break require().
const { delay, ServiceBusClient, ServiceBusMessage } = require("@azure/service-bus");
const axios = require("axios").default;
const connectionString = "<ConnectionString>"
const topicName = "<TopicName>";
const subscriptionName = "<Subscription>";
// Subscribes to the topic subscription for a few seconds and triggers the
// backend API call for every message received.
async function main() {
// create a Service Bus client using the connection string to the Service Bus namespace
const sbClient = new ServiceBusClient(connectionString);
// createReceiver() can also be used to create a receiver for a queue.
const receiver = sbClient.createReceiver(topicName, subscriptionName);
// function to handle messages
const myMessageHandler = async (messageReceived) => {
console.log(`Received message: ${messageReceived.body}`);
// FIX: await the HTTP call — the original logged a pending Promise and
// never surfaced request failures.
const response = await axios({
method: 'post',
url: 'http://localhost:8080/gitWrite?userprojectid=63874e2e3981e40a6f4e04a7',
});
console.log(response);
};
// function to handle any errors
const myErrorHandler = async (error) => {
console.log(error);
};
// subscribe and specify the message and error handlers
receiver.subscribe({
processMessage: myMessageHandler,
processError: myErrorHandler
});
// Keep the receiver alive long enough to drain messages, then shut down.
await delay(5000);
await receiver.close();
await sbClient.close();
}
// call the main function
main().catch((err) => {
console.log("Error occurred: ", err);
process.exit(1);
});
While messages are published to a topic, they are recieved by subscriptions under the topic. You'll need to define one or more subscriptions to receive the messages. That's on the broker. For your code, you'll need a receiving code on the server-side/backend. Could be something like a node.js service or Azure Function. But a code that would receive from the subscription(s).
I would review the idea of publishing messages from the client side directly to Azure Service Bus. If the code is a React front end application, make sure the connection string is not embedded in resources or can be revealed.

Backend in heroku duplicate inserts/ frontend in heroku duplicate request (I dont know)

I have running an app in heroku, separate in back and front
back: node + express + mysql
front: vue
the app works fine, but I have a random error: sometimes I get duplicate records inserted from the frontend. (I guess the error comes from the front end.)
from the frontend I use fetch to add the records
// Options for the JSON POST that creates a record on the backend.
const requestOptions = {
method: "POST",
headers: {
"Accept": "application/json",
"Content-Type": "application/json"
},
// NOTE: "...: ...," below is a placeholder for the remaining form fields.
body: JSON.stringify({
data: data,
...: ...,
}),
};
// Single fetch call — the question is whether this can ever fire twice.
const response = await fetch(`url_backend_heroku/api/add`, requestOptions);
the records are inserted correctly, but sometimes they are inserted twice, each with the correct next id
Could it be that fetch is sending 2 requests in some cases?
before deploying in heroku on my local machine I never duplicate records
I've been going around for days and I can't find why this happens
Yeah it is possible you are sending 2 requests somewhere. Put logs in heroku BACK on the specific endpoint to see whats exactly happening.
Also while requesting from Front end check your network tab in developer tools to see if you are actually firing the request twice. Because as you said duplicate records but they have correct ID's , could only mean what you said.
Also, this might or might not be true, but heroku server sleeps on inactivity, so it is possible that might cause an issue but I am not entirely sure on that, will have to check the code and environment for that.
looking at the heroku (back) log it looks like 2 records were inserted normally
looking at the network tab in the browser, only 1 request appears: OPTIONS (204) and POST (200)
the table has an id that is the primary key nothing complicated
on the other hand I am on a Dynos Hobby plan, which does not have sleep times (unlike the free plan)
put here part or my backend
database.js
// database.js — creates a MySQL connection pool and promisifies pool.query.
const mysql = require('mysql')
const { promisify } = require('util')
// Placeholder for the real connection settings (host, user, password, ...).
const config = { database keys }
const pool = mysql.createPool(config);
// One-off connectivity check; logs a specific message per known error code.
// NOTE(review): the ": any" annotations are TypeScript syntax in a .js file.
pool.getConnection((err: any, connection: any) => {
if (err) {
if (err.code === 'PROTOCOL_CONNECTION_LOST') {
console.error('DATABASE CONNECTION WAS CLOSED')
}
if (err.code === 'ER_CON_COUNT_ERROR') {
console.error('DATABASE HAS TO MANY CONNECTIONS')
}
if (err.code === 'ECONNREFUSED') {
console.error('DATABASE CONNECTION WAS REFUSED')
}
}
// Return the probe connection to the pool.
if (connection) connection.release()
// NOTE(review): no early return above — this success message is printed
// even when the connection attempt failed.
console.log('DB is Connected')
return
})
// Allow `await pool.query(...)` instead of callback style.
pool.query = promisify(pool.query)
export default pool
controller.js
import pool from '../database';
// Inserts a record built from the request body.
// NOTE(review): `SELECT MAX(id) + 1` followed by a separate INSERT is not
// atomic — two concurrent requests can read the same MAX and compute the
// same id, which matches the duplicate-record symptom described above.
// TODO: verify by checking whether duplicated rows share a timestamp.
public async insert(req: Request, res: Response) {
let body = req.body
try {
// Compute the next id by hand instead of relying on auto_increment.
const response = await pool.query('SELECT MAX(id) + 1 as idNew FROM table')
let data = {Id: response[0].idNew, ...body}
//before it had an auto_increment id and it also failed
const result = await pool.query('INSERT INTO table set ?', [data])
res.json({
insertId: response[0].idNew,
message: "Saved OK"
})
} catch (error) {
console.log("error", error)
return res.status(500).send("error")
}
}
can it be a fetch problem? don't try yet to use axios for example

Serverless WebSockets - No method found matching route @connections/* for http method POST

I'm using Serverless Framework to host my WebSocket, which has the typical $connect, $disconnect, $default, etc methods that updates my connections db:
// $connect route: record the new WebSocket connection id in DynamoDB.
// (Fragment of a switch over the route key; the enclosing handler is not shown.)
case '$connect':
await dynamoDb.put({
TableName: process.env.CONNECTIONS_TABLE,
Item: {
connectionId,
// Expire the connection an hour later. This is optional, but recommended.
// You will have to decide how often to time out and/or refresh the ttl.
ttl: parseInt((Date.now() / 1000) + 3600)
}
}).promise();
My WebSocket setup is:
WebSocket URL: wss://1111111111.execute-api.ap-southeast-2.amazonaws.com/dev/
Connection URL: https://1111111111.execute-api.ap-southeast-2.amazonaws.com/dev/@connections
My HTTP setup is:
Invoke API at: https://222222222.execute-api.ap-southeast-2.amazonaws.com/dev/
I have a broadcast function which I am using to send data to the connections, which I am invoking with:
sls invoke --function broadcast --data '{ \"body\": \"Hello from server\" }'
The source sends a message to each connection, as provided in the params of the request:
/**
 * Pushes `body` to one WebSocket connection through the API Gateway
 * Management API. Stale connections (HTTP 400/410) are ignored on
 * purpose; every other failure is rethrown to the caller.
 */
async function sendMessage(connectionId, body) {
  const params = {
    ConnectionId: connectionId,
    Data: body
  };
  try {
    await apig.postToConnection(params).promise();
  } catch (err) {
    const connectionGone = err.statusCode === 400 || err.statusCode === 410;
    if (!connectionGone) {
      throw err;
    }
  }
}
/**
 * Returns every connectionId stored in the connections table, walking
 * DynamoDB scan pagination iteratively until the last page is consumed.
 *
 * @param {object} [ExclusiveStartKey] optional page cursor (kept for
 *   compatibility with the original recursive signature).
 */
async function getAllConnections(ExclusiveStartKey) {
  const connections = [];
  let cursor = ExclusiveStartKey;
  do {
    const { Items, LastEvaluatedKey } = await dynamoDb.scan({
      TableName: process.env.CONNECTIONS_TABLE,
      AttributesToGet: [ 'connectionId' ],
      ExclusiveStartKey: cursor
    }).promise();
    for (const { connectionId } of Items) {
      connections.push(connectionId);
    }
    cursor = LastEvaluatedKey;
  } while (cursor);
  return connections;
}
/**
 * Lambda entry point: fans the incoming request body out to every known
 * WebSocket connection concurrently.
 */
module.exports.handler = async function(event, context) {
  const connections = await getAllConnections();
  const deliveries = connections.map((connectionId) => sendMessage(connectionId, event.body));
  await Promise.all(deliveries);
}
A connection can be established (I can connect to the WebSocket), however when I try to invoke this function I am receiving the error:
No method found matching route @connections/ZE4SDfSJSwMCJ4g%3D for http method POST.
The ZE4SDfSJSwMCJ4g is my connectionId, which exists in my database. I am unsure if this routing issue has to do with my HTTP API and my WebSocket API being pointed at different API Gateway URL?
I appreciate the help!
Make sure that the endpoint in your API Gateway management configuration is the same as your ws endpoint.
// The management client must point at the *WebSocket* API's domain/stage,
// not the separate HTTP API — otherwise @connections routing fails.
const agma = new AWS.ApiGatewayManagementApi({
apiVersion: AGMA_VERSION,
endpoint: WS_ENDPOINT // 1111111111.execute-api.ap-southeast-2.amazonaws.com/dev
})
I too had this issue with websockets and google cloud functions. I think it's because the web socket server is put to sleep and doesn't actively listen because it's serverless.
Functions are configured to wake up on http requests (ports 80/443) and go back to sleep, so they need to be specially configured to wake up to requests on web socket port.
This seems possible using a serverless websockets plugin https://github.com/serverless/serverless-websockets-plugin there is a how to article at https://www.serverless.com/blog/api-gateway-websockets-example

How could I increase consume timeout in rpc endpoint at rabbitmq?

I use RPC endpoints and in one of them I have the following problem: I do not receive a message so the callback function is not executed on channel.consume().
At that endpoint, I send a message, a process that takes time is running on the server side and it responds to me with a message about whether the process was executed correctly. At other endpoints where the message is sent immediately by the server there is no problem.
I think there is a problem with the timeout. I tried to place the object {timeout: 3600000} after amqpOptions but again the problem was not solved. Specifically, the connection and channel objects have the same parameters regardless of the object I added. How could I change the timeout correctly?
const amqp = require('amqplib/callback_api');
// Connection settings for amqplib's callback API.
// NOTE(review): the { timeout: 3600000 } experiment mentioned above most
// likely only affects the connection handshake, not message consumption —
// verify against the amqplib connect() documentation.
const amqpOptions = {
protocol: 'amqp',
hostname: process.env.RABBITMQ_HOST,
port: process.env.RABBITMQ_PORT,
username: process.env.RABBITMQ_USER,
password: process.env.RABBITMQ_PASS,
vhost: '/',
};
// Empty JSON payload for the RPC request.
const message = Buffer.from(JSON.stringify({}));
amqp.connect(amqpOptions, (error0, connection) => {
if (error0) { throw error0; }
connection.createChannel((error1, channel) => {
if (error1) { throw error1; }
const correlationId = generateUuid();
// Consume replies; this callback only runs when the server publishes to
// replyQueue. NOTE(review): replyQueue, generateUuid, next and brokerUri
// are defined outside this snippet.
channel.consume(replyQueue, (msg) => {
if (JSON.parse(msg.content).error) {
console.log(JSON.parse(msg.content));
const error = JSON.parse(msg.content.toString());
return next(error);
}
console.log(JSON.parse(msg.content));
console.log('msg:',msg);
const {tunnel_info} = JSON.parse(msg.content.toString());
}, {noAck: true});
// Send the RPC request, telling the server where and how to reply.
channel.sendToQueue(`${brokerUri}`,
message, {correlationId, contentType: 'application/json', contentEncoding: 'utf8', replyTo: replyQueue});
});
});
Because channel is unidirectional. You should use two different channels for Publish and Consume.
AMQP specification says:
Channels are unidirectional, and thus at each connection endpoint the incoming and outgoing channels are completely distinct.

Sending data / payload to the Google Chrome Push Notification with Javascript

I'm working on the Google Chrome Push Notification and I'm trying to send the payload to the google chrome worker but, I have no idea how I receive this payload.
I have an API to create and save the notifications in my database and I need send the values through the https://android.googleapis.com/gcm/send and receive on the worker.js
This is my worker.js
// worker.js — service-worker push handler showing a hard-coded notification,
// since Chrome at the time delivered no payload with the push (see answers).
self.addEventListener('push', function(event) {
var title = 'Yay a message.';
var body = 'We have received a push message.';
var icon = '/images/icon-192x192.png';
// Re-using the same tag replaces the previous notification instead of stacking.
var tag = 'simple-push-demo-notification-tag';
// Keep the worker alive until the notification has been shown.
event.waitUntil(
self.registration.showNotification(title, {
body: body,
icon: icon,
tag: tag
})
);
});
And this is how I'm calling the GCM
curl --header "Authorization: key=AIzaSyDQjYDxeS9MM0LcJm3oR6B7MU7Ad2x2Vqc" --header "Content-Type: application/json" https://android.googleapis.com/gcm/send -d "{ \"data\":{\"foo\":\"bar\"}, \"registration_ids\":[\"APA91bGqJpCmyCnSHLjY6STaBQEumz3eFY9r-2CHTtbsUMzBttq0crU3nEXzzU9TxNpsYeFmjA27urSaszKtA0WWC3yez1hhneLjbwJqlRdc_Yj1EiqLHluVwHB6V4FNdXdKb_gc_-7rbkYkypI3MtHpEaJbWsj6M5Pgs4nKqQ2R-WNho82mnRU\"]}"
I tried to get event.data but, this is undefined.
Does anyone have any idea or sugestion?
Unfortunately it seems like an intended behavior:
A downside to the current implementation of the Push API in Chrome is
that you can’t send a payload with a push message. Nope, nothing. The
reason for this is that in a future implementation, payload will have
to be encrypted on your server before it’s sent to a push messaging
endpoint. This way the endpoint, whatever push provider it is, will
not be able to easily view the content of the push payload. This also
protects against other vulnerabilities like poor validation of HTTPS
certificates and man-in-the-middle attacks between your server and the
push provider. However, this encryption isn’t supported yet, so in the
meantime you’ll need to perform a fetch request to get information
needed to populate a notification.
As stated above, the workaround is to contact back your backend after receiving the push and fetch the stored data on the 3rd party server.
@gauchofunky's answer is correct. With some guidance from the folks on the Chromium dev slack channel and @gauchofunky I was able to piece something together. Here's how to work around the current limitations; hopefully my answer becomes obsolete soon!
First figure out how you're going to persist notifications on your backend. I'm using Node/Express and MongoDB with Mongoose and my schema looks like this:
// Mongoose schema for queued notifications: one document per pending push,
// flagged `sent` once it has been delivered to the service worker.
var NotificationSchema = new Schema({
_user: {type: mongoose.Schema.Types.ObjectId, ref: 'User'},
subscriptionId: String,
title: String,
body: String,
sent: { type: Boolean, default: false }
});
Be sure to add an icon if you'd like to alter the icon. I use the same icon every time so mine's hardcoded in the service worker.
Figuring out the correct REST web service took some thought. GET seemed like an easy choice but the call to get a notification causes side effects, so GET is out. I ended up going with a POST to /api/notifications with a body of {subscriptionId: <SUBSCRIPTION_ID>}. Within the method we basically perform a dequeue:
// Dequeue the oldest unsent notification for this user/subscription and
// mark it as sent before returning it to the service worker.
var subscriptionId = req.body.subscriptionId;
Notification
.findOne({_user: req.user, subscriptionId: subscriptionId, sent: false})
.exec(function(err, notification) {
if(err) { return handleError(res, err); }
// NOTE(review): findOne yields null when nothing is pending — this line
// would then throw; add a null guard before shipping.
notification.sent = true;
notification.save(function(err) {
if(err) { return handleError(res, err); }
return res.status(201).json(notification);
});
});
In the service worker we need to for sure get the subscription before we make the fetch.
// Push handler workaround: no payload arrives with the push, so fetch the
// queued notification from the backend, then display it.
self.addEventListener('push', function(event) {
event.waitUntil(
// Need the current subscription so the server can find our queue entry.
self.registration.pushManager.getSubscription().then(function(subscription) {
// NOTE(review): this fetch chain is not `return`ed to the enclosing
// .then, so waitUntil does not actually wait for it to finish.
fetch('/api/notifications/', {
method: 'post',
headers: {
// self.token is set via postMessage after registration (see below).
'Authorization': 'Bearer ' + self.token,
'Accept': 'application/json',
'Content-Type': 'application/json'
},
body: JSON.stringify(subscription)
})
.then(function(response) { return response.json(); })
.then(function(data) {
// Show the dequeued notification.
self.registration.showNotification(data.title, {
body: data.body,
icon: 'favicon-196x196.png'
});
})
.catch(function(err) {
console.log('err');
console.log(err);
});
})
);
});
It's also worth noting that the subscription object changed from Chrome 43 to Chrome 45. In Chrome 45 the subscriptionId property was removed, just something to look out for - this code was written to work with Chrome 43.
I wanted to make authenticated calls to my backend so I needed to figure out how to get the JWT from my Angular application to my service worker. I ended up using postMessage. Here's what I do after registering the service worker:
// After registering the service worker, push the auth token into it so the
// worker can make authenticated API calls.
navigator.serviceWorker.register('/service-worker.js', {scope:'./'}).then(function(reg) {
// NOTE(review): reg.installing is null once the worker is already installed,
// and controller is null on a hard reload — messenger may end up null here.
var messenger = reg.installing || navigator.serviceWorker.controller;
messenger.postMessage({token: $localStorage.token});
}).catch(function(err) {
console.log('err');
console.log(err);
});
In the service worker listen for the message:
// As written this calls addEventListener on the `onmessage` *property*
// rather than on `self`; per the surrounding text it only worked on
// Chrome 43 — the Chrome 45 form below attaches the listener to `self`.
self.onmessage.addEventListener('message', function(event) {
self.token = event.data.token;
});
Strangely enough, that listener works in Chrome 43 but not Chrome 45. Chrome 45 works with a handler like this:
// Store the token sent from the page so push handlers can authenticate.
self.addEventListener('message', function(event) {
self.token = event.data.token;
});
Right now push notifications take quite a bit of work to get something useful going - I'm really looking forward to payloads!
Actually, payload should be implemented in Chrome 50 (release date - April 19, 2016). In Chrome 50 (and in the current version of Firefox on desktop) you can send some arbitrary data along with the push so that the client can avoid making the extra request. All payload data must be encrypted.
Here is the the encryption details from developer : https://developers.google.com/web/updates/2016/03/web-push-encryption?hl=en
I just ran into this problem. Newer versions of firefox and chrome( version 50+) support payload transferring. The dev docs here details the implementation on how this works. An important thing to note is that google GCM or possibly client/chome (I dont know which one) will actually ignore the payload entirely if it is not encrypted.
This website has both client/server implementations of how to do the push and retrieval through service workers. The push library that examples use is merely a wrapper around a normal REST call
service worker example implementation:
// Push handler once payloads are supported (Chrome 50+ / Firefox):
// read the payload text directly off the event, with a fallback.
self.addEventListener('push', function(event) {
var payload = event.data ? event.data.text() : 'no payload';
event.waitUntil(
self.registration.showNotification('ServiceWorker Cookbook', {
body: payload,
})
);
});
Server example implementation:
// Server-side example using the web-push package (pre-VAPID era API).
var webPush = require('web-push');
webPush.setGCMAPIKey(process.env.GCM_API_KEY);
module.exports = function(app, route) {
// Registration endpoint — nothing to persist in this demo.
app.post(route + 'register', function(req, res) {
res.sendStatus(201);
});
// Sends an (optionally delayed) push carrying the payload from the request.
app.post(route + 'sendNotification', function(req, res) {
setTimeout(function() {
// NOTE(review): this argument shape (endpoint string + options object) is
// the legacy web-push API; current versions take a subscription object.
webPush.sendNotification(req.body.endpoint, {
TTL: req.body.ttl,
payload: req.body.payload,
userPublicKey: req.body.key,
userAuth: req.body.authSecret,
}).then(function() {
res.sendStatus(201);
});
// NOTE(review): no .catch — a failed send leaves the request hanging.
}, req.body.delay * 1000);
});
};
Client side javascript implementation example of printing out the the required fields.
// Registers the service worker, reuses or creates a push subscription, and
// prints the endpoint plus the two encryption keys the server needs.
navigator.serviceWorker.register('serviceWorker.js')
.then(function(registration) {
return registration.pushManager.getSubscription()
.then(function(subscription) {
if (subscription) {
return subscription;
}
return registration.pushManager.subscribe({
userVisibleOnly: true
});
});
}).then(function(subscription) {
var rawKey = subscription.getKey ? subscription.getKey('p256dh') : '';
// FIX: `key`, `authSecret` and `endpoint` were assigned without being
// declared, creating implicit globals (a ReferenceError in strict
// mode / ES modules). Declare them locally.
var key = rawKey ? btoa(String.fromCharCode.apply(null, new Uint8Array(rawKey))) : '';
var rawAuthSecret = subscription.getKey ? subscription.getKey('auth') : '';
var authSecret = rawAuthSecret ? btoa(String.fromCharCode.apply(null, new Uint8Array(rawAuthSecret))) : '';
var endpoint = subscription.endpoint;
console.log("Endpoint: " + endpoint);
console.log("Key: " + key);
console.log("AuthSecret: " + authSecret);
});
To retrieve that data, you need to parse "event.data.text()" to a JSON object. I'm guessing something was updated since you tried to get this to work, but it works now. Unlucky!
However, since I made it to this post when searching for a solution myself, others would probably like a working answer. Here it is:
// Push message event handler: parses the (JSON) payload delivered with the
// push and shows a notification built from it.
// NOTE(review): assumes the sender nests fields under a "data" key
// (payload.data.title / payload.data.body) — confirm against the sender.
self.addEventListener('push', function(event) {
// If true, the event holds data
if(event.data){
// Need to parse to JSON format
// - Consider event.data.text() the "stringify()"
// version of the data
var payload = JSON.parse(event.data.text());
// For those of you who love logging
console.log(payload);
var title = payload.data.title;
var body = payload.data.body;
var icon = './assets/icons/icon.ico'
var tag = 'notification-tag';
// Wait until payload is fetched
event.waitUntil(
self.registration.showNotification(title, {
body: body,
icon: icon,
tag: tag,
data: {} // Keeping this here in case I need it later
})
);
} else {
console.log("Event does not have data...");
}
}); // End push listener
// Notification Click event
self.addEventListener('notificationclick', function(event) {
console.log("Notification Clicked");
}); // End click listener
Personally, I will be creating a "generic" notification in case my data is funky, and will also be using try/catch. I suggest doing the same.
Follow these steps to achieve this:
In the browser:
You need to get the subscription object and save it, so your server has access to it: Read more about it
// Browser side: subscribe to push and persist subscription.toJSON() on the
// server so the server can address this client later.
navigator.serviceWorker.ready.then(serviceWorkerRegistration => {
serviceWorkerRegistration.pushManager.subscribe({userVisibleOnly: true})
.then(subscription => {
//save subscription.toJSON() object to your server
})});
In the server:
install web-push npm package
And send a web push like this:
// Server side: send a push with an encrypted payload using web-push.
// SECURITY NOTE(review): never hard-code real GCM/FCM server keys like the
// one below — load them from configuration, and rotate any key that has
// been published.
const webpush = require('web-push');
setImmediate(async () => {
// Payload plus (legacy-style) key material passed alongside it.
const params = {
payload: {title: 'Hey', body: 'Hello World'}
};
//this is the subscription object you should get in the browser. This is a demo of how it should look like
const subscription = {"endpoint":"https://android.googleapis.com/gcm/send/deC24xZL8z4:APA91bE9ZWs2KvLdo71NGYvBHGX6ZO4FFIQCppMsZhiTXtM1S2SlAqoOPNxzLlPye4ieL2ulzzSvPue-dGFBszDcFbSkfb_VhleiJgXRA8UwgLn5Z20_77WroZ1LofWQ22g6bpIGmg2JwYAqjeca_gzrZi3XUpcWHfw","expirationTime":null,"keys":{"p256dh":"BG55fZ3zZq7Cd20vVouPXeVic9-3pa7RhcR5g3kRb13MyJyghTY86IO_IToVKdBmk_2kA9znmbqvd0-o8U1FfA3M","auth":"1gNTE1wddcuF3FUPryGTZOA"}};
// Copy the browser-provided encryption keys into the send parameters.
if (subscription.keys) {
params.userPublicKey = subscription.keys.p256dh;
params.userAuth = subscription.keys.auth;
}
// this key you should take from firebase console for example
// settings -> cloud messaging -> Server key
webpush.setGCMAPIKey('AAAASwYmslc:APfA91bGy3tdKvuq90eOvz4AoUm6uPtbqZktZ9dAnElrlH4gglUiuvereTJJWxz8_dANEQciX9legijnJrxvlapI84bno4icD2D0cdVX3_XBOuW3aWrpoqsoxLDTdth86CjkDD4JhqRzxV7RrDXQZd_sZAOpC6f32nbA');
try {
const r = await webpush.sendNotification(subscription, JSON.stringify(params));
console.log(r);
}
catch (e) {
console.error(e);
}
});

Categories