Socket.io HTML - Send unique content to unique clients' HTML page - javascript

I'm using Node.js and Socket.IO. I read a query parameter from the user's URL (https://localhost:3000?id=12345), and based on that id I want to send this specific user unique content.
My problem is that all clients end up receiving the data for the last query made by the last client.
I've tried creating rooms and several other solutions that I thought of or found online, but with no luck.
I'm reading the query string in the page's route like this:
app.get('/', async ({ query }, response) => { ... });
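For reference, a minimal sketch of what that handler might do so that the key used in the client script below has a value (assumption: Express with a view engine configured; the question omits the handler body):
// Sketch only - the question omits the body of this handler.
// Assumes a template engine (e.g. EJS) that exposes `key` to the page script.
app.get('/', async ({ query }, response) => {
    const key = query.id; // e.g. 12345 from https://localhost:3000?id=12345
    response.render('index', { key }); // 'index' is a hypothetical template name
});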
Client
$(document).ready(function () {
    socket.emit('room', key);
    socket.on('a1', function (data) {
        $("#a1").empty().append(data);
    });
    socket.on('a2', function (data) {
        $("#a2").empty().append(data);
    });
    socket.on('a3', function (data) {
        $("#a3").empty().append(data);
    });
    socket.on('a4', function (data) {
        $("#a4").empty().append(data);
    });
…
Server
io.on('connection', function (socket) {
    console.log('Connection');
    socket.removeAllListeners();
    socket.on('create', function (data) {
        console.log(`Data: ${data}`);
        socket.join(data);
        io.in(data).emit('a1', a1);
        io.in(data).emit('a2', a2);
        io.in(data).emit('a3', a3);
        io.in(data).emit('a4', a4);
    })
});
…
I really can't understand how to keep each client's query separate on the server and then send unique content to each client without one client's content interfering with another's.
My initial thought was to create one room per client and send their unique content through that room, but for some reason the content ends up at every client.
Thank you very much!

You just need to restructure the object you use to pass the content around so it identifies each client, like this:
The client side:
$(document).ready(function () {
    const userKey = 'unique-id-for-each-user';
    socket.emit('room', { key: key, userKey: userKey });

    // anything the server emits on this event name (same as userKey) arrives here
    socket.on(userKey, function (data) {
        if (data.key === 'a1')
            $("#a1").empty().append(data.content);
        if (data.key === 'a2')
            $("#a2").empty().append(data.content);
        if (data.key === 'a3')
            $("#a3").empty().append(data.content);
    });

    // anything the server emits on the shared 'allClients' event arrives here for every client
    socket.on('allClients', function (data) {
        if (data.key === 'a1')
            $("#a1").empty().append(data.content);
        if (data.key === 'a2')
            $("#a2").empty().append(data.content);
        if (data.key === 'a3')
            $("#a3").empty().append(data.content);
    })
});
The server side:
io.on('connection', function (socket) {
    console.log('Connection');
    socket.removeAllListeners();
    socket.on('room', function (data) {
        console.log('Data:', data);
        if (data.key === 'a1')
            io.sockets.emit(data.userKey, { key: 'a1', content: a1 });
        if (data.key === 'a2')
            io.sockets.emit(data.userKey, { key: 'a2', content: a2 });
        if (data.key === 'a3')
            io.sockets.emit(data.userKey, { key: 'a3', content: a3 });
        if (data.key === 'a4')
            io.sockets.emit(data.userKey, { key: 'a4', content: a4 });
    })
});
As you can see, I just changed the object you pass as a parameter so it carries what's needed, on both the server side and the client side.
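An alternative sketch (not part of the answer above): the handler already has a reference to the particular socket that sent the 'room' event, so the server can emit back to that socket alone and no per-user event name is needed. lookupContentById below is a hypothetical helper standing in for however a1-a4 are produced from the client's id.
// Sketch: reply only to the requesting socket instead of broadcasting.
// lookupContentById is hypothetical; it stands for whatever builds
// a1..a4 from the id the client sent as data.key.
io.on('connection', function (socket) {
    socket.on('room', function (data) {
        const content = lookupContentById(data.key); // hypothetical helper
        socket.emit('a1', content.a1); // delivered only to this client
        socket.emit('a2', content.a2);
        socket.emit('a3', content.a3);
        socket.emit('a4', content.a4);
    });
});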

Related

How can different data be transferred from the server?

The client is an ESP32-CAM (camera module).
It sends an image to app.js every 3 seconds.
app.js
client.on("message", async(topic, message)=>{ // {"LED" : "ON"} or {"MOTER" : "ON"}
if(topic == "JPG")
{
var obj = message.toString();
app.get("/img", function(req,res,next){
res.set("content-Type", "text/json");
//res.send(JSON.stringify({data : obj}));
res.json({
data: obj || "no image yet"
});
console.log(obj);//number 1
});
console.log(obj);//number 2
}
}
Number 1 prints the same data every 3 seconds.
Number 2 prints different data every 3 seconds.
How can number 1 print different data each time?
HTML
$(function () {
    timer = setInterval(function () {
        console.log("timer")
        $.ajax({
            url: "http://ip:3000/img",
            type: "get",
            context: this,
            cache: false,
            error: function (request, status, error) {
                console.log("code:" + request.status + "\n" + "message:" + request.responseText + "\n" + "error:" + error);
            },
            success: function (obj) {
                $("#imguri").html('<img src="' + obj.data + '">');
                $("#imguri").attr("src", obj.data);
                console.log("mqtt in " + obj.data);
                // console.log("TEST");
            }
        });
    }, 3000);
});
That is my problem.
This is because you are stacking multiple app.get("/img", ...) routes, one for every incoming MQTT message.
Only the first registered one will ever be called, so the HTTP client always gets the very first image.
You should make the MQTT client callback store the new image in a module-level variable and register just one instance of the /img route that returns that variable.
var img;

client.on("message", async (topic, message) => { // {"LED" : "ON"} or {"MOTER" : "ON"}
    if (topic == "JPG") {
        // Remember the latest image; the route below serves it on demand.
        img = message.toString();
    }
});

// Registered once, outside the MQTT callback; always serves the most recent image.
app.get("/img", function (req, res, next) {
    // res.json() sets the Content-Type header itself.
    res.json({
        data: img || "no image yet"
    });
    console.log(img); // number 1 - now logs the latest image
});

webrtc video chat doesn't work when 3rd person joins the chat

I'm trying WebRTC for the first time for a video chat app, and I want up to 3 people in each chat. My code works fine with a 2-person chat,
but as soon as the 3rd person joins everything goes wrong: I get multiple video tags on the page and none of them are from the 3rd peer. I'd appreciate any pointers or suggestions; most tutorials only cover 2-person chat.
Here is the working URL:
https://chate-test-3000.herokuapp.com/
Here is my code:
const PEARS = [];
var video_counter = 0;
const STREAMES = [];
var myVideoArea = document.querySelector('#myvideo');
var configuration = {
    'iceServers': [{
        'url': 'stun:stun.l.google.com:19302'
    }]
};
var rtcPeerConn;
const ROOM = 'caht1';
const SIGNAL_ROOM = 'newsingal1234567898765';

io = io.connect("", { transports: ['websocket'] });
io.emit('ready', { chat_room: ROOM, signaling_room: SIGNAL_ROOM });
io.emit('signal', { text: 'ready for video ? ', room: SIGNAL_ROOM, type: 'user_here' });

io.on('signlaing_message', function (data) {
    console.log('signal recived');
    console.log(data);
    if (!PEARS.includes(data.pear_id)) {
        console.log('adding new pear --- ', data.pear_id);
        PEARS.push(data.pear_id);
        startSignaling(data.pear_id);
    }
    if (data.type != "user_here") {
        var message = JSON.parse(data.message);
        if (message.sdp) {
            rtcPeerConn.setRemoteDescription(new RTCSessionDescription(message.sdp), function () {
                // if we received an offer, we need to answer
                if (rtcPeerConn.remoteDescription.type == 'offer') {
                    rtcPeerConn.createAnswer(sendLocalDesc, logError);
                }
            }, logError);
        } else {
            rtcPeerConn.addIceCandidate(new RTCIceCandidate(message.candidate));
        }
    }
})

function startSignaling(pear_id) {
    if (!rtcPeerConn)
        rtcPeerConn = new RTCPeerConnection(configuration);
    // send any ice candidates to the other peer
    rtcPeerConn.onicecandidate = function (evt) {
        if (evt.candidate)
            io.emit('signal', { "type": "ice candidate", "message": JSON.stringify({ 'candidate': evt.candidate }), "room": SIGNAL_ROOM });
        displaySignalMessage("completed that ice candidate...");
    };
    // let the 'negotiationneeded' event trigger offer generation
    rtcPeerConn.onnegotiationneeded = function () {
        displaySignalMessage("on negotiation called");
        rtcPeerConn.createOffer(sendLocalDesc, logError);
    }
    // once remote stream arrives, show it in the remote video element
    rtcPeerConn.ontrack = function (evt) {
        displaySignalMessage("going to add their stream...");
        video_counter++;
        let vid = 'video-box-' + video_counter;
        console.log('adding new STREAM !!')
        console.log('###### streams ', evt.streams);
        if (!STREAMES.includes(evt.streams[0].id)) {
            STREAMES.push(evt.streams[0].id);
            $('#video-wrapper').append(`<video data-id="${evt.streams[0].id}" id="${vid}" autoplay loop autobuffer muted playsinline controls></video>`);
            console.log(' video length ..... ', $('#video-wrapper').find('#' + vid).length);
            var theirVideoArea = $('#video-wrapper').find('#' + vid)[0];
            console.log(theirVideoArea);
            theirVideoArea.srcObject = evt.streams[0];
            theirVideoArea.play();
        }
    };
    // get a local stream, show it in our video tag and add it to be sent
    navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia;
    navigator.getUserMedia({
        'audio': true,
        'video': true
    }, function (stream) {
        displaySignalMessage("going to display my stream...");
        myVideoArea.srcObject = stream
        myVideoArea.play();
        for (const track of stream.getTracks()) {
            rtcPeerConn.addTrack(track, stream);
        }
    }, logError);
}

function sendLocalDesc(desc) {
    rtcPeerConn.setLocalDescription(desc, function () {
        displaySignalMessage("sending local description");
        io.emit('signal', { "type": "SDP", "message": JSON.stringify({ 'sdp': rtcPeerConn.localDescription }), "room": SIGNAL_ROOM });
    }, logError);
}

function logError(error) {
    $('#error-area').append(`<div> ${error.name} : ${error.message}</div>`);
}

function displaySignalMessage(text) {
    $('#signal-area').append(`<div>${text}</div>`);
}
I also use a simple Node.js server for signaling and Socket.IO to connect to it.
------------------------- EDIT - PEER.JS -----------------
Here is my code after switching to PeerJS:
const SIGNAL_ROOM = 'zxsingalroom';
var MY_PEER_ID = '';
const CurrentPeers = [];

io = io.connect("", { transports: ['websocket'] });
io.emit('ready', { chat_room: ROOM, signaling_room: SIGNAL_ROOM });

var peer = new Peer({
    config: {
        'iceServers': [
            { url: 'stun:stun.l.google.com:19302' },
        ]
    } /* Sample servers, please use appropriate ones */
});

peer.on('open', function (id) {
    console.log('My peer ID is: ' + id);
    MY_PEER_ID = id;
    io.emit('peer_id_offer', { chat_room: ROOM, id: id });
});

peer.on('call', function (call) {
    navigator.mediaDevices.getUserMedia({ video: true, audio: true })
        .then((stream) => {
            call.answer(stream);
            call.on('stream', function (remoteStream) {
                if (!CurrentPeers.includes(call.peer)) {
                    CurrentPeers.push(call.peer);
                    addRemoteVideo(remoteStream);
                }
            })
        })
        .catch((e) => {
            console.log('error2', e);
        });
})

io.on('peer_id_recived', function (data) {
    console.log(`peer id recived : `);
    console.log(data);
    for (let [key, value] of Object.entries(data.peer_ids)) {
        if (value.peer_id != MY_PEER_ID) {
            callPeer(value.peer_id);
        }
    }
});

function callPeer(id) {
    console.log('calling peers 1 .... ');
    navigator.mediaDevices.getUserMedia({ video: true, audio: true })
        .then((stream) => {
            console.log('calling peers 2 .... ' + id);
            addOurVideo(stream);
            let call = peer.call(id, stream);
            console.log(typeof call);
            call.on('stream', function (remoteStream) {
                console.log('calling peers 3 .... ');
                if (!CurrentPeers.includes(call.peer)) {
                    CurrentPeers.push(call.peer);
                    addRemoteVideo(remoteStream);
                }
            })
        })
        .catch((e) => {
            console.log('error1', e);
        });
}

function addRemoteVideo(stream) {
    console.log(' adding remote stream!!!');
    let total_perrs = CurrentPeers.length;
    let vid = `video-box-${total_perrs}`;
    $('#video-wrapper').append(`<video id="${vid}" autoplay loop autobuffer muted playsinline controls></video>`);
    var theirVideoArea = $('#video-wrapper').find('#' + vid)[0];
    theirVideoArea.srcObject = stream;
    theirVideoArea.play();
}

function addOurVideo(stream) {
    console.log(' adding our stream');
    var ourVideArea = $('#video-wrapper').find('#our-video')[0];
    ourVideArea.srcObject = stream;
    ourVideArea.play();
}
You should use some sort of P2P mesh or a media server to handle multiple simultaneous connections from different clients; PeerJS is a great option.
As for the "WebRTC: ICE failed, add a TURN server and see about:webrtc for more details" error, it is exactly what it says: STUN servers are used to establish the connection, but if the P2P connection cannot be established, the fallback is for all the communication to go through a TURN server, so TURN servers need significant resources and bandwidth.
TURN servers are generally not free, but one open-source option that might fix your problem is running a coturn server: https://github.com/coturn/coturn
You should put the following example config in your PeerJS options:
"iceServers": [
{
"urls": "stun:vc.example.com:3478"
},
{
"urls": "turn:vc.example.com:3478",
"username": "coturnUser",
"credential": "coturnUserPassword"
}
],
You can also specify "iceTransportPolicy": "relay" alongside the ICE servers to route everything through the relay server only (no direct P2P).
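For reference, a minimal sketch of that config wired into the Peer constructor (vc.example.com and the credentials are placeholders from the example above, not real servers):
// Sketch: passing the example ICE config to PeerJS.
// The host and credentials are placeholders.
var peer = new Peer({
    config: {
        iceServers: [
            { urls: 'stun:vc.example.com:3478' },
            {
                urls: 'turn:vc.example.com:3478',
                username: 'coturnUser',
                credential: 'coturnUserPassword'
            }
        ],
        // Optional: force every connection through the TURN relay (no direct P2P).
        iceTransportPolicy: 'relay'
    }
});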

How do you retrieve a value from IndexedDB when using serviceworkers?

I am new to IndexedDB and service workers and am having a very difficult time understanding how to turn these into a functional application. I've done extensive reading on both, but even the "complete" examples don't incorporate the two.
I am tasked with creating an application that will allow users to work offline. The first time they connect to the site, I want to pull specific information from the database and store it in IndexedDB. When they go offline, I need to use that data to display information on the page. Certain interactions will cause the data to update, to be synced later once an internet connection is reestablished. At a high level, I understand how this works.
It is my understanding that we cannot call functions in the serviceworker.js file from the page due to the asynchronous nature of service workers. Additionally, serviceworker.js cannot directly update the DOM. However, the examples I have seen create and manage the IndexedDB data within serviceworker.js.
So let's say I have a file:
<!-- index.html -->
<html>
<body>
Hello <span id="name"></span>
</body>
</html>
And a serviceworker.js:
var CACHE_NAME = 'my-cache-v1';
var urlsToCache = [
    '/'
    // More to be added later
];

self.addEventListener('install', function (event) {
    // Perform install steps
    event.waitUntil(
        caches.open(CACHE_NAME)
            .then(function (cache) {
                console.log('Opened cache');
                return cache.addAll(urlsToCache);
            })
    );
});

self.addEventListener('activate', function (event) {
    event.waitUntil(
        createDB() // Use this function to create or open the database
    );
});

self.addEventListener('fetch', function (event) {
    event.respondWith(
        caches.match(event.request)
            .then(function (response) {
                // Cache hit - return response
                if (response) {
                    return response;
                }
                return fetch(event.request).then(
                    function (response) {
                        // Check if we received a valid response
                        if (!response || response.status !== 200 || response.type !== 'basic') {
                            return response;
                        }
                        var responseToCache = response.clone();
                        caches.open(CACHE_NAME)
                            .then(function (cache) {
                                cache.put(event.request, responseToCache);
                            });
                        return response;
                    }
                );
            })
    );
});

function createDB() {
    idb.open('mydata', 1, function (upgradeDB) {
        var store = upgradeDB.createObjectStore('user', {
            keyPath: 'id'
        });
        store.put({ id: 1, name: 'John Doe' }); // This can be updated with an AJAX call to the database later
    });
}
How do I now update the element "name" with the value for key = 1 from the "user" object store in the "mydata" database?
Depending on your use case, you've got several options:
1. You don't need the service worker for this. Just pull your data from IndexedDB directly from the page; the page context has full access to IndexedDB (a sketch of this follows below).
2. Set a template for your index.html. At the activate step in the service worker, pre-render the page with the value from IndexedDB and cache it.
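A minimal sketch of the first option, using the native IndexedDB API in the page script (database and store names are taken from the question; this assumes the store was already created by the service worker, and error handling is kept minimal):
// Page script (not the service worker): read user 1 from IndexedDB
// and put the name into the <span id="name"> element.
var openRequest = indexedDB.open('mydata', 1);

openRequest.onsuccess = function () {
    var db = openRequest.result;
    var tx = db.transaction('user', 'readonly');
    var getRequest = tx.objectStore('user').get(1);

    getRequest.onsuccess = function () {
        var user = getRequest.result;
        if (user) {
            document.getElementById('name').textContent = user.name;
        }
    };
};

openRequest.onerror = function () {
    console.error('Could not open IndexedDB', openRequest.error);
};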

Getting messages in chat history to display in messenger Pubnub

I have come to post this question after two days of torture, not being able to understand how I can actually display the historic messages stored on my PubNub storage account. To try to understand it at its most basic, I have made a chat app and used the history function as described in the SDK, but every time I refresh the page the messages are still lost. I have tried the backfill and restore attributes in subscribe with no luck. All I want to do is click refresh in Chrome and still see the messages.
<div><input id=input placeholder=you-chat-here /></div>
Chat Output
<div id=box></div>
<script src="https://cdn.pubnub.com/sdk/javascript/pubnub.4.4.0.min.js"></script>
<script>(function(){
var pubnub = new PubNub({ publishKey : 'demo', subscribeKey : 'demo' });
function $(id) { return document.getElementById(id); }
var box = $('box'), input = $('input'), channel = 'chat';
pubnub.addListener({
message: function(obj) {
box.innerHTML = (''+obj.message).replace( /[<>]/g, '' ) + '<br>' + box.innerHTML
}});
pubnub.history({
channel: 'chat',
reverse: true, // Setting to true will traverse the time line in reverse starting with the oldest message first.
count: 100, // how many items to fetch
callback : function(msgs) {
pubnub.each( msgs[0], chat );
}
},
function (status, response) {
// handle status, response
console.log("messages successfully retreived")
});
pubnub.subscribe({channels:[channel],
restore: true,
backfill: true,
ssl: true});
input.addEventListener('keyup', function(e) {
if ((e.keyCode || e.charCode) === 13) {
pubnub.publish({channel : channel, message : input.value,x : (input.value='')});
}
});
})();
</script>
</body>
EDIT: updated a link that was broken. The new version of the history function is called fetchMessages.
I think your history code is not correct. There is no need for the callback property; the response will be delivered to the function argument. This example is from the JavaScript SDK docs.
// deprecated function
pubnub.history(
    {
        channel: 'chat',
    },
    function (status, response) {
        var msgs = response.messages;
        if (msgs != undefined && msgs.length > 0) {
            // if msgs were retrieved, do something useful
            console.log(msgs);
        }
    }
);

// latest function (response output format has changed)
pubnub.fetchMessages(
    {
        channels: ['chat']
    },
    (status, response) => {
        // fetched messages are grouped per channel under response.channels
        console.log(response);
    }
);
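To actually get the history back into the page after a refresh, a minimal sketch (assuming it runs inside the same IIFE as the question's code so box and pubnub are in scope, and assuming the v4 response shape where each entry's payload is in its .message property) would loop over the fetched entries and prepend them to the box just like the live listener does:
pubnub.fetchMessages(
    {
        channels: ['chat'],
        count: 100
    },
    (status, response) => {
        if (status.error) {
            console.error('history fetch failed', status);
            return;
        }
        // Each entry wraps the originally published payload in .message.
        var entries = (response.channels && response.channels['chat']) || [];
        entries.forEach(function (entry) {
            box.innerHTML = ('' + entry.message).replace(/[<>]/g, '') + '<br>' + box.innerHTML;
        });
    }
);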

How to access tcp socket from another socket?

I have several client connections, and one should listen for data arriving on the others. How can I make a bridge between them (not an array iterator)?
Sample pseudo-code:
socket.on('data', function(chunk){
    decodeChunk(chunk.toString('hex'), function(response){
        if(response.name === 'toDB'){
            //write smth. to DB
        }
        if(response.name === 'readDB'){
            //watch all incoming tcp sockets with 'toDB' and continuously
            //send to client connected with this socket
        }
    });
})
I could use something like RabbitMQ, but not at the current stage; I want to keep development more agile.
Any suggestions appreciated.
It's a bit tricky to address this with purely pseudo code, but I'll give it a shot anyway.
If I understand this correctly, toDB and readDB are sample names and there can be multiple toDB connections. That means you'd also have a bunch of toXY sockets with a corresponding readXY, or a bunch of toPQ with a corresponding readPQ. If so, then as soon as a toSOMETHING connects, you can store that socket under a readSOMETHING key. If the key already exists, you simply append the toSOMETHING socket to its array. Over time it would look something like this:
var socketData = {
    readDB: {
        sockets: [
            <socket-object-toDB-1>,
            <socket-object-toDB-2>,
            <socket-object-toDB-3>
        ]
    },
    readXY: {
        sockets: [
            <socket-object-toXY-1>,
            <socket-object-toXY-2>,
            <socket-object-toXY-3>
        ]
    }
}
And then your pseudo code would probably look something like
socket.on('data', function(chunk){
    decodeChunk(chunk.toString('hex'), function(response){
        if(response.name === 'toDB'){
            //write smth. to DB
            if (socketData['readDB']) {
                // already exists, simply append incoming socket to its sockets array
                socketData['readDB'].sockets.push(socket);
            } else {
                // does not exist, add it with a sockets array
                socketData['readDB'] = {
                    sockets: [socket]
                };
            }
        }
        if(response.name === 'readDB'){
            // Write to all sockets of readDB
            socketData['readDB'].sockets.forEach(function(sock) {
                sock.write('Whatever message\r\n');
            });
        }
    });
})
I ended up with this service:
var SocketMedium = {
    sessions: {},
    onEvent: function (event, cb) {
        if (!this.sessions[event]) {
            this.sessions[event] = [];
            this.sessions[event].push(cb);
        } else {
            this.sessions[event].push(cb);
        }
        console.log('onEvent: ', event);
    },
    rTrigger: function (event, data) {
        console.log('Started rTrigger: ', event);
        if (this.sessions[event]) {
            console.log('FOUND: ', event);
            this
                .sessions[event]
                .forEach(function (cb) {
                    cb(data);
                })
        }
        console.log('Completed rTrigger', this.sessions);
    },
    cleaner: function (sessionId) {
        var event = 'session' + sessionId;
        if (this.sessions[event]) {
            delete this.sessions[event];
            console.log('SocketMedium cleaned: ', this.sessions[event]);
        }
        console.log('SocketMedium can not find: ', this.sessions[event]);
    }
};

module.exports = SocketMedium;
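For illustration, a sketch of how this medium might be wired into the TCP handler from the question (assumptions: it runs inside the same 'data' handler as the question's pseudo-code, decodeChunk and the 'toDB'/'readDB' names come from that pseudo-code, and the require path is hypothetical):
// Hypothetical wiring of SocketMedium into the question's pseudo-code.
const SocketMedium = require('./SocketMedium'); // hypothetical path

socket.on('data', function (chunk) {
    decodeChunk(chunk.toString('hex'), function (response) {
        if (response.name === 'readDB') {
            // Register this socket as a listener for future 'toDB' traffic.
            SocketMedium.onEvent('toDB', function (data) {
                socket.write(data);
            });
        }
        if (response.name === 'toDB') {
            // Fan the payload out to every registered 'readDB' listener.
            SocketMedium.rTrigger('toDB', chunk);
        }
    });
});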
