I am currently appending elements to my web page, and removing them from it, based on values stored in an array.
At the moment the changes only appear after I reload the page, but I need it all to happen on the fly without reloading: as soon as I append something I should see it, and the same goes for when I remove it.
I have been using socket.io server-side code to try to do this, but with mixed results. I'm not too experienced with socket.io and am still trying to get to grips with it.
Essentially I'm just trying to emit the data stored in the array chanArr in real time and append it to the table.
app.js file
This is my server-side code for socket.io; the function updateSip is what emits the data back to the client side.
var ari = require('ari-client');
var util = require('util');
var chanArr = [];
var express = require('express'),
app = express(),
server = require('http').createServer(app),
io = require('socket.io').listen(server);
//ARI client
ari.connect('http://localhost:8088', 'asterisk', 'asterisk', clientLoaded);
function clientLoaded(err, client) {
if (err) {
throw err;
}
// find or create a holding bridges
var bridge = null;
client.bridges.list(function (err, bridges) {
if (err) {
throw err;
}
bridge = bridges.filter(function (candidate) {
return candidate.bridge_type === 'mixing';
})[0];
if (bridge) {
console.log(util.format('Using bridge %s', bridge.id));
} else
client.bridges.create({
type : 'mixing'
}, function (err, newBridge) {
if (err) {
throw err;
}
bridge = newBridge;
console.log(util.format('Created bridge %s', bridge.id));
});
}
});
// handler for StasisStart event
function stasisStart(event, channel) {
console.log(util.format(
'Channel %s just entered our application, adding it to bridge %s',
channel.name,
bridge.id));
channel.answer(function (err) {
if (err) {
throw err;
}
bridge.addChannel({
channel : channel.id
}, function (err) {
var id = chanArr.push(channel.name)
updateSip();
console.log("User: " + channel.name);
if (err) {
throw err;
}
//If else statement to start music for first user entering channel, music will stop once more than 1 enters the channel.
if (chanArr.length <= 1) {
bridge.startMoh(function (err) {
if (err) {
throw err;
}
});
} else {
bridge.stopMoh(function (err) {
if (err) {
throw err;
}
});
}
});
});
}
// handler for StasisEnd event
function stasisEnd(event, channel) {
console.log(util.format(
'Channel %s just left our application', channel.name));
console.log(channel.name);
var index = chanArr.indexOf(channel.name);
chanArr.splice(index, 1);
updateSip();
}
client.on('StasisStart', stasisStart);
client.on('StasisEnd', stasisEnd);
client.start('bridge-hold');
}
//Socket.io logic here
server.listen(3009, function () {
console.log('listening on *:3009');
});
app.use(express.static(__dirname + '/public'));
app.get('/', function (req, res) {
res.sendfile(__dirname + "/testPage.html");
});
io.sockets.on('connection', function () {
updateSip();
});
io.sockets.on('updateSip', function () {
console.log('Being called!')
updateSip();
});
function updateSip() {
io.sockets.emit('sip', chanArr);
}
test.js
Client-side jQuery and socket.io code, where I am appending elements to the web page.
jQuery(function ($) {
var socket = io.connect();
var $sip = $('#sip');
socket.on('sip', function (data) {
var sip = '';
for (i = 0; i < data.length; i++) {
sip += data[i]
if(sip){
$sip.append('<tr>\
<td>' + sip + '</td>\
<td><input type="checkbox" data-on="Voice" data-off="Muted" checked data-toggle="toggle" data-onstyle="success" data-offstyle="danger"></td>\
<td><button class="btn btn-default kick" id="kick" data-toggle="modal" data-target="#myModal" type="submit">Kick</button></td>\
</tr>');
}
else{
$sip.append('Currently no extensions');
}
sip = '';
}
});
});
testPage.html
What I am appending to.
<div class="secondary-bridge">
<h3 class="conf-head">Conference call:</h3>
<table class="table table-bordered">
<thead>
<tr>
<th>Extension</th>
<th>Mute</th>
<th>Kick</th>
</tr>
</thead>
<tbody id ="sip">
</tbody>
</table>
EDIT:
At the moment I have to refresh the page so that it runs the updateSip function, which then emits and displays the newly appended values. Every time I add or remove a value from the array that drives the display, I have to refresh the page:
Page loads -> Array[] -> Call inserts value into array -> Array['SIP-448'] -> Reload page -> Can now see it appended.
I am trying to get it to work so I never have to refresh.
The first thing I tried before asking for advice was the method #show-me-the-code suggested, and the following happens: it does roughly what I want, but after one value it breaks, and values that are removed from the array still stay displayed on the web page:
[screenshot: table with 1 value]
[screenshot: table with more than 1 value]
[screenshot: table after refreshing the page]
When I then try to remove a value from the array, it appends the previous value again; once I refresh the page, only the one value that should remain is shown.
That is my big issue; I don't know why it is behaving the way it currently is.
Here's what I would do. On the server side where your chanArr changes (elements are added or removed), trigger an emit('sip', chanArr) call.
// right after adding channel to the array
chanArr.push(channel.name);
updateSip();
// also after removing it from the array if it is in different function
chanArr.splice(index, 1);
updateSip();
If I understood you correctly, you want to push updated data from your server to your client side.
The problem with your code is that you only call the function updateSip() when a client connects. If you want to update at other times, you have to add more code to your app.js.
You might, for instance, register an event listener on each connected socket, like
io.sockets.on('connection', function (socket) {
    socket.on('updateSip', function () {
        updateSip();
    });
});
and on your clientside you might create a function like
function updateSip() {
socket.emit('updateSip');
}
that way your server answers with the information you need whenever you call updateSip() in your test.js.
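As a side note on the duplicated and stale rows described in the EDIT: here is a minimal client-side sketch (assuming the same #sip tbody and 'sip' event as above) that rebuilds the table body on every event instead of appending to it, so removed extensions disappear and nothing is shown twice:
jQuery(function ($) {
    var socket = io.connect();
    var $sip = $('#sip');

    socket.on('sip', function (data) {
        // Rebuild the whole tbody on each update instead of appending,
        // so removed entries vanish and rows are never duplicated.
        $sip.empty();

        if (!data.length) {
            $sip.append('<tr><td colspan="3">Currently no extensions</td></tr>');
            return;
        }

        data.forEach(function (name) {
            $sip.append('<tr>' +
                '<td>' + name + '</td>' +
                '<td><input type="checkbox" data-on="Voice" data-off="Muted" checked data-toggle="toggle" data-onstyle="success" data-offstyle="danger"></td>' +
                '<td><button class="btn btn-default kick" data-toggle="modal" data-target="#myModal" type="submit">Kick</button></td>' +
                '</tr>');
        });
    });
});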
Related
I'm using a RabbitMQ queue to publish and receive messages between consumers, the main issue is that I want to receive a single message then exit. From other answers on this site I've seen that channel.get seems to be the best way to do this. However, I can't get it to work. This is the answer I've been using.
My current code:
var amqpChannel = null;
var queue = "test";
amqp.connect(cluster, (error0, connection) => {
if (error0) throw error0;
connection.createChannel((error1, channel) => {
if (error1) throw error1;
amqpChannel = channel;
});
});
var readMessage = function() {
if (amqpChannel)
{
amqpChannel.get(queue, (err, msg) => {
if (err) console.log(err);
if (msg)
{
console.log("Message received: " + msg.content.toString());
amqpChannel.ack(msg);
}
});
}
}
setTimeout(readMessage, 1000);
As far as I can see, it is identical to the code in the accepted answer above; however, I can't seem to get it to work. What am I missing?
Edit: Extra info
Using channel.consume works for me; it gets whatever messages are in the queue. However, using the channel.get method results in nothing. I have used console.log lines to ensure the channel is being created properly, but for some reason the channel.get callback is never triggered. I know that the connection and queue creation are working; I just can't seem to get the channel.get method to trigger at all.
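For comparison, the channel.consume pattern mentioned above looks roughly like this (just a sketch, assuming the same amqpChannel and queue variables as in the snippet above):
// Sketch: consume delivers every message pushed to the queue to the callback.
var consumeMessages = function() {
    if (amqpChannel)
    {
        amqpChannel.consume(queue, (msg) => {
            if (msg)
            {
                console.log("Message received: " + msg.content.toString());
                amqpChannel.ack(msg);
            }
        }, { noAck: false });
    }
}
setTimeout(consumeMessages, 1000);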
Edit 2: I found the issue
My callback signature wasn't correct. Looking at the documentation here, channel.get takes an options parameter before the callback; adding that fixed my issue. My working code is now:
var amqpChannel = null;
var queue = "test";
amqp.connect(cluster, (error0, connection) => {
if (error0) throw error0;
connection.createChannel((error1, channel) => {
if (error1) throw error1;
amqpChannel = channel;
});
});
var readMessage = function() {
if (amqpChannel)
{
amqpChannel.get(queue, {noAck: true}, (err, msg) => {
if (err) console.log(err);
if (msg)
{
console.log("Message received: " + msg.content.toString());
amqpChannel.ack(msg);
}
});
}
}
setTimeout(readMessage, 1000);
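One caveat on the working version (hedged, based on how the noAck option generally behaves in amqplib): with { noAck: true } the broker treats the message as acknowledged as soon as it is delivered, so the explicit amqpChannel.ack(msg) call is unnecessary and may even be rejected as an unknown delivery tag. A sketch of the manual-ack variant:
// Manual acknowledgement (sketch): noAck defaults to false, so the explicit
// ack below is what actually removes the message from the queue.
amqpChannel.get(queue, { noAck: false }, (err, msg) => {
    if (err) return console.log(err);
    if (msg) {
        console.log("Message received: " + msg.content.toString());
        amqpChannel.ack(msg);
    }
});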
I'm writing a simple online video game with Node.js and I want to manage each player's score by saving it in a database (MySQL).
On the server side I currently have a piece of code like this:
socket.on('game_over',function(data){
for (var i = 0; i < players.length; i++) {
if(players[i].id == data.id){
var sql;
sql='UPDATE login SET email=? WHERE username=?'
connection.query(sql, [data.score,"d"],function(error, results, fields) {
console.log(sql);
console.log(error);
if (error) throw error;
console.log(result);
});
players.splice(i,1);
break;
}
}
socket.broadcast.emit('p_disconnect',data.id);
});
When I start my server and a game_over signal is received, my server disconnects.
The printed SQL query is correct, and I don't see any error since it returns 'null'.
Why does my server disconnect after that, and more importantly, what can I do to keep the server up?
Without the connection.query part it works like it should.
Based on the comment on the question:
// assuming
const players = [
    { id: 1, otherInfo: 'foobar' },
    { id: 2, otherInfo: 'foobar' },
]
const connection = mysql.createConnection(/* connection options */) // something like this

// When the game ends we assume that the game_over event is fired
socket.on('game_over', function (data) {
    // "I need to find the correct player"
    const correctPlayer = players.find(player => player.id === data.id)

    // "and delete it from the list of the active players"
    const position = players.indexOf(correctPlayer)
    players.splice(position, 1)

    // "and update the database with its score" => depends on your DB structure
    const query = `UPDATE yourTable SET score = ${data.score} WHERE playerId = ${correctPlayer.id}`

    // how you handle the query result depends on what you need (two examples)

    // 1) run the query (it is async, because of how js works) and just log the result
    connection.query(query, function (error, results, fields) {
        // this code is executed when the query ends
        console.log(error, results, fields)
    })
    // this code is executed right after the query is started
    socket.broadcast.emit('p_disconnect', data.id)

    // 2) or run the query and emit the event only after the query ends
    connection.query(query, function (error, results, fields) {
        socket.broadcast.emit('p_disconnect', data.id)
        console.log(error, results, fields)
    })
})
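A note on the query above: interpolating data.score and the player id straight into the SQL string works, but since those values come from the client it is safer to use placeholders, as the question's original code already does. A sketch of that variant (same hypothetical table and column names as above):
// Placeholder variant (sketch): the mysql driver escapes the values itself.
const safeQuery = 'UPDATE yourTable SET score = ? WHERE playerId = ?'
connection.query(safeQuery, [data.score, correctPlayer.id], function (error, results, fields) {
    console.log(error, results, fields)
    socket.broadcast.emit('p_disconnect', data.id)
})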
I am using Sails v0.11 and am developing a standalone importer script in order to import data into MongoDB and (this is the part that is not working) build the associations between the models.
For this process I introduced temporary helper properties in the models in order to find the associated records and replace them with real MongoDB _ids.
The script starts Sails in order to be able to use its features (Waterline, etc.):
var app = Sails();
app.load({
hooks: { grunt: false },
log: { level: 'warn' }
}, function sailsReady(err){
processUsers() finds all users and their _ids and iterates over them to invoke a second function addOrgsToOneUser()
var processUsers = function() {
// Iterate through all users in order to retrieve their _ids and
app.models['user'].native(function(err, collection) {
collection.find({}, projectionOrgInUser).toArray(function (err, users) {
Async.eachSeries(users, function (user, next){
// prepare userInOrgs
whereUserInOrg = { orgId: { $in: userInOrgs } };
//This is invoking
addOrgsToOneUser(user, whereUserInOrg);
next();
}, function afterwards (err) {
if (err) {
console.error('Import failed, error details:\n',err);
return process.exit(1);
}
console.log("done");
return process.exit(0); // This returns too early, not executing the addOrgsToOneUser
});
});
});
};
addOrgsToOneUser() finds all orgs belonging to THIS user and then updates the orgs array property of THIS user
var addOrgsToOneUser = function(user, whereUserInOrg) {
var projectionUserInOrg = "...";
// Find all orgs that this user is associated to and store it in inOrgs
app.models['org'].native(function(err, collection) {
collection.find(whereUserInOrg, projectionUserInOrg).toArray(function (err, orgs) {
// prepare inOrgs which is needed for updating
//update user to have an updated orgs array based on inOrgs.
app.models['user'].update({'id' : user._id.toString()}, {'orgs': inOrgs}).exec(function afterwards(err, updated){
console.log('Updated user ' + user._id.toString() + ' to be in their orgs');
});
});
});
}
Problem:
process.exit(0) is called before the query/update in addOrgsToOneUser() has completed. It behaves as expected if addOrgsToOneUser() contains just a console.log, for instance, but the queries are of course triggered asynchronously.
If I comment out process.exit(0), the script never stops, but the queries are executed as intended.
As the script will have further nested queries, I need a better approach than manually killing the script ...
How is nesting queries and iterating over their results done properly?
Thank you very much,
Manuel
addOrgsToOneUser is asynchronous. next() needs to be called after everything is done inside addOrgsToOneUser. The way I would do it is to pass in a callback (next) and call it when everything is done. So the call is
addOrgsToOneUser(user, whereUserInOrg, next);
and the addOrgsToOneUser will have an extra argument:
var addOrgsToOneUser = function(user, whereUserInOrg, callback) {
var projectionUserInOrg = "...";
// Find all orgs that this user is associated to and store it in inOrgs
app.models['org'].native(function(err, collection) {
collection.find(whereUserInOrg, projectionUserInOrg).toArray(function (err, orgs) {
// prepare inOrgs which is needed for updating
//update user to have an updated orgs array based on inOrgs.
app.models['user'].update({'id' : user._id.toString()}, {'orgs': inOrgs}).exec(function afterwards(err, updated){
console.log('Updated user ' + user._id.toString() + ' to be in their orgs');
callback(); // your original next() is called here
});
});
});
}
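For completeness, the call site in processUsers() then just forwards next instead of calling it right away; a sketch based on the loop from the question:
Async.eachSeries(users, function (user, next) {
    // prepare userInOrgs as before ...
    var whereUserInOrg = { orgId: { $in: userInOrgs } };
    // addOrgsToOneUser calls next() only once its update has finished
    addOrgsToOneUser(user, whereUserInOrg, next);
}, function afterwards (err) {
    if (err) {
        console.error('Import failed, error details:\n', err);
        return process.exit(1);
    }
    console.log("done");
    return process.exit(0); // now runs only after every user has been processed
});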
I've written a basic Node app (my first) to insert many CSV rows into Mongo (the items array in the code below). Once all items have been inserted, the DB connection should be closed and the program exited.
The issue I've been working on is figuring out when to close the DB connection once all inserts have returned a result. I've gotten it working by counting all of the insert result callbacks, but to me this feels clunky. I know one improvement I could make is to batch the inserts by passing an array to the insert function, but I would still need my code to be aware of when all inserts have completed (assuming it would be bad to insert 100k items in one query). Is there a better way (my code feels hacky) to do this?
Hack part...
function (err, result) {
queryCompletedCount++;
if (err) console.log(err);
//Not sure about doing it this way
//Close db once all queries have returned a result
if (queryCompletedCount === items.length) {
db.close();
console.log("Finish inserting data: " + new Date());
}
}
Full insert code
MongoClient.connect(dbConnectionURL, function (err, db) {
if (err) {
console.log("Error connecting to DB: " + err);
} else {
var productCollection = db.collection('products');
console.log("Connected to DB");
console.log("Start inserting data: " + new Date());
var queryCompletedCount = 0;
for (var i = 0; i < items.length; i++) {
productCollection.insert([{
manufacturerCode: null,
name: items[i].name,
description: null
}], function (err, result) {
queryCompletedCount++;
if (err) console.log(err);
//Not sure about doing it this way
//Close db once all queries have returned a result
if (queryCompletedCount === items.length) {
db.close();
console.log("Finish inserting data: " + new Date());
}
});
}
}
});
What do you think about solving this with the async module, like this:
var async = require('async');

async.eachSeries(items, function (item, next) {
    productCollection.insert([{
        manufacturerCode: null,
        name: item.name,
        description: null
    }], function (err, result) {
        if (err) {
            return next(err);
        }
        next();
    });
}, function () {
    // this will be called after all insertions have completed
    db.close();
    console.log("Finish inserting data: " + new Date());
});
What you need here is MongoDB's Write Concern, configured in the strictest way.
There are two levels of Write Concern. The first is the write mode ("w"), where the call returns only once the result has been written to the configured number of mongod instances. In your case I suppose there is a single instance, but for the future you may configure it as "w": "majority". The second level is the journal concern, where by setting "j": 1 your call returns only once the data has been written to the journal.
So in your case the strictest Write Concern configuration would be {"w": "majority", "j": 1}. Just add it as the options argument of your insert call, before the callback.
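For example, applied to the insert call from the question it might look like this (a sketch; in the MongoDB Node.js driver the write concern goes in the options argument, before the callback):
// Insert with a strict write concern (sketch): the callback fires only after
// the write has been journaled and acknowledged by the configured members.
productCollection.insert([{
    manufacturerCode: null,
    name: items[i].name,
    description: null
}], { w: "majority", j: 1 }, function (err, result) {
    if (err) console.log(err);
});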
I created code like this to fetch news from another website's XML export, and I am trying to fill my database with it.
function UpdateLunchTime() {
var httpRequest = require('request');
var xml2js = require('xml2js');
var parser = new xml2js.Parser();
var url = 'http://www...com/export/xml/actualities';
httpRequest.get({
url: url
}, function(err, response, body) {
if (err) {
console.warn(statusCodes.INTERNAL_SERVER_ERROR,
'Some problem.');
} else if (response.statusCode !== 200) {
console.warn(statusCodes.BAD_REQUEST,
'Another problem');
} else {
//console.log(body);
parser.parseString(body, function (err2, result) {
//console.log(result.Root.event);
var count = 0;
for (var i=0;i<result.Root.event.length;i++)
{
//console.log(result.Root.event[i]);
InsertActionToDatabase(result.Root.event[i]);
}
/*
result.Root.event.forEach(function(entry) {
InsertActionToDatabase(entry);
});
*/
});
}
});
}
function InsertActionToDatabase(action)
{
var queryString = "INSERT INTO Action (title, description, ...) VALUES (?, ?, ...)";
mssql.query(queryString, [action.akce[0], action.description[0],...], {
success: function(insertResults) {
},
error: function(err) {
console.log("Problem: " + err);
}
});
}
For individual actualities it works fine, but when I run it over the whole XML I get this error:
Error: [Microsoft][SQL Server Native Client 10.0][SQL Server]Resource ID : 1. The request limit for the database is 180 and has been reached. See 'http://go.microsoft.com/fwlink/?LinkId=267637' for assistance.
And for the last few objects I get this error:
Error: [Microsoft][SQL Server Native Client 10.0]TCP Provider: Only one usage of each socket address (protocol/network address/port) is normally permitted.
Thanks for the help.
The problem is that you're trying to make too many concurrent (insert) operations against your database. Remember that in node.js (almost) everything is asynchronous, so when you call InsertActionToDatabase for one of the items the operation starts right away and the call returns without waiting for it to finish. You're therefore effectively trying to insert all of the events at once, and as the error message says there's a limit on the number of concurrent connections that can be made to the SQL server.
What you need to do is change your loop so the operations run one after another, waiting for one operation to complete before starting the next (you can also send a smaller "batch" of operations at once and continue after each batch completes, which is a little more code; see the sketch at the end of this answer), as shown below.
var count = result.Root.event.length;
var insertAction = function(index) {
if (index >= count) return;
InsertActionToDatabase(result.Root.event[index], function() {
insertAction(index + 1);
});
}
insertAction(0);
And the InsertActionToDatabase function would take a callback parameter to be called when it's done.
function InsertActionToDatabase(item, done) {
    var table = tables.getTable('event');
    table.insert(item, {
        success: function() {
            console.log('Inserted event: ', item);
            done();
        },
        error: function(err) {
            // report the failure and let the caller decide how to continue
            console.log("Problem: " + err);
            done(err);
        }
    });
}
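And if strictly sequential inserts turn out to be too slow, a bounded-concurrency variant is a middle ground. A sketch using the async module (an extra dependency that is not part of the original code), assuming InsertActionToDatabase reports errors to its callback as above:
var async = require('async');

// Inside the parser.parseString callback, where result.Root.event is available:
// run at most 10 inserts at a time; the final callback fires once every event
// has been processed (or as soon as one of them reports an error).
async.eachLimit(result.Root.event, 10, function (event, done) {
    InsertActionToDatabase(event, done);
}, function (err) {
    if (err) console.warn('Import finished with an error: ' + err);
    else console.log('All events inserted');
});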