I'm running the node-rdkafka code below in Eclipse as a Node.js application. It is the sample code from https://blizzard.github.io/node-rdkafka/current/tutorial-producer_.html
I want to run this on a test server and call it from an iOS mobile application.
I know about running a Node.js app on AWS.
Question I: Are there any other options for running this in a free test server environment, similar to Tomcat?
Question II: Even if I am able to run this Node.js app on a server, how do I call it from a mobile application? Do I need to call producer.on('ready', function(arg)), or what function do I need to call from the mobile app?
var Kafka = require('node-rdkafka');
//console.log(Kafka.features);
//console.log(Kafka.librdkafkaVersion);

var producer = new Kafka.Producer({
  'metadata.broker.list': 'localhost:9092',
  'dr_cb': true
});

var topicName = 'MyTest';

//logging debug messages, if debug is enabled
producer.on('event.log', function(log) {
  console.log(log);
});

//logging all errors
producer.on('event.error', function(err) {
  console.error('Error from producer');
  console.error(err);
});

//counter to stop this sample after maxMessages are sent
var counter = 0;
var maxMessages = 10;

producer.on('delivery-report', function(err, report) {
  console.log('delivery-report: ' + JSON.stringify(report));
  counter++;
});

//Wait for the ready event before producing
producer.on('ready', function(arg) {
  console.log('producer ready.' + JSON.stringify(arg));

  for (var i = 0; i < maxMessages; i++) {
    var value = new Buffer('MyProducerTest - value-' + i);
    var key = "key-" + i;
    // if partition is set to -1, librdkafka will use the default partitioner
    var partition = -1;
    producer.produce(topicName, partition, value, key);
  }

  //need to keep polling for a while to ensure the delivery reports are received
  var pollLoop = setInterval(function() {
    producer.poll();
    if (counter === maxMessages) {
      clearInterval(pollLoop);
      producer.disconnect();
    }
  }, 1000);
});

/*
producer.on('disconnected', function(arg) {
  console.log('producer disconnected. ' + JSON.stringify(arg));
});*/

//starting the producer
producer.connect();
First of all, you need an HTTP server. ExpressJS can be used. Then, just tack on the Express code basically at the end, but move the producer loop into the request route.
So, start with what you had:
var Kafka = require('node-rdkafka');
//console.log(Kafka.features);
//console.log(Kafka.librdkafkaVersion);

var producer = new Kafka.Producer({
  'metadata.broker.list': 'localhost:9092',
  'dr_cb': true
});

var topicName = 'MyTest';

//logging debug messages, if debug is enabled
producer.on('event.log', function(log) {
  console.log(log);
});

//logging all errors
producer.on('event.error', function(err) {
  console.error('Error from producer');
  console.error(err);
});

producer.on('delivery-report', function(err, report) {
  // the counter from the original sample is gone, so just log the report
  console.log('delivery-report: ' + JSON.stringify(report));
});

//Wait for the ready event before producing
producer.on('ready', function(arg) {
  console.log('producer ready.' + JSON.stringify(arg));
});

producer.on('disconnected', function(arg) {
  console.log('producer disconnected. ' + JSON.stringify(arg));
});

//starting the producer
producer.connect();
Then, you can add this in the same file.
var express = require('express')
var app = express()

app.get('/', (req, res) => res.send('Ready to send messages!'))

app.post('/:maxMessages', function (req, res) {
  if (req.params.maxMessages) {
    var maxMessages = parseInt(req.params.maxMessages);
    for (var i = 0; i < maxMessages; i++) {
      var value = Buffer.from('MyProducerTest - value-' + i); // Buffer.from replaces the deprecated new Buffer()
      var key = "key-" + i;
      // if partition is set to -1, librdkafka will use the default partitioner
      var partition = -1;
      producer.produce(topicName, partition, value, key);
    } // end for
    // respond so the HTTP client is not left hanging
    res.send('Produced ' + maxMessages + ' messages to ' + topicName);
  } // end if
}); // end app.post()

app.listen(3000, () => console.log('Example app listening on port 3000!'))
I don't think the poll loop is necessary since you don't care about the counter anymore.
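If you still want the delivery reports to fire without a manual loop, node-rdkafka can poll on a timer instead; a minimal sketch (the 100 ms interval is just an illustrative choice, and this is the same call used in the promise-based answer further down):

// let the client poll for events, including delivery reports, every 100 ms
producer.setPollInterval(100);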
Now, point your mobile app at http://<your server IP>:3000/ and send test messages with a POST request to, for example, http://<your server IP>:3000/10, adjusting the path parameter to change the number of messages to send.
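Before wiring up the iOS client, the route can be exercised with any HTTP client; here is a small sketch using Node's built-in fetch (Node 18+), assuming the Express app above is running locally:

// POST to the route above; the path parameter (10) is the number of messages to produce
fetch('http://localhost:3000/10', { method: 'POST' })
  .then(res => res.text())
  .then(body => console.log('server replied:', body))
  .catch(err => console.error('request failed:', err));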
I might be late on this, but this is how I did it using promises, and I found it better than having a timeout, etc.
const postMessageToPublisher = (req, res) => {
  return new Promise((resolve, reject) => {
    producer.connect();
    producer.setPollInterval(globalConfigs.producerPollingTime);

    const requestBody = req.body;
    const actualBody = requestBody.data;
    const topicName = requestBody.topicName;
    const key = requestBody.key || uuid();
    const partition = requestBody.partition || undefined;
    const data = Buffer.from(JSON.stringify(actualBody));

    /**
     * Actual messages are sent here when the producer is ready
     */
    producer.on(kafkaEvents.READY, () => {
      try {
        producer.produce(
          topicName,
          partition,
          data,
          key // setting key user provided or UUID
        );
      } catch (error) {
        reject(error);
      }
    });
    // Register listener for debug information; only invoked if debug option set in driver_options
    producer.on(kafkaEvents.LOG, log => {
      logger.info('Producer event log notification for debugging:', log);
    });

    // Register error listener
    producer.on(kafkaEvents.ERROR, err => {
      logger.error('Error from producer:' + JSON.stringify(err));
      reject(err);
    });

    // Register delivery report listener
    producer.on(kafkaEvents.PUBLISH_ACKNOWLEDGMENT, (err, ackMessage) => {
      if (err) {
        logger.error(
          'Delivery report: Failed sending message ' + ackMessage.value
        );
        logger.error('and the error is :', err);
        reject({ value: ackMessage.value, error: err });
      } else {
        resolve({
          teamName: globalConfigs.TeamNameService,
          topicName: ackMessage.topic,
          key: ackMessage.key.toString()
        });
      }
    });
  });
};
Please note that kafkaEvents contains my constants for the events we listen to; it is just a mapping, e.g. kafkaEvents.LOG is the same as 'event.log'.
The calling function expects this to return a promise, so it uses .then(data => /* send your response to the user from here */) and .catch(error => /* send the error response to the user */).
This is how I achieved it using promises.
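As a rough illustration of that calling side, assuming an Express app and the postMessageToPublisher function above (the route path and response shape are my own choices):

// sketch: an Express route that resolves the promise above and reports the outcome
app.post('/publish', (req, res) => {
  postMessageToPublisher(req, res)
    .then(data => res.status(200).json(data))
    .catch(error => res.status(500).json({ error: String(error) }));
});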
Related
So, for a course I'm taking, we're coding a UDP pinger in JavaScript, using Node.js and dgram. We've been given the following assignment:
Create the client code for an application. Your client should send 10 ping messages to the target UDP server. For each message, your client should calculate the round trip time from when the package is sent to when the response is received. Should a package be dropped along the way, the client is to handle this as well. This should be done by having the client wait 1 second for a response after sending each package. If no reply is received, the client should log accordingly (package lost, no response, timeout, etc.) and send a new package to try again. However, the total amount of packages sent should still only be 10. The client should also calculate a percentage of packages lost/no response received, and log this before connection is closed.
This of course seems rather straightforward, and I thought so too. I've been coding it for a while and I'm almost finished, but I'm having issues with making the client send a package, await a response, and then act accordingly.
So far, what my code does is basically send a ping and, when a pong is received, send another ping. What I can't figure out is how to make it log that a response wasn't received before sending the next package. In other words, I know how to make it react to a received response; I just don't know how to make it react if no response arrives within the set timeframe. I've tried playing around with if-statements and loops, as well as async functions, but I haven't made it work yet, so now I'm asking for help.
Code is here:
const dgram = require("dgram");
const ms = require("ms");

var client = dgram.createSocket("udp4");

const PORT = 8000;
const HOST = "localhost";

let today = "";
let t0 = "";
let t1 = "";
let RTT = "";
let sentPackages = "";
let receivedPackages = "";

const messageOutbound = Buffer.from("You Up?");

sendPackage();
const x = setInterval(sendPackage, 1000);

client.on("message", (message, remote) => {
  receivedPackages++
  today = new Date();
  t1 = today.getTime();
  console.log(
    `Message from: ${remote.address}:${remote.port} saying: ${message}`
  );
  RTT = ms(t1 - t0, { long: true });
  console.log(RTT);
  const x = setInterval(sendPackage, 1000);
});

client.on('error', (err) => {
  console.log(`server error:\n${err.stack}`);
  server.close();
});

async function sendPackage() {
  if (sentPackages < 10) {
    client.send(messageOutbound, 0, messageOutbound.length, PORT, HOST, () => {
      sentPackages++
      let today = new Date();
      t0 = today.getTime();
      console.log(
        `message has been sent to ${HOST}:${PORT}. Message sent at: ${t0}`
      );
    });
  } else {
    calculateLoss();
    client.close();
  }
};

function calculateLoss() {
  let amountLost = sentPackages - receivedPackages;
  let percentageLoss = amountLost / sentPackages * 100
  console.log(amountLost);
  console.log(percentageLoss + "% of packages lost");
};
I would use async / await to simply wait 1000ms / 1s between messages, then keep track of all messages in an array.
We identify messages with a uuid, so we can ensure that messages we receive can be matched to those we send.
We can then log all the required statistics afterwards:
const dgram = require("dgram");
const uuid = require('uuid');
const PORT = 8000;
const HOST = "localhost";
const client = dgram.createSocket("udp4");
// Array that keeps track of the messages we send
let messages = [];
// When we get a message, decode it and update our message list accordingly...
client.on("message", (messageBuffer, remote) => {
let receivedMessage = bufferToMessage(messageBuffer);
// Find the message we sent and set the response time accordingly.
let message = messages.find(message => message.uuid === (receivedMessage ||{}).uuid);
if (message) {
message.responseTimestamp = new Date().getTime();
}
});
client.on('error', (err) => {
console.log(`server error:\n${err.stack}`);
server.close();
});
function createMessage() {
  return { uuid: uuid.v4() };
}

function messageToBuffer(message) {
  return Buffer.from(JSON.stringify(message), "utf-8");
}

function bufferToMessage(buffer) {
  try {
    return JSON.parse(buffer.toString("utf-8"));
  } catch (error) {
    return null;
  }
}

// Wait for timeout milliseconds
function wait(timeout) {
  return new Promise(resolve => setTimeout(resolve, timeout));
}

function sendMessage(message, port, host) {
  // Save the messages to our list...
  messages.push(message);
  console.log(`Sending message #${messages.length}...`);
  // Set the time we send out message...
  message.sentTimestamp = new Date().getTime();
  let messageBuffer = messageToBuffer(message);
  return new Promise((resolve, reject) => {
    client.send(messageBuffer, 0, messageBuffer.length, port, host, (error, bytes) => {
      if (error) {
        reject(error);
      } else {
        resolve(bytes);
      }
    })
  });
}

async function sendMessages(messageCount, port, host, timeout) {
  for (let messageIndex = 0; messageIndex < messageCount; messageIndex++) {
    let message = createMessage();
    await sendMessage(message, port, host);
    await wait(timeout);
    if (message.responseTimestamp) {
      console.log(`Response received after ${message.responseTimestamp - message.sentTimestamp} ms...`);
    } else {
      console.log(`No response received after ${timeout} ms...`);
    }
  }
  logStatistics(messages);
}

function logStatistics(messages) {
  let messagesSent = messages.length;
  let messagesReceived = messages.filter(m => m.responseTimestamp).length;
  let messagesLost = messagesSent - messagesReceived;
  console.log(`Total messages sent: ${messagesSent}`);
  console.log(`Total messages received: ${messagesReceived}`);
  console.log(`Total messages lost: ${messagesLost} / ${(100 * messagesLost / (messages.length || 1)).toFixed(2)}%`);
  if (messagesReceived > 0) {
    console.log(`Average response interval:`, messages.filter(m => m.responseTimestamp).reduce((averageTime, message) => {
      averageTime += (message.responseTimestamp - message.sentTimestamp) / messagesReceived;
      return averageTime;
    }, 0) + " ms");
  }
}
sendMessages(10, PORT, HOST, 1000);
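To try this locally, a minimal UDP echo server is enough on the other end; a sketch assuming port 8000, to match the PORT constant above:

const dgram = require("dgram");
const server = dgram.createSocket("udp4");

// echo each datagram straight back to its sender so the client can measure the RTT
server.on("message", (message, remote) => {
  server.send(message, 0, message.length, remote.port, remote.address);
});

server.bind(8000, "localhost");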
I am able to manually telnet to linux345 at port 2345.
This means the following code should print 0.
However, the code prints 1; the final console.log runs before the asynchronous connect callback has fired.
It seems that converting the callback to an async/promise style would resolve the issue.
Please suggest what the updated code would look like.
const net = require('net');

const HOST = 'linux345';
const PORT = 2345;

let ErrCode = 1;
const client = new net.Socket();

client.connect(PORT, HOST, function() {
  ErrCode = 0;
});

client.on('data', function(data) {
  console.log('Client received: ' + data);
  if (data.toString().endsWith('exit')) {
    client.destroy();
  }
});

client.on('close', function() {
});

client.on('error', function(err) {
  ErrCode = err.code;
  console.log(ErrCode);
});

console.log(ErrCode);
// requires and constants carried over from the question's snippet
const net = require('net');
const util = require('util');

const HOST = 'linux345';
const PORT = 2345;

let ErrCode = 1;
const client = new net.Socket();

// bind connect to the socket so `this` is preserved when promisified
const connect = util.promisify(client.connect.bind(client));

async function testFun() {
  try {
    // note: the connect callback receives no payload, so `data` may be undefined;
    // incoming data still arrives via the socket's 'data' event
    let data = await connect(PORT, HOST);
    ErrCode = 0;
    console.log('Client received: ' + data);
    if (data && data.toString().endsWith('exit')) {
      client.destroy();
    }
  } catch (ex) {
    ErrCode = -1;
  }
}

testFun().then(() => {
  console.log(ErrCode)
})
This uses util.promisify, which, per the Node.js docs, "Takes a function following the common error-first callback style, i.e. taking a (err, value) => ... callback as the last argument, and returns a version that returns promises."
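One caveat: a net.Socket reports connection failures through its 'error' event rather than through the connect callback, so another way to get a promise is to wrap those events directly. A sketch of that alternative (the helper name is my own):

const net = require('net');

// wrap connection success/failure in a promise using the 'connect' and 'error' events
function connectAsync(port, host) {
  return new Promise((resolve, reject) => {
    const socket = net.createConnection(port, host);
    socket.once('connect', () => resolve(socket));
    socket.once('error', reject);
  });
}

connectAsync(2345, 'linux345')
  .then(socket => { console.log(0); socket.destroy(); })
  .catch(err => { console.log(1, err.code); });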
I've been able to create the database and query it. Following Microsoft's tutorial on using Node.js to query, I accomplished this with the code below:
// Simple Query
"use strict";

var documentClient = require("documentdb").DocumentClient;
var config = require("./config");
var url = require('url');

// use the previously saved config.endpoint and config.primaryKey to create a new DocumentClient
var client = new documentClient(config.endpoint, { "masterKey": config.primaryKey });

// These urls are how the DocumentDB client will find the right database and collection.
var HttpStatusCodes = { NOTFOUND: 404 };
var databaseUrl = `dbs/${config.database.id}`;
var collectionUrl = `${databaseUrl}/colls/${config.collection.id}`;

// Query JSON document collection
function queryCollection() {
  console.log(`Querying collection through index:\n${config.collection.id}`);
  return new Promise((resolve, reject) => {
    client.queryDocuments(
      collectionUrl,
      'SELECT VALUE gd.NFL FROM GamblersDenDB gd WHERE gd.id = "SanDiego"'
    ).toArray((err, results) => {
      if (err) reject(err)
      else {
        for (var queryResult of results) {
          let resultString = JSON.stringify(queryResult);
          console.log(`\tQuery returned ${resultString}`);
        }
        console.log();
        resolve(results);
      }
    });
  });
};
queryCollection()
Running that js file in my command prompt works! It results in the output:
C:\Users\kenv\Desktop\DocDB Test>node SimpleQuery.js
Querying collection through index:
GamblersDenColl
Query returned {"ID":"SDC","name":"Chargers"}
Great. So now I've transferred my code to my project's folder and try to run the app with
taco run android --device
When I pull up the console, the first error that sticks out to me is
Uncaught ReferenceError: require is not defined(…)
It's pointing to the line var documentClient = require("documentdb").DocumentClient; in my code.
Here is my complete js file code that's throwing the error:
(function () {
  "use strict";

  document.addEventListener( 'deviceready', onDeviceReady.bind( this ), false );

  function onDeviceReady() {
    navigator.splashscreen.hide();
    console.log("Cordova is READY!");

    // Handle the Cordova pause and resume events
    document.addEventListener( 'pause', onPause.bind( this ), false );
    document.addEventListener( 'resume', onResume.bind( this ), false );

    $(".btnURL").on("click", function(){ loadURL($(this)) });

    function loadURL(theObj) {
      cordova.InAppBrowser.open(theObj.data("url"), "_blank", "location=yes");
    }

    //********* jQuery VARIABLES ***************//
    var $elBtnSaveName = $("#btnSaveName"),
        $elShowClients = $("#btnShowClients"),
        $elDivShow = $("#divShow"),
        $elFormClient = $("#formClient");

    //********** EVENT HANDLERS *****************//
    $elShowClients.on("click", queryCollection);

    //********************* DOCUMENT DB SECTION *********************************************************/
    var documentClient = require("documentdb").DocumentClient;
    var config = require("./config");
    var url = require('url');

    // use the previously saved config.endpoint and config.primaryKey to create a new DocumentClient
    var client = new documentClient(config.endpoint, { "masterKey": config.primaryKey });

    // These urls are how the DocumentDB client will find the right database and collection.
    var HttpStatusCodes = { NOTFOUND: 404 };
    var databaseUrl = `dbs/${config.database.id}`;
    var collectionUrl = `${databaseUrl}/colls/${config.collection.id}`;

    // Query JSON document collection
    function queryCollection() {
      console.log(`Querying collection through index:\n${config.collection.id}`);
      return new Promise((resolve, reject) => {
        client.queryDocuments(
          collectionUrl,
          'SELECT VALUE gd.NFL FROM GamblersDenDB gd WHERE gd.id = "SanDiego"'
        ).toArray((err, results) => {
          if (err) reject(err)
          else {
            for (var queryResult of results) {
              let resultString = JSON.stringify(queryResult);
              console.log(`\tQuery returned ${resultString}`);
            }
            console.log();
            resolve(results);
            fnShowClientsTable(result.rows);
          }
        });
      });
    };

    function fnShowClientsTable(data) {
      var str = "<p><table id='tableResults'";
      str += "<tr><th>ID</th><th>Name</th><th class='thEmpty'> </th></tr>" //added class to <th> for formatting
      for (var i = 0; i < data.length; i++) { // For X number of times worth of data...
        str += "<tr><td>" + data[i].doc.ID +
          "</td><td>" + data[i].doc.name +
          "</td><td class='btnPencil'>✎</td></tr>";
      }
      str += "</table></p>"; // END table
      $elDivShow.html(str); //Show string as HTML on screen
    } // END fnShowClientsTable
    //************************* END DOCUMENT DB SECTION ******************************************************/
  }; // END onDeviceReady()

  function onPause() {
    // TODO: This application has been suspended. Save application state here.
  };

  function onResume() {
    // TODO: This application has been reactivated. Restore application state here.
  };
} )();
I realize my other function, which pushes the results into a string for display in HTML, is probably wrong (I will most certainly have another post about that :)), but right now I'm trying to figure out how to get past this first error.
So, every time I refresh the page, it seems like sockjs is creating a new connection.
I am saving every message to my mongodb on every channel.onmessage, so if I refresh my page 7 times and send a message, I would save 7 messages of the same content into my mongodb.
This is very problematic because when I retrieve those messages when I go into the chat room, to see the log, I would see bunch of duplicate messages.
I want to keep track of all connections that are 'active', and if a user tries to make another connection, I want to terminate the old one so there is only one connection listening to each message at a time.
How do I do this ?
var connections = {};

//creating the sockjs server
var chat = sockjs.createServer();

//installing handlers for sockjs server instance, with the same url as client
chat.installHandlers(server, { prefix: '/chat/private' });

var multiplexer = new multiplexServer.MultiplexServer(chat);

var configChannel = function (channelId, userId, userName) {
  var channel = multiplexer.registerChannel(channelId);

  channel.on('connection', function (conn) {
    // console.log('connection');
    console.log(connections);
    connections[channelId] = connections[channelId] || {};
    if (connections[channelId][userId]) {
      //want to close the extra connection
    } else {
      connections[channelId][userId] = conn;
    }
    // }
    // if (channels[channelId][userId]) {
    //   conn = channels[channelId][userId];
    // } else {
    //   channels[channelId][userId] = conn;
    // }
    // console.log('accessing channel! ', channels[channelId]);

    conn.on('new user', function (data, message) {
      console.log('new user! ', data, message);
    });

    // var number = connections.length;
    conn.on('data', function (message) {
      var messageObj = JSON.parse(message);
      handler.saveMessage(messageObj.channelId, messageObj.user, messageObj.message);
      console.log('received the message, ', messageObj.message);
      conn.write(JSON.stringify({ channelId: messageObj.channelId, user: messageObj.user, message: messageObj.message }));
    });

    conn.on('close', function () {
      conn.write(userName + ' has disconnected');
    });
  });

  return channel;
};
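For the spot marked "want to close the extra connection", one possible approach (just a sketch; it assumes the sockjs connection object's close() method is the right way to terminate the old connection) is:

// sketch: terminate the previous connection for this user before keeping the new one
if (connections[channelId][userId]) {
  connections[channelId][userId].close(); // drop the old connection
}
connections[channelId][userId] = conn;    // always keep the newest one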
The way I resolved a problem like yours was with a closure and promises; I don't know if that could help you. Here is the code that helped me; it uses the EventBus from Vert.x:
window.Events = (function NewEvents() {
  var eventBusUrl = $('#eventBusUrl').val();
  var eventBus = null;

  return new RSVP.Promise(function(resolve, reject) {
    if (!eventBus) {
      eventBus = new vertx.EventBus(eventBusUrl);
      eventBus.onopen = function eventBusOpened() {
        console.log('Event bus online');
        resolve(eventBus);
      }
      eventBus.onclose = function() {
        eventBus = null;
      };
    }
  });
}());
And then, in another script, I call it this way:
Events.then(function(eventBus) {
  console.log("registering handlers for comments");
  eventBus.registerHandler(address, function(incomingMessage) {
    console.log(incomingMessage);
  });
});
I hope this can help you.
Regards.
I've been looking for quite a while for a solution but haven't found anything yet.
I'm trying to emit a message from a server every time the server sees that a file has changed in a specified directory. However, instead of only emitting one message, it insists on emitting the same message three times. I am using chokidar to watch the directory, and inside of the 'change' event I emit the message.
Server side code:
var express = require('express')
  , app = express()
  , http = require('http')
  , fs = require('fs') // needed for the fs.readFile calls below
  , server = http.Server(app)
  , io = require('socket.io')(server)
  , chokidar = require('chokidar');

server.listen(1234);

app.use('/public', express.static( __dirname + '/public'));

app.get('/', function(request, response){
  var ipAddress = request.socket.remoteAddress;
  console.log("New express connection from: " + ipAddress);
  response.sendfile(__dirname + '/public/index.html'); //Serve client
});

var watcher = chokidar.watch("temp", {ignored: /[\/\\]\./, persistent: true});

watcher.on('change', function(path){
  console.log(path + " has changed.");
  fs.readFile(path, 'utf8', function(err, data){
    if (err) {
      return console.log(err);
    }
    else
    {
      var json = JSON.parse(data), recPsec, type;
      recPsec = json.data[0].values[0];
      type = json.data[0].values[16];

      var compiled = {
        "recPsec": recPsec,
        "type": type
      }

      var jsonMessage = JSON.stringify(compiled)
      io.sockets.emit('message', jsonMessage); // jsonMessage is already a string; stringifying it again would double-encode it
      console.log("Sent message");
    }
  });
});

watcher.on('unlink', function(path){
  console.log('File: ', path, ' has been removed');
});

watcher.on('add', function(path){
  console.log("hi");
  fs.readFile(path, 'utf8', function(err, data){
    if (err) {
      return console.log(err);
    }
    else
    {
      var json = JSON.parse(data), recPsec, type;
      recPsec = json.data[0].values[0];
      type = json.data[0].values[16];

      var compiled = {
        "recPsec": recPsec,
        "type": type
      }

      var jsonMessage = compiled;
      io.sockets.emit('message', JSON.stringify(jsonMessage));
      console.log("message sent");
    }
    //fs.unlinkSync(path);
  });
});
Client Side:
var socket = io.connect('http://localhost');

socket.on('message', function(data){
  console.log(data);
  var parsed = JSON.parse(data);
  recPsecNew = parsed.recPsec;
  typeNew = parsed.type;
  analyze(recPsecNew, typeNew);
});
I am using socket.io in conjunction with express 4.
Chokidar is found here: https://github.com/paulmillr/chokidar
Logs from the console if I change the name of a file twice are shown here: http://s000.tinyupload.com/?file_id=95726281991906625675
Have you tried lodash's debounce function?
You can probably use lodash.debounce here.
According to its docs:
_.debounce(func, [wait=0], [options])
Creates a debounced function that delays invoking func until after wait milliseconds have elapsed since the last time the debounced function was invoked. The debounced function comes with a cancel method to cancel delayed invocations. Provide an options object to indicate that func should be invoked on the leading and/or trailing edge of the wait timeout. Subsequent calls to the debounced function return the result of the last func invocation.
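As a rough sketch of how that could look in the server code above (the 100 ms wait is just an illustrative value, and a per-path debounce map would be needed if several files can change at the same time):

var _ = require('lodash');

// debounce the change handler so several rapid 'change' events for one save
// collapse into a single read-and-emit
watcher.on('change', _.debounce(function (path) {
  console.log(path + " has changed.");
  // ...same fs.readFile + io.sockets.emit logic as above...
}, 100));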