I am using StreamSaver and Node.js together to save 10,000,000 records to a file in .json format. After running the app and downloading a few hundred thousand records (200,000 or 300,000 or so), the download is canceled, file.json is aborted, and the browser's download manager shows a "Network failed" error.
Why is this error shown and the download canceled?
Note: there are no errors in the console, and I have downloaded about 500 MB or more, so this does not seem to be a size or memory problem.
Server.js
var http = require('http');
var express = require('express');
var sql = require('mssql'); // missing from the original snippet; `config` (the MSSQL connection settings) is assumed to be defined elsewhere
var app = express();
var server = http.Server(app);
var io = require('socket.io')(server);

io.on('connection', function (socket) {
    console.log('Connection is ready!');
    socket.on('get_records', function (data) {
        var connection = new sql.Connection(config, function (err) {
            if (err) {
                console.log(err.message);
            }
            var request = new sql.Request(connection);
            request.stream = true;
            request.query("Select * from my_table");
            // ... error checks
            request.on('recordset', function (recordset) {
                // Emitted once for each recordset in a query
            });
            request.on('row', function (record) {
                // Emitted for each row in a recordset
                socket.emit('recieve_records', record); // send record by record to the client
            });
            request.on('error', function (err) {
                console.log(err.message);
            });
            request.on('done', function (returnValue) {
                // Always emitted as the last one
            });
        });
    });
});
client.js
var fileStream = streamSaver.createWriteStream('filename.json');
var writer = fileStream.getWriter();
var encoder = new TextEncoder();
var counter = 0;

socket.on('recieve_records', function (data) {
    var uint8array = encoder.encode(data + "\n"); // data = the record (can be anything)
    writer.write(uint8array);
    counter++;
    if (counter > 100000) {
        console.log('100.000 messages downloaded');
        counter = 0;
    }
});
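For reference, here is a rough sketch (mine, not from the original post) of a backpressure-aware write loop. writer.write() and writer.ready return promises, and awaiting them before writing the next chunk keeps the stream's internal queue from growing without bound while records keep arriving; this assumes the same socket, writer and encoder as above.
// Sketch only: queue incoming records and drain them while respecting backpressure.
var queue = [];
var draining = false;

socket.on('recieve_records', function (data) {
    queue.push(data);
    drain();
});

async function drain() {
    if (draining) return;
    draining = true;
    while (queue.length > 0) {
        await writer.ready;                                   // wait until the sink can accept more data
        await writer.write(encoder.encode(queue.shift() + "\n"));
    }
    draining = false;
}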
I am able to successfully telnet manually to linux345 on port 2345.
This means the following code should output 0.
However, the code outputs 1.
It seems that converting the callback to an async/promise format would help resolve the issue.
Please suggest what the updated code should look like.
const net = require('net');

const HOST = 'linux345';
const PORT = 2345;
let ErrCode = 1;

const client = new net.Socket();

client.connect(PORT, HOST, function () {
    ErrCode = 0;
});

client.on('data', function (data) {
    console.log('Client received: ' + data);
    if (data.toString().endsWith('exit')) {
        client.destroy();
    }
});

client.on('close', function () {
});

client.on('error', function (err) {
    ErrCode = err.code;
    console.log(ErrCode);
});

console.log(ErrCode);
const util = require('util');
// net, HOST and PORT are required/defined as in the snippet above.

let ErrCode = 1;
const client = new net.Socket();
// Bind connect to the socket so `this` is preserved when it is called through the promise.
const connect = util.promisify(client.connect.bind(client));

async function testFun() {
    try {
        // The connect callback receives no arguments, so the resolved value is undefined;
        // a successful await only tells us the connection was established.
        await connect(PORT, HOST);
        ErrCode = 0;
        // Data still arrives via the 'data' event, as in the original code:
        client.on('data', function (data) {
            console.log('Client received: ' + data);
            if (data.toString().endsWith('exit')) {
                client.destroy();
            }
        });
    } catch (ex) {
        ErrCode = -1;
    }
}

testFun().then(() => {
    console.log(ErrCode);
});
This uses util.promisify which, per the Node.js documentation:
Takes a function following the common error-first callback style, i.e. taking an (err, value) => ... callback as the last argument, and returns a version that returns promises.
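One caveat worth noting (my addition, not part of the original answer): net.Socket#connect takes a plain 'connect' listener rather than an error-first callback, so a connection failure is emitted as an 'error' event and will not reject the promisified call by itself. A manual Promise wrapper is a small, dependable alternative; the sketch below reuses net, PORT and HOST from above.
// Sketch: wrap the connection in a Promise by hand.
function connectAsync(port, host) {
    return new Promise((resolve, reject) => {
        const socket = new net.Socket();
        socket.once('connect', () => resolve(socket)); // connection established
        socket.once('error', reject);                  // e.g. ECONNREFUSED, ENOTFOUND
        socket.connect(port, host);
    });
}

async function checkPort() {
    try {
        const socket = await connectAsync(PORT, HOST);
        socket.destroy();
        return 0;
    } catch (err) {
        return 1;
    }
}

checkPort().then(code => console.log(code));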
I'm implementing a WebSocket client in Node, and my webhook has to handle multiple connections from a chatbot service. For example: a new user comes in, and a WebSocket connection is established to an external chatbot service. The WebSocket URL is obtained through XMLHttpRequest in my code, and I then use this URL to connect to the chatbot service using the ws object (new WebSocket('wssUrlObtainedThroughAjaxRequest', 'default-protocol')). So each user has a WebSocket. The problem is that my code was written to run sequentially, so if two people send messages to my webhook at the same time, things don't work properly. I'll post example code here to make it easier to understand.
const express = require('express');
const PORT = process.env.PORT || 8002;
let WebSocket = require('ws');
let CONNECTIONS = new Map();
...
...
...
const app = express()
    .use(bodyparser.urlencoded({ extended: false }))
    .use(bodyparser.json());

app.post('/', (req, res) => {
    ...
    ...
    ...
    } else if (req.body.type === 'MESSAGE') {
        let DM = req.body.space.name;
        let msg = req.body.message.text;
        ws = (CONNECTIONS.get(DM) != null) ? CONNECTIONS.get(DM) : null;
        if (ws == null || ws.readyState == 3) {
            controlws.gerarURLWS();
            ws = new WebSocket(controlws.urlws, 'talk-protocol');
            CONNECTIONS.set(DM, ws);
        }
        // Executes on websocket opening
        ws.onopen = function (event) {
            console.log('Canal aberto;'); // "Channel open"
            keepAliveWS();
            ws.send(JSON.stringify(msgKoreAi(msg)));
        }
        if (ws.readyState == 1)
            ws.send(JSON.stringify(msgKoreAi(msg)));
        ws.onmessage = async function (event) {
            let resp = JSON.parse(event.data);
            if (resp.type == "bot_response") {
                text = resp.message[0].component.payload.text;
                if (text == null) { // there is a quick reply
                    // mount card hangouts response
                    let qreplies = resp.message[0].component.payload.payload.quick_replies;
                    card = '{"sections": [{"widgets": [{"buttons": [';
                    for (let i = 0; i < qreplies.length; i++) {
                        if (i != qreplies.length - 1)
                            card += '{"textButton": {"text": "' + qreplies[i].payload + '","onClick": {"action": {"actionMethodName": "' + qreplies[i].payload + '"}}}},';
                        else
                            card += '{"textButton": {"text": "' + qreplies[i].payload + '","onClick": {"action": {"actionMethodName": "' + qreplies[i].payload + '"}}}}';
                    }
                    card += ']}]}],"name": "respostas"}';
                    card = JSON.parse(card);
                    text = resp.message[0].component.payload.payload.text;
                    await assyncMessage(DM, text);
                    await assyncMessage(DM, card);
                    return;
                }
                // Send async messages if a synchronous one was already sent
                if (res.headersSent) {
                    return await assyncMessage(DM, text);
                } else {
                    return res.json({ text });
                }
            }
        }
        return;
    }
    ...
    ...
    ...
app.listen(PORT, () => {
    console.log(`Server is running on port - ${PORT}`);
});
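As a side note (a sketch of mine, not part of the original post), the readyState/onopen juggling above can be expressed as a small helper that resolves once a given socket is open, which also makes it easier to reason about when several requests race to use the same connection:
// Sketch: resolve when the socket is usable, reject if it errors while connecting.
function whenOpen(ws) {
    return new Promise((resolve, reject) => {
        if (ws.readyState === WebSocket.OPEN) return resolve(ws);
        ws.once('open', () => resolve(ws));   // the ws package's WebSocket is an EventEmitter
        ws.once('error', reject);
    });
}

// Possible usage inside the route (DM, msg, CONNECTIONS and msgKoreAi as in the question):
// const socketForUser = CONNECTIONS.get(DM);
// await whenOpen(socketForUser);
// socketForUser.send(JSON.stringify(msgKoreAi(msg)));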
I'm running the node-rdkafka code below in Eclipse as a Node.js application. This is the sample code from https://blizzard.github.io/node-rdkafka/current/tutorial-producer_.html
I want to run this on a test server and call it from an iOS mobile application.
I know about running a Node.js app on AWS.
Question I: Are there any other options for running it in a free test server environment, like Tomcat?
Question II: Even if I am able to run this Node.js app on a server, how do I call it from a mobile application? Do I need to call producer.on('ready', function(arg)), or what function do I need to call from the mobile app?
var Kafka = require('node-rdkafka');
//console.log(Kafka.features);
//console.log(Kafka.librdkafkaVersion);

var producer = new Kafka.Producer({
    'metadata.broker.list': 'localhost:9092',
    'dr_cb': true
});

var topicName = 'MyTest';

//logging debug messages, if debug is enabled
producer.on('event.log', function(log) {
    console.log(log);
});

//logging all errors
producer.on('event.error', function(err) {
    console.error('Error from producer');
    console.error(err);
});

//counter to stop this sample after maxMessages are sent
var counter = 0;
var maxMessages = 10;

producer.on('delivery-report', function(err, report) {
    console.log('delivery-report: ' + JSON.stringify(report));
    counter++;
});

//Wait for the ready event before producing
producer.on('ready', function(arg) {
    console.log('producer ready.' + JSON.stringify(arg));
    for (var i = 0; i < maxMessages; i++) {
        var value = new Buffer('MyProducerTest - value-' + i);
        var key = "key-" + i;
        // if partition is set to -1, librdkafka will use the default partitioner
        var partition = -1;
        producer.produce(topicName, partition, value, key);
    }
    //need to keep polling for a while to ensure the delivery reports are received
    var pollLoop = setInterval(function() {
        producer.poll();
        if (counter === maxMessages) {
            clearInterval(pollLoop);
            producer.disconnect();
        }
    }, 1000);
});

/*
producer.on('disconnected', function(arg) {
    console.log('producer disconnected. ' + JSON.stringify(arg));
});*/

//starting the producer
producer.connect();
First of all, you need an HTTP server. ExpressJS can be used. Then, just tack on the Express code basically at the end, but move the producer loop into the request route.
So, start with what you had:
var Kafka = require('node-rdkafka');
//console.log(Kafka.features);
//console.log(Kafka.librdkafkaVersion);

var producer = new Kafka.Producer({
    'metadata.broker.list': 'localhost:9092',
    'dr_cb': true
});

var topicName = 'MyTest';

//logging debug messages, if debug is enabled
producer.on('event.log', function(log) {
    console.log(log);
});

//logging all errors
producer.on('event.error', function(err) {
    console.error('Error from producer');
    console.error(err);
});

var counter = 0; // kept from the original sample; only used for logging now

producer.on('delivery-report', function(err, report) {
    console.log('delivery-report: ' + JSON.stringify(report));
    counter++;
});

//Wait for the ready event before producing
producer.on('ready', function(arg) {
    console.log('producer ready.' + JSON.stringify(arg));
});

producer.on('disconnected', function(arg) {
    console.log('producer disconnected. ' + JSON.stringify(arg));
});

//starting the producer
producer.connect();
Then, you can add this in the same file.
var express = require('express')
var app = express()

app.get('/', (req, res) => res.send('Ready to send messages!'))

app.post('/:maxMessages', function (req, res) {
    if (req.params.maxMessages) {
        var maxMessages = parseInt(req.params.maxMessages);
        for (var i = 0; i < maxMessages; i++) {
            var value = new Buffer('MyProducerTest - value-' + i);
            var key = "key-" + i;
            // if partition is set to -1, librdkafka will use the default partitioner
            var partition = -1;
            producer.produce(topicName, partition, value, key);
        } // end for
    } // end if
    res.send('Produced ' + req.params.maxMessages + ' messages'); // respond so the client request does not hang
}); // end app.post()

app.listen(3000, () => console.log('Example app listening on port 3000!'))
I don't think the poll loop is necessary since you don't care about the counter anymore.
Now, connect your mobile app to http://<your server IP>:3000/ and send test messages with a POST request to http://<your server IP>:3000/10, for example; adjust the path to change the number of messages to send.
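For example, the request could look like this from JavaScript (a sketch of mine; any HTTP client on the mobile side can issue the same POST, and the host stays whatever your server's address is):
// Sketch: ask the server above to produce 10 test messages.
fetch('http://<your server IP>:3000/10', { method: 'POST' })
    .then(function (res) { return res.text(); })
    .then(function (body) { console.log(body); });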
I might be late on this, but this is how I did it using promises, and I found it better than having a timeout, etc.
const postMessageToPublisher = (req, res) => {
    return new Promise((resolve, reject) => {
        producer.connect();
        producer.setPollInterval(globalConfigs.producerPollingTime);
        const requestBody = req.body;
        const actualBody = requestBody.data;
        const topicName = requestBody.topicName;
        const key = requestBody.key || uuid();
        const partition = requestBody.partition || undefined;
        const data = Buffer.from(JSON.stringify(actualBody));
        /**
         * Actual messages are sent here when the producer is ready
         */
        producer.on(kafkaEvents.READY, () => {
            try {
                producer.produce(
                    topicName,
                    partition,
                    data,
                    key // key provided by the user, or a UUID
                );
            } catch (error) {
                reject(error);
            }
        });
        // Register listener for debug information; only invoked if the debug option is set in driver_options
        producer.on(kafkaEvents.LOG, log => {
            logger.info('Producer event log notification for debugging:', log);
        });
        // Register error listener
        producer.on(kafkaEvents.ERROR, err => {
            logger.error('Error from producer:' + JSON.stringify(err));
            reject(err);
        });
        // Register delivery report listener
        producer.on(kafkaEvents.PUBLISH_ACKNOWLEDGMENT, (err, ackMessage) => {
            if (err) {
                logger.error(
                    'Delivery report: Failed sending message ' + ackMessage.value
                );
                logger.error('and the error is :', err);
                reject({ value: ackMessage.value, error: err });
            } else {
                resolve({
                    teamName: globalConfigs.TeamNameService,
                    topicName: ackMessage.topic,
                    key: ackMessage.key.toString()
                });
            }
        });
    });
};
Please note that kafkaEvents contains my constants for the events we listen to; it is just a reference, e.g. kafkaEvents.LOG is the same as 'event.log'.
The calling function expects this to return a promise, so we use .then(data => ...) to send the success response to the user and .catch(error => ...) to send an error response.
This is how I achieved it using promises.
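For instance, a calling route could consume it like this (a sketch; the route path and status codes are my own choices, not from the original answer):
// Sketch: an Express route that forwards the promise result to the HTTP client.
app.post('/publish', (req, res) => {
    postMessageToPublisher(req, res)
        .then(data => res.status(200).json(data))                                 // send your success response to the user from here
        .catch(error => res.status(500).json({ error: error.message || error })); // send an error response to the user
});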
I've been looking for quite a while for a solution but haven't found anything yet.
I'm trying to emit a message from a server every time the server sees that a file has changed in a specified directory. However, instead of only emitting one message, it insists on emitting the same message three times. I am using chokidar to watch the directory, and inside of the 'change' event I emit the message.
Server side code:
var express = require('express')
  , app = express()
  , http = require('http')
  , server = http.Server(app)
  , io = require('socket.io')(server)
  , fs = require('fs')              // needed for fs.readFile below; missing from the original snippet
  , chokidar = require('chokidar');

server.listen(1234);

app.use('/public', express.static(__dirname + '/public'));

app.get('/', function(request, response){
    var ipAddress = request.socket.remoteAddress;
    console.log("New express connection from: " + ipAddress);
    response.sendfile(__dirname + '/public/index.html'); // Serve client
});

var watcher = chokidar.watch("temp", {ignored: /[\/\\]\./, persistent: true});

watcher.on('change', function(path){
    console.log(path + " has changed.");
    fs.readFile(path, 'utf8', function(err, data){
        if (err) {
            return console.log(err);
        }
        else
        {
            var json = JSON.parse(data), recPsec, type;
            recPsec = json.data[0].values[0];
            type = json.data[0].values[16];
            var compiled = {
                "recPsec": recPsec,
                "type": type
            };
            var jsonMessage = JSON.stringify(compiled);
            io.sockets.emit('message', jsonMessage); // stringify once; the client calls JSON.parse once
            console.log("Sent message");
        }
    });
});

watcher.on('unlink', function(path){
    console.log('File: ', path, ' has been removed');
});

watcher.on('add', function(path){
    console.log("hi");
    fs.readFile(path, 'utf8', function(err, data){
        if (err) {
            return console.log(err);
        }
        else
        {
            var json = JSON.parse(data), recPsec, type;
            recPsec = json.data[0].values[0];
            type = json.data[0].values[16];
            var compiled = {
                "recPsec": recPsec,
                "type": type
            };
            var jsonMessage = compiled;
            io.sockets.emit('message', JSON.stringify(jsonMessage));
            console.log("message sent");
        }
        //fs.unlinkSync(path);
    });
});
Client Side:
var socket = io.connect('http://localhost');

socket.on('message', function(data){
    console.log(data);
    var parsed = JSON.parse(data);
    recPsecNew = parsed.recPsec;
    typeNew = parsed.type;
    analyze(recPsecNew, typeNew);
});
I am using socket.io in conjunction with express 4.
Chokidar is found here: https://github.com/paulmillr/chokidar
Logs from the console if I change the name of a file twice are shown here: http://s000.tinyupload.com/?file_id=95726281991906625675
Have you tried lodash's debounce function?
You can probably use the lodash.debounce function here.
According to its docs:
_.debounce(func, [wait=0], [options])
Creates a debounced function that delays invoking func until after wait milliseconds have elapsed since the last time the debounced function was invoked. The debounced function comes with a cancel method to cancel delayed invocations. Provide an options object to indicate that func should be invoked on the leading and/or trailing edge of the wait timeout. Subsequent calls to the debounced function return the result of the last func invocation.
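A minimal sketch of how that could be applied to the watcher above (the 500 ms wait is an arbitrary choice of mine, not from the answer):
// Sketch: collapse bursts of 'change' events into a single handler call.
const _ = require('lodash');

const onFileChanged = _.debounce(function (path) {
    console.log(path + " has changed.");
    // read the file and io.sockets.emit('message', ...) exactly as in the question
}, 500); // fire once, 500 ms after the last change event in a burst

// Note: this debounces across all files; a per-path debounce would need a Map of debounced handlers.
watcher.on('change', onFileChanged);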
I have a basic Node.js server which is designed to be used as an API. I've created a log module and a database module, and I've started adding other modules to deal with different request types.
I'm using Express.js and node-mysql.
When I visit /v1/group I get the following error:
TypeError: Cannot read property 'database' of undefined
at Group.getAll (C:\code\javascript\node\api\api\v1\groups.js:12:23)
at callbacks (C:\code\javascript\node\api\node_modules\express\lib\router\index.js:161:37) ...
So I guess that after receiving a request and calling group.getAll(), this is undefined, but I don't understand why. Is there a way to set this, or have I structured my application all wrong?
server.js
"use strict";
var Express = require('express');
var Log = require('./database/log');
var Database = require('./database/database');
var dbConfig = require('./dbconfig.json');
var Group = require('./api/v1/groups');

//Init express
var app = new Express();

//Init log and database
var log = new Log();
var database = new Database(dbConfig, log);

var initCallback = function() {
    //Init routes
    var group = new Group(database, log);

    //Group routes
    app.get('/v1/group', group.getAll);
    app.get('/v1/group/:id', group.getByID);

    app.listen(3000);
    log.logMessage("INFO", "Listening on port 3000");
};

//Test database connection
database.getConnection(function(err, connection) {
    if (err) {
        log.logMessage("FATAL", "Error connecting to database, check database is running and the dbconfig.json file is present and correct.");
        process.exit(1);
    }
    connection.end();
    initCallback();
});
database.js
"use strict";
var mysql = require('mysql');

var Database = function(dbConfig, log) {
    this.connected = false;
    this.log = log;
    this.log.logMessage("INFO", "Connecting to database with: Host - " + dbConfig.dbhost + ", Database port - " + dbConfig.dbport + ", Database name - " + dbConfig.dbname + ", User " + dbConfig.dbuser + ", Password length - " + dbConfig.dbpass.length);
    this.pool = mysql.createPool({
        host: dbConfig.dbhost,
        user: dbConfig.dbuser,
        port: dbConfig.dbport,
        password: dbConfig.dbpass,
        database: dbConfig.dbname
    });
};

Database.prototype.getConnection = function() {
    var args = arguments;
    return this.pool.getConnection.apply(this.pool, arguments);
};

module.exports = Database;
groups.js
"use strict";
var Group = function(database, log) {
    this.database = database;
    this.log = log;
};

Group.prototype.getAll = function(req, res) {
    console.log(this); // --> undefined
    var query = 'SELECT * FROM invgroups WHERE published = 1';
    this.database.getConnection(function(err, connection) { // --> error line
        if (err) { res.send(500, "Database error"); }
        connection.query(query, function(err, results) {
            if (err) { res.send(500, "Database error"); }
            res.send(results);
        });
        connection.end();
    });
};

Group.prototype.getByID = function(req, res) {
    console.log(this);
    res.send({name: "Group Item 1"});
};

module.exports = Group;
You need to properly bind the function.
app.get('/v1/group', group.getAll);
only passes the getAll function as a handler, but the function itself has no concept of this. The value of this is decided based on the context that is bound, or based on how the function is called. This blog post is useful for understanding how function context works.
app.get('/v1/group', group.getAll.bind(group));
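An equivalent approach (my addition, not part of the original answer) is to keep the route handler as a small wrapper, so getAll is always invoked on the instance:
// Sketch: call the method through the instance instead of binding it.
app.get('/v1/group', function (req, res) {
    group.getAll(req, res);
});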