I want to build a DB of users; if a user has a site, I want to check whether the site is online, save the URL if it is, and otherwise delete it and advise the user that the site is offline. The problem is that when the program goes through the request it seems to do nothing and just jumps out. I tried both axios and request, but the problem remains; I believe it might be due to asynchronous execution. Any help will be really appreciated =)
var r = require('rethinkdb');
var axios = require('axios');
var schema = function(data, callback){
new_schema = true;
var schema = {};
if(new_schema){
schema.user = user.id;
}
schema.name = data.title || '';
schema.email = data.email || '';
if(data.url){
axios.get(data.url).then(function(err, response){
if(err) schema.url = 'no site'
schema.url = 'data.url';
callback(schema);
}).catch(function(error){
console.log(error);
callback(schema);
});
}else{
callback(schema);
}
};
var datos = '';
command.stdout.on('data', (data) => {
datos = datos + data;
});
command.on('close', (code) => {
const objs = JSON.parse(datos);
for (var i in objs) {
let obj = schema(objs[i], function(sch){
console.log(sch);
});
}
}catch(e){
console.log(e);
}
process.exit();
});
});
The problem was due to asynchronous execution. To solve it I had to use queues, and I used the better-queue library. This fixed the problem and makes all the calls reach the server side.
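For reference, here is a minimal sketch of how better-queue can serialize the URL checks; the task shape and the push call are assumptions for illustration, not the original code:
// Minimal sketch, assuming each task is one parsed user object.
// better-queue processes tasks one at a time, so the next URL check
// only starts after the previous callback has fired.
var Queue = require('better-queue');
var axios = require('axios');
var siteQueue = new Queue(function (task, cb) {
    if (!task.url) return cb(null, task);
    axios.get(task.url)
        .then(function () {
            cb(null, task);              // site is online, keep the URL
        })
        .catch(function () {
            task.url = 'no site';        // site is offline, flag it for the user
            cb(null, task);
        });
});
siteQueue.on('task_finish', function (taskId, result) {
    console.log('processed user:', result);
});
// Hypothetical usage: push every object parsed from the child process output.
// objs.forEach(function (obj) { siteQueue.push(obj); });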
I am trying to implement download functionality using streams in Node.js.
In the code I am trying to simulate an endpoint that sends data in chunks, similar to paginated data, for example in chunks of 5000. To make it clearer: we can send top and skip parameters to the endpoint to get a particular chunk of data, and if no parameters are provided, it sends the first 5000 entries.
There are 2 cases that I am trying to take care of:
When the user cancels the download from the browser, how do I handle the continuous fetching of data from the endpoint
When the user pauses the download from the browser, how do I pause the data fetching, and then resume once user resumes it
The first case can be taken care of using the 'close' event of the request: when the connection between the client and the server gets cancelled, I disconnect.
If anyone has a better way of implementing this, please suggest it.
I am having trouble handling the second case, when the user pauses.
If anyone could guide me through this, or even provide a better solution to the overall problem (including handling the chunks of data), it would be really helpful.
const {createServer} = require('http');
const {Transform} = require('stream');
const axios = require('axios');
var c = 0;
class ApiStream extends Transform {
constructor(apiCallback, res, req) {
super();
this.apiCallback = apiCallback;
this.isPipeSetup = false;
this.res = res;
this.req = req
}
//Will get data continuously
async start() {
let response;
try {
response = await this.apiCallback();
} catch (e) {
response = null;
}
if (!this.isPipeSetup) {
this.pipe(this.res);
this.isPipeSetup = true;
}
if (response) {
response = response.data
if (Array.isArray(response)) {
response.forEach((item) => {
this.push(JSON.stringify(item) + "\n");
});
} else if (typeof response === "object") {
this.push(JSON.stringify(response) + "\n");
} else if (typeof response === "string") {
this.push(response + "\n");
}
this.start()
}else{
this.push(null);
console.log('Stream ended')
}
}
}
const server = createServer(async (req, res) => {
res.setHeader("Content-disposition", "attachment; filename=download.json");
res.setHeader("Content-type", "text/plain");
let disconnected = false;
const filestream = new ApiStream(async () => {
let response;
try {
if(disconnected){
console.log('Client connection closed')
return null;
}
c++;
response = await axios.get("https://jsonplaceholder.typicode.com/users");
//Simulate delay in data fetching
let z = 0;
if(c>=200) response = null;
while(z<10000){
let b = 0;
while(b<10000){
b+=0.5;
}
z +=0.5;
}
} catch (error) {
res.status(500).send(error);
}
if (response) {
return response;
}
return null;
}, res, req);
await filestream.start();
req.on('close', (err) => {
disconnected = true;
})
})
server.listen(5050, () => console.log('server running on port 5050'));
I am trying to build a traffic analysis app, but my client side is rendering faster than my data is loading on the server side. (More specifically, it's the visual recognition data that is not being rendered on the web; I am using the IBM Visual Recognition cloud service.)
How can I solve this issue? I have been stuck on this for a few days.
This is the code where I call my function.
fetchAPI(10).then((data) => {
var data = data["items"][0]["cameras"]
for (let i = 0; i < data.length; i++){
image = data[i]["image"]
images.push(image);
coordinates.push([data[i]["location"]["latitude"], data[i]["location"]["longitude"]]);
}}).then(recognition(10, pageRender))
This is where I do my visual recognition
async function recognition(res, callback){
// Fetch api to get the images to be analyse
var data = await fetchAPI(10)
var info = await data["items"][0]["cameras"]
// Loop through the images
for (let i = 0; i < info.length; i++){
var image = await info[i]["image"]
// Params to be analyse
var params = await {
url: image,
classifier_ids: classifier_ids
};
// The part where it analyse the image
await visualRecognition.classify(params, function(err, response) {
if (err)
console.log(err);
else
try{
var status = (JSON.stringify(response.images[0]["classifiers"][0]["classes"][0]["class"], null, 2));
var prob = (JSON.stringify(response.images[0]["classifiers"][0]["classes"][0]["score"], null, 2));
}
catch(error){
status = "Error getting status"
prob = "Error getting probability"
}
value.push([status, prob]);
});
}
await callback()}
This is the callback function which I use to render my page.
function pageRender(){
app.get("/", (req,res) => {
res.render("index", {
images: JSON.stringify(images),
coordinates: JSON.stringify(coordinates),
value: JSON.stringify(value),
})
console.log("I have finish rendering")
}) }
I have searched for a week now and tried every solution in other posts or forums, but still nothing, so I'm in need of help, please.
Node is up to date, if that's important: v9.4.0.
If there's something else you need to know, let me know.
'use strict'
var EventEmitter = require('events').EventEmitter
var util = require('util')
var WebSocketServer = require('ws').Server
var CONNECTION_ERROR_LOG_RATE = 1000 * 60 * 60
var Browser = function () {
if (!(this instanceof Browser)) return new Browser()
EventEmitter.call(this)
this.wss = null
this.ws = null
this.lastConnectionErrorLog = null
}
util.inherits(Browser, EventEmitter)
Browser.prototype.listen = function listen (port) {
console.log('Listening on websocket port %d', port)
this.wss = new WebSocketServer({port, host: '127.0.0.1'})
var self = this
this.wss.on('connection', function (ws) {
self.ws = ws
ws.on('message', function (data) {
var res = JSON.parse(data)
self.emit('message', res)
})
self.lastConnectionErrorLog = null
self.emit('connected')
})
this.wss.on('close', function () {
self.emit('closed')
})
this.wss.on('error', function (err) {
self.emit('error', err)
})
}
Browser.prototype.isConnected = function isConnected () {
return !!this.ws
}
Browser.prototype.send = function send (req) {
if (!this.ws) {
var elapsed = this.lastConnectionErrorLog === null ||
Date.now() - this.lastConnectionErrorLog > CONNECTION_ERROR_LOG_RATE
if (elapsed) {
console.log('browser not connected')
this.lastConnectionErrorLog = Date.now()
}
return
}
var self = this
var message = JSON.stringify(req)
this.ws.send(message, function (err) {
if (err) {
var elapsed = self.lastConnectionErrorLog === null ||
Date.now() - self.lastConnectionErrorLog > CONNECTION_ERROR_LOG_RATE
if (elapsed) {
self.lastConnectionErrorLog = Date.now()
self.emit('messageError', err)
}
} else {
self.emit('messageSent')
}
})
}
module.exports = Browser
I am kind of new to WebSockets/Node/JavaScript, so it may be that the answer is very simple.
I apologize in advance.
Thank you kindly.
The error you're seeing is because you are sending a non-WebSocket request (i.e., a normal HTTP request from a web browser) to a WebSockets server.
To connect to a WebSockets server from a browser, you'll need to use the WebSocket interface in JavaScript.
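For example, a minimal browser-side client sketch might look like this (the port here is an assumption; use whatever port you pass to listen()):
// Minimal sketch of a browser-side WebSocket client.
// The port (3000) is an assumption; use the port passed to listen().
var ws = new WebSocket('ws://127.0.0.1:3000')
ws.onopen = function () {
  // The server above JSON.parses incoming messages, so send JSON strings.
  ws.send(JSON.stringify({command: 'hello'}))
}
ws.onmessage = function (event) {
  console.log('message from server:', event.data)
}
ws.onerror = function (err) {
  console.error('websocket error:', err)
}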
The code you posted doesn't include the string "Upgrade required", so the problem must be coming from elsewhere. Since you're working with Node/NPM, it's usually pretty easy to figure out where this problem is coming from. Just use your IDE to search across all files in your project directory (including the node_modules folder) to see where the "Upgrade required" string is found. This will at least show you which component is triggering that error and point you towards where to look for further troubleshooting and/or upgrading.
I'm running the below node-rdkafka code in Eclipse as a Node.js application. This is the sample code from https://blizzard.github.io/node-rdkafka/current/tutorial-producer_.html
I want to run this on a test server and call it from an iOS mobile application.
I know about running a Node.js app in AWS.
Question I: Are there any other options to run it in a free test server environment, like Tomcat?
Question II: Even if I am able to run this Node.js app on a server, how do I call it from a mobile application? Do I need to call producer.on('ready', function(arg)), or what function do I need to call from the mobile app?
var Kafka = require('node-rdkafka');
//console.log(Kafka.features);
//console.log(Kafka.librdkafkaVersion);
var producer = new Kafka.Producer({
'metadata.broker.list': 'localhost:9092',
'dr_cb': true
});
var topicName = 'MyTest';
//logging debug messages, if debug is enabled
producer.on('event.log', function(log) {
console.log(log);
});
//logging all errors
producer.on('event.error', function(err) {
console.error('Error from producer');
console.error(err);
});
//counter to stop this sample after maxMessages are sent
var counter = 0;
var maxMessages = 10;
producer.on('delivery-report', function(err, report) {
console.log('delivery-report: ' + JSON.stringify(report));
counter++;
});
//Wait for the ready event before producing
producer.on('ready', function(arg) {
console.log('producer ready.' + JSON.stringify(arg));
for (var i = 0; i < maxMessages; i++) {
var value = new Buffer('MyProducerTest - value-' +i);
var key = "key-"+i;
// if partition is set to -1, librdkafka will use the default partitioner
var partition = -1;
producer.produce(topicName, partition, value, key);
}
//need to keep polling for a while to ensure the delivery reports are received
var pollLoop = setInterval(function() {
producer.poll();
if (counter === maxMessages) {
clearInterval(pollLoop);
producer.disconnect();
}
}, 1000);
});
/*
producer.on('disconnected', function(arg) {
console.log('producer disconnected. ' + JSON.stringify(arg));
});*/
//starting the producer
producer.connect();
First of all, you need an HTTP server. ExpressJS can be used. Then, just tack on the Express code basically at the end, but move the producer loop into the request route.
So, start with what you had
var Kafka = require('node-rdkafka');
//console.log(Kafka.features);
//console.log(Kafka.librdkafkaVersion);
var producer = new Kafka.Producer({
'metadata.broker.list': 'localhost:9092',
'dr_cb': true
});
var topicName = 'MyTest';
//logging debug messages, if debug is enabled
producer.on('event.log', function(log) {
console.log(log);
});
//logging all errors
producer.on('event.error', function(err) {
console.error('Error from producer');
console.error(err);
});
producer.on('delivery-report', function(err, report) {
console.log('delivery-report: ' + JSON.stringify(report));
counter++;
});
//Wait for the ready event before producing
producer.on('ready', function(arg) {
console.log('producer ready.' + JSON.stringify(arg));
});
producer.on('disconnected', function(arg) {
console.log('producer disconnected. ' + JSON.stringify(arg));
});
//starting the producer
producer.connect();
Then, you can add this in the same file.
var express = require('express')
var app = express()
app.get('/', (req, res) => res.send('Ready to send messages!'))
app.post('/:maxMessages', function (req, res) {
if (req.params.maxMessages) {
var maxMessages = parseInt(req.params.maxMessages);
for (var i = 0; i < maxMessages; i++) {
var value = new Buffer('MyProducerTest - value-' +i);
var key = "key-"+i;
// if partition is set to -1, librdkafka will use the default partitioner
var partition = -1;
producer.produce(topicName, partition, value, key);
} // end for
} // end if
// Send a response so the client's POST request completes
res.send('Queued ' + req.params.maxMessages + ' messages for topic ' + topicName);
}); // end app.post()
app.listen(3000, () => console.log('Example app listening on port 3000!'))
I don't think the poll loop is necessary since you don't care about the counter anymore.
Now, connect your mobile app to http://<your server IP>:3000/ and send test messages with a POST request to http://<your server IP>:3000/10, for example; adjust the path parameter to change the number of messages to send.
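Before wiring up the mobile app, you can sanity-check the route from any HTTP client. Here is a small sketch using axios; the server address and message count are placeholders:
// Hypothetical test client for the Express route above.
// Replace SERVER_IP with your server's address; 10 is the number of messages to produce.
var axios = require('axios');
axios.post('http://SERVER_IP:3000/10')
  .then(function (res) {
    console.log('server replied:', res.status, res.data);
  })
  .catch(function (err) {
    console.error('request failed:', err.message);
  });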
I might be late on this, but this is how I did it using promises, and I found it better than having a timeout, etc.
const postMessageToPublisher = (req, res) => {
return new Promise((resolve, reject) => {
producer.connect();
producer.setPollInterval(globalConfigs.producerPollingTime);
const requestBody = req.body;
const actualBody = requestBody.data;
const topicName = requestBody.topicName;
const key = requestBody.key || uuid();
const partition = requestBody.partition || undefined;
const data = Buffer.from(JSON.stringify(actualBody));
/**
* Actual messages are sent here when the producer is ready
*/
producer.on(kafkaEvents.READY, () => {
try {
producer.produce(
topicName,
partition,
data,
key // key provided by the user, or a UUID
);
} catch (error) {
reject(error);
}
});
// Register listener for debug information; only invoked if debug option set in driver_options
producer.on(kafkaEvents.LOG, log => {
logger.info('Producer event log notification for debugging:', log);
});
// Register error listener
producer.on(kafkaEvents.ERROR, err => {
logger.error('Error from producer:' + JSON.stringify(err));
reject(err);
});
// Register delivery report listener
producer.on(kafkaEvents.PUBLISH_ACKNOWLEDGMENT, (err, ackMessage) => {
if (err) {
logger.error(
'Delivery report: Failed sending message ' + ackMessage.value
);
logger.error('and the error is :', err);
reject({ value: ackMessage.value, error: err });
} else {
resolve({
teamName: globalConfigs.TeamNameService,
topicName: ackMessage.topic,
key: ackMessage.key.toString()
});
}
});
});
};
Please note that kafkaEvents contains my constants for the events we listen to; it is just a reference, e.g. kafkaEvents.LOG is the same as 'event.log'.
Also, the calling function expects this to return a promise, so accordingly we use .then(data => 'send your response to the user from here') and .catch(error => 'send an error response to the user').
This is how I achieved it using promises.
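For illustration, a calling route might look roughly like this (the route path and response shape are assumptions, not part of the code above):
// Hypothetical Express route wrapping postMessageToPublisher.
// The path and response format are assumptions for illustration only.
app.post('/publish', (req, res) => {
  postMessageToPublisher(req, res)
    .then(data => {
      // The message was acknowledged by Kafka; return the details to the caller.
      res.status(200).json(data);
    })
    .catch(error => {
      // The producer reported an error or delivery failed.
      res.status(500).json({ error: error.message || error });
    });
});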
I am new to JS and trying to write a simple web server. Here is my project.
When I go to http://localhost:4000/home.html in the browser, I get the error: Cannot call method 'toString' of null.
The problem is that UrlResLoader.httpcode and UrlResLoader.fileType aren't defined:
var UrlResLoader = new UrlLoader();
UrlResLoader.requestUrl(fileResEngine);
res.writeHead(UrlResLoader.httpcode, UrlResLoader.fileType);
res.write(UrlResLoader.data);
res.end()
I am not sure what the problem is here. I have hooked up a debugger and found that the problem happens on the fs.readFile(fileResEngine.fullpath, function (err, data) call.
I am still unclear on why it is happening. After researching a bit, I found that in order to invoke closure functions, I should save the "this" pointer to refer to the member variables; otherwise, the instance will be different.
But this hasn't fixed the problem.
Also, any comments on design flaws will be welcome.
Here is my code -
The main file -
const http = require('http');
const parseUrl = require('parseurl');
const fileEngine = require('./fileErrHandler');
const UrlLoader = require('./urlController');
http.createServer( function (req, res)
{
try
{
// this is a library function
var pathName = decodeURIComponent(parseUrl(req).pathname);
var fileResEngine= new fileEngine(pathName);
// create a literal validateFile to validate the path
fileResEngine.pathCheck();
if (fileResEngine.error === true )
{
res.statusCode = fileResEngine.statusCode;
res.end(fileResEngine.ErrorMsg);
return;
}
else
{
var UrlResLoader = new UrlLoader();
UrlResLoader.requestUrl(fileResEngine);
res.writeHead(UrlResLoader.httpcode, UrlResLoader.fileType);
res.write(UrlResLoader.data);
res.end();
}
}
catch(err)
{
res.statusCode = err.status || 500;
res.end(err.message);
}
}).listen(4000);
The file error handler
var resolvePath = require('resolve-path');
const path = require('path');
var pagesDir = path.join(__dirname, 'Pages');
function pathCheckerEngine(path)
{
this.error = true;
this.path = path;
this.statusCode = 500;
this.ErrorMsg = "Internal Server Error";
this.PageRequest = "home.html";
this.extname = "html";
this.fullpath = './';
var pcEngine = this;
this.pathCheck = function()
{
try {
if (!path) {
pcEngine.statusCode = 400;
pcEngine.ErrorMsg = 'path required';
pcEngine.error = true;
}
else {
//removes first '/' of the path
pcEngine.PageRequest = path.substr(1);
pcEngine.fullpath = resolvePath(pagesDir, this.PageRequest);
pcEngine.statusCode = 200;
pcEngine.ErrorMsg = null;
pcEngine.error = false;
pcEngine.extname = this.PageRequest.split('.').pop();
}
}
catch(err)
{
pcEngine.statusCode = err.status || 500;
pcEngine.ErrorMsg = 'Malicious Page Request';
pcEngine.error = true;
}
}
}
module.exports = pathCheckerEngine;
And the final file
const fileEngine = require('./fileErrHandler');
const fs = require('fs');
const mime = require('mime');
function UrlController(fileResEngine) {
this.httpcode = null;
this.fileType = null;
this.data = null;
var urctrl = this;
this.requestUrl = function (fileResEngine) {
switch (fileResEngine.extname) {
case 'html':
fs.readFile(fileResEngine.fullpath, function (err, data) {
if (err)
{
console.log(err);
urctrl.httpcode = 404;
urctrl.data = "Page not found";
return;
}
urctrl.httpcode = 200;
urctrl.fileType = "'Content-Type': 'text/html'";
urctrl.data = data;
});
break;
case 'png':
case 'jpeg':
case 'jpg':
case 'bmp':
fs.readFile(fileResEngine.fullpath, function (err, data) {
if (err) {
console.log(err);
urctrl.httpcode = 404;
urctrl.data = "File not found";
return;
}
urctrl.httpcode = 200;
urctrl.fileType = mime.lookup(mime.lookup('./images' + req.url));
urctrl.data = data;
});
break;
default:
urctrl.httpcode = 404;
urctrl.data = "Page not Found"
break;
}
}
return;
}
module.exports = UrlController;
For starters, this is wrong:
var pathName = decodeURIComponent(parseUrl(req).pathname);
which is probably why your pathName later on is wrong. req is a request object; it's not a URL you can parse. The URL path is in that request object: req.url will contain the request path (without domain, port, and protocol - just the path).
So, change that above line to this:
var pathName = decodeURIComponent(req.url);
I can't promise there aren't other errors in that block of code (I don't know that fileEngine module at all), but this is at least one thing to clear up: it affects your pathName being wrong, which can lead to the error you get later on when trying to use pathName.
Now that you've fixed that, it appears you also have asynchronous problems. You've coded UrlController.requestUrl, but not coded any way to know when it is done with its asynchronous work. It returns right away and then sometime later, the fs.readFile() inside of it finishes. So you end up trying to use the properties of your object long before they have been set.
You will need to use either promises or a callback to let your caller know when your asynchronous operations are actually done. I'd suggest you read this canonical answer on the topic of returning async data.
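As a rough sketch of the callback approach (not the original module, just an illustration of the shape described above), requestUrl could take a completion callback, and the caller would only write the response once it fires:
// Sketch only: a callback-style version of requestUrl. Names mirror the
// question's code; error handling is simplified for brevity.
const fs = require('fs');
function UrlController() {
    this.requestUrl = function (fileResEngine, done) {
        fs.readFile(fileResEngine.fullpath, function (err, data) {
            if (err) {
                // Tell the caller the file was not found; the caller decides how to respond.
                return done(null, { httpcode: 404, fileType: 'text/plain', data: 'Page not found' });
            }
            done(null, { httpcode: 200, fileType: 'text/html', data: data });
        });
    };
}
// In the http.createServer handler, write the response inside the callback,
// not immediately after calling requestUrl:
// new UrlController().requestUrl(fileResEngine, function (err, result) {
//     res.writeHead(result.httpcode, { 'Content-Type': result.fileType });
//     res.end(result.data);
// });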