Couldn't find an answer, so I'm asking here -
I'm writing an API in Node.js (6.2.0) and I have a problem when serving multiple clients.
The code is -
"use strict";
const express = require('express');
const router = express.Router();
const FileRetriever = require('./FileRetriever');
function doSomething(uid, callback) {
let finalCount = 0;
let cb = null;
FileRetriever.foo(uid, function (err, data) {
finalCount = data.length;
cb = callback(finalCount);
data.forEach(function(obj, i) {
doSomething2(obj, cb);
});
})
}
function doSomething2(_obj, cb) {
let fn = null;
FileRetriever.bar(_obj, function(err, data){
cb(null, data);
})
}
router.route('/foo').get(function (req, res) {
    let uid = req.query.uid;

    function callback(_finalCount) {
        let counter = 1;
        let finalCount = _finalCount;
        let output = [];
        return function (err, data) {
            output.push(data);
            if (output.length === (finalCount - 1)) {
                res.send(output);
            }
        };
    }

    doSomething(uid, callback);
});
Obviously it's a bit more complicated than that, but this is the simplified version.
Please help me understand what I'm missing.
This is how I expect it to work:
A user goes to /foo with a parameter uid.
They reach this route and doSomething is invoked for that user.
In doSomething, I first get the initial data and invoke callback with its length; callback returns a function of its own, which from now on is known as cb.
When cb is passed to doSomething2 and gets invoked there, it's still in the same call stack, for the same user.
I ran a couple of tests, with one user that should return an output with a length of 6 and another with a length of 100.
When I run this code once per user, it all works fine, but if the route gets called at the same time by the two users, the lengths are not [6, 100] (they always come out the same, though).
What am I doing wrong?
It should work...
Hope that I was clear, thanks.
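For reference, here is a minimal standalone sketch of the closure pattern described in the question (my own illustration, not the original code, and it says nothing about what FileRetriever does internally): each request that hits the route builds its own callback closure over its own res, so two concurrent requests do not share an output array at this level.

const express = require('express');
const app = express();

// makeCollector is a hypothetical name for what the question's callback()
// does: collect async results and respond once enough of them have arrived.
function makeCollector(expected, res) {
    const output = [];
    return function (err, data) {
        output.push(data);
        if (output.length === expected) {
            res.send(output); // responds only to the request that created this closure
        }
    };
}

app.get('/foo', function (req, res) {
    const cb = makeCollector(3, res);
    // simulate three asynchronous results for this particular request
    [1, 2, 3].forEach(function (n) {
        setImmediate(function () {
            cb(null, n);
        });
    });
});

app.listen(3000);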
Related
Please see my code below:
I am trying to assign the recordset to a variable so that I can use it from index.js.
I am able to console.log the recordset, but when I call this IIFE, it always says undefined.
var mssql = require('mssql');
var dbcon = require('./dbcon');

var storage = (function () {
    var connection = new mssql.Connection(dbcon);
    var request = new mssql.Request(connection);
    connection.connect(function (recordset) {
        request.query('select getdate()', function (err, recordset) {
            console.dir(recordset);
        });
        connection.close();
    });
})();

module.exports = storage;
index.js
var storage = require('./storage');

"AMAZON.HelpIntent": function (intent, session, response) {
    storage(function (recordset) {
        var speechOutput = 'Your result is ' + recordset;
        response.ask(speechOutput);
    });
}
However, I can't get the recordset. I got "Your result is {object, object}. "
That's because the IIFE is executing right away. Try returning a function instead, and then execute that function when you import the module:
var storage = (function (mssql, dbcon) {
    return function () {
        var connection = new mssql.Connection(dbcon);
        var request = new mssql.Request(connection);
        connection.connect(function (recordset) {
            request.query('select getdate()', function (err, recordset) {
                console.dir(recordset);
            });
            connection.close();
        });
    };
})(mssql, dbcon);
Also, I don't understand why you need the IIFE at all; why don't you just assign the function to the variable?
If you're trying to assign the variable "recordset" to "storage", that will never work, because "connection.connect" is an asynchronous function; in that case you should think about callback functions or promises.
Update
Based on your request, here's an implementation with a callback function and how it's used
var mssql = require('mssql');
var dbcon = require('./dbcon');

var storage = function (callback) {
    var connection = new mssql.Connection(dbcon);
    var request = new mssql.Request(connection);
    connection.connect(function (recordset) {
        request.query('select getdate()', function (err, recordset) {
            if (!err && callback) {
                callback(recordset);
            }
            connection.close();
        });
    });
};

module.exports = storage;
// --------------------------------------------------
// implementation in another module
var storage = require("module_path"); // (1)
var answer;

storage(function (recordset) { // (2)
    answer = recordset;
    console.log(answer); // actual data, (3)
    // implement your logic here
});

console.log(answer); // undefined (4)
// --------------------------------------------------
How this code works:
- You start by calling the storage method and passing it a callback function.
- The whole point of the callback is that you don't wait for the result: your code keeps running while the storage method is connecting to the database and fetching the data, and since DB operations are much slower, line (4) will execute before line (3).
- The flow of execution will be as follows:
line (1)
line (2)
line (4)
line (3), at some time in the future, when the data has been retrieved from the database
- To see this more clearly, try adding this as the last line:
setTimeout(function () { console.log(answer); }, 3000);
This waits for a while, until the data has come back.
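Since the answer above mentions promises as an alternative to callbacks, here is a minimal sketch (my own addition, not part of the answer) of wrapping the same callback-based storage module in a Promise; the storageAsync name is purely illustrative.

var storage = require('./storage');

// Hypothetical wrapper: resolves once storage's callback fires with the recordset.
function storageAsync() {
    return new Promise(function (resolve) {
        storage(function (recordset) {
            resolve(recordset);
        });
    });
}

storageAsync().then(function (recordset) {
    console.log(recordset); // actual data, only available inside then()
});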
The following Lambda code works perfectly fine when tested locally using alexa-app-server, but when published and tested on AWS Lambda, it gets into the else statement and prints console.log('OUT PUBLISH'), yet it doesn't publish to 'lambda/channelNumber', doesn't send the correct response back to me, and never prints 'IN PUBLISH'.
Any ideas why it's just completing the bottom half of the else statement and never reaching the publish callback?
Code Snippet where I believe the problem lies
function (request, response) {
    var channelNumber = request.slot('CHANNELNUMBER');
    if (_.isEmpty(channelNumber)) {
        var prompt = 'I didn\'t hear a channel code. Tell me a channel code.';
        response.say(prompt).shouldEndSession(true);
        return true;
    } else {
        // Doesn't publish any of this?????
        thingShadows.publish('lambda/channelNumber', channelNumber, function () {
            var prompt1 = 'Okay.';
            response.say(prompt1).shouldEndSession(true);
            console.log('in publish');
        });
        // But prints this??
        console.log('out publish');
        return true;
    }
}
Full Code
'use strict';
module.change_code = 1;

var Alexa = require('alexa-app');
var skill = new Alexa.app('smartmote');
var awsIot = require('aws-iot-device-sdk');
var deviceName = "tv";
var _ = require('lodash');
var path = require('path');

var host = "XXXXXXXXXXXXXXXXXXXX.iot.us-east-1.amazonaws.com";
// App id is the skill being used.
var app_id = "amzn1.ask.skill.YYYYYYYYYYYYYYYYYYYYY";

var thingShadows = awsIot.thingShadow({
    keyPath: path.join(__dirname, '/Raspi.private.key'),
    certPath: path.join(__dirname, '/Raspi.cert.pem'),
    caPath: path.join(__dirname, '/root-CA.crt'),
    clientId: deviceName,
    region: "us-east-1",
});

var reprompt = 'I didn\'t hear a channel, tell me a channel number or name to change to that channel';

skill.launch(function (request, response) {
    var prompt = 'To change channel, tell me a channel number.';
    response.say(prompt).reprompt(reprompt).shouldEndSession(true);
});

skill.intent('ChannelNumberIntent', {
        'slots': {
            'CHANNELNUMBER': 'CHANNELID'
        },
        'utterances': ['{|Change|put} {|the|on} {|channel} {|to} {-|CHANNELNUMBER}']
    },
    function (request, response) {
        var channelNumber = request.slot('CHANNELNUMBER');
        if (_.isEmpty(channelNumber)) {
            var prompt = 'I didn\'t hear a channel code. Tell me a channel code.';
            response.say(prompt).shouldEndSession(true);
            return true;
        } else {
            thingShadows.publish('lambda/channelNumber', channelNumber, function () {
                console.log('in pub');
                var prompt1 = 'Okay.';
                response.say(prompt1).shouldEndSession(true);
                callback();
            });
            console.log('out pub');
            return true;
        }
    }
);

module.exports = skill;
This is most likely because of the asynchronous nature of your code.
You haven't told us what thingShadows.publish() does, but it appears to take a callback function as its last argument. Presumably this function will be called when publish() has finished doing whatever it does.
When running locally I would imagine that the output you see is (in this order):
out publish
in publish
Notice that out publish gets called before in publish. This is because the publish method is asynchronous, so execution will continue as soon as it is called. In your case, you are calling return immediately after calling publish, which probably means your lambda job is ending before it has a chance to log in publish.
You haven't provided enough information about the rest of your lambda code/setup to provide a full answer, but you need to make sure that you are waiting for your publish method to have finished before continuing. One way to achieve this is to use the callback object that is passed to your lambda handler:
exports.myHandler = function (event, context, callback) {
    // Other code

    thingShadows.publish('lambda/channelNumber', channelNumber, function () {
        var prompt1 = 'Okay.';
        response.say(prompt1).shouldEndSession(true);
        console.log('in publish');
        // When the publish method is complete, we can call `callback`
        // to tell lambda we are done
        callback();
    });
}
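If threading the Lambda callback through your handlers is awkward, another option (my own sketch, not part of the answer above) is to wrap the publish call in a Promise and have the handler wait on it before finishing; publishChannel is a hypothetical helper name.

// Hypothetical helper: the promise resolves only after the publish callback has fired.
function publishChannel(channelNumber) {
    return new Promise(function (resolve) {
        thingShadows.publish('lambda/channelNumber', channelNumber, function () {
            console.log('in publish');
            resolve();
        });
    });
}

// The handler can then wait on publishChannel(channelNumber) before it tells
// Lambda it is done, instead of returning immediately after calling publish.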
I want to understand how this code works in the Node.js/Express framework.
My assumption is this:
1. Request A comes in and is handled by the thatRequest route handler.
2. Request B comes in and thatRequest handles that one too.
3. There are no shared variables between the A and B connections.
The connections are treated privately. They share some common configs but nothing more.
A comes from one computer and B comes from another computer.
My understanding is that each time checkStates is called, it starts with fresh copies of the variables defined outside the function.
What would happen when using the Node cluster module, which spawns a new process for each CPU the environment has? (See the sketch after the code below.)
// in thatRequest.js file
const RETRY_EVERY = 3; // seconds
const MAX_RETRY = 10; // maximum retry times

let checkInterval = 0;
let remainingRetry = 0;

async function checkStates(req, res) {
    remainingRetry = !remainingRetry ? MAX_RETRY * 1 : remainingRetry--;
    if (remainingRetry < 1) {
        clearInterval(checkInterval);
    }
}

const request = (req, res) => {
    checkInterval = setInterval(() => {
        checkStates(req, res);
    }, RETRY_EVERY * 1000);
    return checkStates(req, res);
};

export default request;
// in a routes.js file
import thatRequest from 'thatRequest';

export default (req, res) => {
    thatRequest(req, res);
};
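As a point of reference for the assumptions above, here is a small standalone sketch (my own addition, not part of the question): a module is loaded once per Node process, so every request handled by that process sees the same module-level variables, while the cluster module gives each worker process its own independent copy of them.

// Standalone sketch, not the question's code: module-level state in Express.
const express = require('express');
const app = express();

let hits = 0; // module-level: shared by every request served by this process

app.get('/count', (req, res) => {
    hits += 1; // all requests in this process increment the same counter
    res.send('This process has served ' + hits + ' requests');
});

app.listen(3000);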
I'm trying to make it so that I can pass my trends variable from its function into a renderer for my Pug template, and I can't seem to do it.
var express = require('express');
var router = express.Router();
var googleTrends = require('google-trends-api');
var auth = require('http-auth');
var ustrends;
var uktrends;
const Console = require('console').Console;

var basic = auth.basic({
    realm: "Web."
}, function (username, password, callback) { // Custom authentication method.
    callback(username === "user" && password === "pass");
});

var find = ',';
var regex = new RegExp(find, 'g');

googleTrends.hotTrends('US').then(function (trends) {
    ustrends = trends;
});

googleTrends.hotTrends('EU').then(function (trends1) {
    uktrends = trends1;
});

console.log(ustrends);

/* GET home page. */
router.get('/', auth.connect(basic), function (req, res, next) {
    res.render('index', {
        trends: ustrends.toString().replace(regex, ", "),
        trends1: uktrends.toString().replace(regex, ", "),
        title: 'Trends in the U.S & U.K'
    });
});

module.exports = router;
As you can see, I'm trying to pass the "ustrends" and "uktrends" variables into the renderer. Any help is appreciated.
Remember that hotTrends will return a promise, as it's getting results from Google's API. Since the renderer is outside of the callbacks wherein ustrends and uktrends are set to values, there's no guarantee these values will be set prior to the renderer being called.
You could use several nested callbacks, but that would push your code pretty far to the right. Instead, I recommend the async library, which has a function called series that lets you pass in 1) an array of functions to be executed in order and 2) a callback to run after those functions have completed, which receives an error (if there was one) and the functions' results. In the snippet below, the trends API returns its results before the renderer is called:
async.series([
    function (cb) {
        googleTrends.hotTrends('US').then(function (trends) {
            ustrends = trends;
            cb();
        });
    },
    function (cb) {
        googleTrends.hotTrends('EU').then(function (trends1) {
            uktrends = trends1;
            cb();
        });
    }
], function (err, results) {
    /* handle errors, do rendering stuff */
});
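Since hotTrends already returns promises, a variation not shown in the answer above would be to skip the extra library and wait on both promises with Promise.all; a rough sketch, reusing the router, googleTrends, auth/basic and regex variables from the question:

router.get('/', auth.connect(basic), function (req, res, next) {
    Promise.all([
        googleTrends.hotTrends('US'),
        googleTrends.hotTrends('EU')
    ]).then(function (results) {
        // both trend lists are available here, in the same order as requested
        res.render('index', {
            trends: results[0].toString().replace(regex, ", "),
            trends1: results[1].toString().replace(regex, ", "),
            title: 'Trends in the U.S & U.K'
        });
    }).catch(next); // hand any error to Express
});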
I have a Node.js server that requests data from two web servers: bbc.co.uk and sky.com. The RSS feeds are then parsed, and the user sees two lists: one from the BBC and one from Sky.
Here is the code.
var feed = require('feed-read');
var http = require('http');
var async = require('async');
var request = require('request');

var LIMIT = 10;
var UNABLE_TO_CONNECT = "Unable to connect.";
var BBC_URL = 'http://feeds.bbci.co.uk/news/rss.xml';
var SKY_URL = 'http://news.sky.com/feeds/rss/home.xml';

var server = http.createServer(onRequest);
server.listen(9000);

function onRequest(req, res) {
    res.writeHead(200, {
        'Content-Type': 'text/html; charset=utf-8'
    });
    async.parallel([ function (callback) {
        feed(BBC_URL, onRssFetched);
        // TODO: where to call callback()?
    }, function (callback) {
        feed(SKY_URL, onRssFetched);
        // TODO: where to call callback()?
    } ], function done(err, results) {
        console.log("Done");
        if (err) {
            throw err;
        }
    });
}

function onRssFetched(err, articles) {
    console.log("RSS fetched");
    var html = [];
    if (err) {
        html.push("<p>", UNABLE_TO_CONNECT, "</p>");
    } else {
        html.push("<ol>");
        var i = 0;
        articles.forEach(function (entry) {
            if (i == LIMIT) {
                return;
            }
            html.push("<li><a href='" + entry.link + "'>" + entry.title + "</a></li>");
            i++;
        });
    }
    console.log(html.join(""));
}
Now I don't know how to add the result to the web page. If I call callback() right after calling the feed method, callback() will be executed without waiting until feed has completed its job. On the other hand, I can't pass callback to feed. Maybe the approach is wrong, and I need some other module for RSS parsing.
#Maksim I know your original question included the async module, but I propose an alternative:
why not stream each article to the client as it comes in rather than waiting for all RSS feeds to return before sending a response...?
By using async.parallel you are telling node:
"wait until we have a response from all these news services
and only then (combine the articles into) a single response to the client ..."
This uses up memory for each connected client while you wait for all responses (from the RSS news services) ... wasteful.
So I've written my answer without resorting to async.
And, instead of waiting for ages (while async combines all the feeds into one),
the client sees news as soon as the first rss feed returns!
var feed = require('feed-read'), // require the feed-read module
    http = require("http"),
    urls = [
        "http://feeds.bbci.co.uk/news/rss.xml",
        "http://news.sky.com/feeds/rss/home.xml",
        "http://www.techmeme.com/feed.xml"
    ]; // Example RSS Feeds

http.createServer(function (req, res) {
    // send basic http headers to client
    res.writeHead(200, {
        "Content-Type": "text/html",
        "Transfer-Encoding": "chunked"
    });

    // setup simple html page:
    res.write("<html>\n<head>\n<title>RSS Feeds</title>\n</head>\n<body>");

    var completed = 0; // how many feeds have answered so far

    // loop through our list of RSS feed urls
    for (var j = 0; j < urls.length; j++) {
        // fetch rss feed for the url:
        feed(urls[j], function (err, articles) {
            if (!err) {
                // loop through the list of articles returned
                for (var i = 0; i < articles.length; i++) {
                    // stream article title (and whatever else you want) to client
                    res.write("<h3>" + articles[i].title + "</h3>");
                }
            }
            // once every feed has responded, close the http response
            completed++;
            if (completed === urls.length) {
                res.end("</body>\n</html>"); // end http response
            } // else there are still rss feeds outstanding
        }); // end call to feed (feed-read) method
    } // end urls for loop
}).listen(9000);
Key Advantages:
The people connecting to your app will see news/results much faster (almost instantly!)
Your app uses much less memory
You don't have to edit/update any code when you add new RSS news feeds!
For even more detail/notes on this solution
see: https://github.com/nelsonic/node-parse-rss
No, you don't need another library. What you need to do is hand over callback to your feed function instead of onRssFetched. This way the individual RSS feeds are handed over to the final callback of your async.parallel call via the result variable.
In this variable you then have access to both RSS feeds at the same time, and you can do whatever you want to do with them.
So, basically your logic needs to be:
async.parallel({
    bbc: function (callback) {
        feed(BBC_URL, callback);
    },
    sky: function (callback) {
        feed(SKY_URL, callback);
    }
}, function (err, result) {
    if (err) {
        // Somewhere, something went wrong…
    }

    var rssBbc = result.bbc,
        rssSky = result.sky;

    // Merge the two feeds or deliver them to the client or do
    // whatever you want to do with them.
});
And that's it :-).
To amplify #nelsonic's answer (enough that I feel this warrants its own answer), feed-read already processes asynchronously; at its heart, it's still running on http.request. If you look at the code, you'll see that you can even pass in an array of URLs directly and it will loop through them, but it uses more of an "async.eachSeries" approach, where the next call only occurs after the previous one completes, which appears not to be what you're looking for.
If you truly want to wait for calls to complete first before handling them, you're better off asynchronously buffering the data, then using underscore's _.after() to run after all URLs have finished.
But odds are, what you really want to do (unless you're just looking for an example to try out async) is #nelsonic's answer.
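For illustration, here is a rough sketch (my own addition, not from the answer) of the buffer-then-_.after approach mentioned above, assuming the same feed(url, callback) API and underscore installed:

var _ = require('underscore');
var feed = require('feed-read');

var urls = [BBC_URL, SKY_URL]; // the same URLs as in the question
var buffered = {}; // article lists keyed by url

// _.after returns a function that only runs its body on the Nth call
var done = _.after(urls.length, function () {
    // every feed has responded; build the page from `buffered` here
    console.log(Object.keys(buffered));
});

urls.forEach(function (url) {
    feed(url, function (err, articles) {
        buffered[url] = err ? [] : articles;
        done(); // the final handler fires once the last feed has answered
    });
});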
I would ideally stream the RSS data instead of aggregating it in memory; #nelsonic has explained the correct approach to this problem.
Still, if we were to get your code running, consider the following:
var util = require('util');
var http = require('http');
var async = require('async');
var feed = require('feed-read');
var request = require('request');

var LIMIT = 10;
var UNABLE_TO_CONNECT = 'Unable to connect.';
var BBC_URL = 'http://feeds.bbci.co.uk/news/rss.xml';
var SKY_URL = 'http://news.sky.com/feeds/rss/home.xml';

var server = http.createServer(onRequest);
server.listen(9000);

function onRequest(req, res) {
    util.log('Request received!');
    res.writeHead(200, {
        'Content-Type': 'text/html; charset=utf-8'
    });

    async.parallel({
        bbc: function (callback) {
            feed(BBC_URL, function (err, articles) {
                var html = onRssFetched(err, articles);
                callback(err, html);
            });
        },
        sky: function (callback) {
            feed(SKY_URL, function (err, articles) {
                var html = onRssFetched(err, articles);
                callback(err, html);
            });
        }
    }, done);

    function done(err, results) {
        util.log('Received results: ' + Object.keys(results || {}).join(','));
        if (!err && results) {
            var entry, html;
            for (entry in results) {
                html = results[entry];
                res.write(html.join(''));
            }
            util.log('Send complete!');
            res.end();
        } else {
            console.log(err || 'no data in results');
            res.end('Unable to process your request');
        }
    }
}

function onRssFetched(err, articles) {
    var html = [];
    if (err) {
        html.push('<p>', UNABLE_TO_CONNECT, '</p>');
    } else {
        // limit number of articles
        articles = articles.slice(0, LIMIT);
        html.push('<ol>');
        articles.forEach(function (entry) {
            html.push('<li>' + entry.title + '</li>');
        });
        html.push('</ol>');
    }
    return html;
}

// -- Test Code ---------------------------------------------------------
if (require.main === module) {
    (function () {
        var req, res = {
            writeHead: console.log,
            write: console.log,
            end: console.log
        };
        // onRequest(req, res);
    })();
}
Let me know if you face any issues.