Node.js respond with asynchronous data - javascript

Recently I started learning a little bit about Node.js and its capabilities and tried to use it for some web services.
I wanted to create a web service that will serve as a proxy for web requests.
I wanted my service to work this way:
User will access my service -> http://myproxyservice.com/api/getuserinfo/tom
My service will perform request to -> http://targetsite.com/user?name=tom
The response data would be reflected back to the user.
To implement it I used the following code:
app.js:
var express = require('express');
var bodyParser = require('body-parser');
var app = express();

app.use(bodyParser.urlencoded({ extended: false }));
app.use(bodyParser.json());

var proxy = require('./proxy_query.js')

function makeProxyApiRequest(name) {
  return proxy.getUserData(name, parseProxyApiRequest);
}

function parseProxyApiRequest(data) {
  returned_data = JSON.parse(data);
  if (returned_data.error) {
    console.log('An eror has occoured. details: ' + JSON.stringify(returned_data));
    returned_data = '';
  }
  return JSON.stringify(returned_data);
}

app.post('/api/getuserinfo/tom', function(request, response) {
  makeProxyApiRequest('tom', response);
  //response.end(result);
});

var port = 7331;
proxy_query.js:
var https = require('https');

var callback = undefined;
var options = {
  host: 'targetsite.com',
  port: 443,
  method: 'GET',
};

function resultHandlerCallback(result) {
  var buffer = '';
  result.setEncoding('utf8');
  result.on('data', function(chunk){
    buffer += chunk;
  });
  result.on('end', function(){
    if (callback) {
      callback(buffer);
    }
  });
}

exports.getUserData = function(name, user_callback) {
  callback = user_callback
  options['path'] = user + '?name=' + name;

  var request = https.get(options, resultHandlerCallback);
  request.on('error', function(e){
    console.log('error from proxy_query:getUserData: ' + e.message)
  });
  request.end();
}
app.listen(port);
I hope I didn't break this code, because I replaced some things to fit my example.
Anyway, the problem is that I want to send the response back to the user when the HTTP request is done, and I can't find how to do so, because Express uses asynchronous calls and so does the HTTP request.
I know that if I want to do this, I should pass the response object to makeProxyApiRequest so it can pass it on to the callback, but I couldn't get that to work because of the async flow.
Any suggestions?
Help will be appreciated.

As you're using your functions to process requests inside your route handling, it's better to write them as express middleware functions, taking the specific request/response pair, and making use of express's next cascade model:
function makeProxyApiRequest(req, res, next) {
  var name = req.params.name;
  // getUserData is asynchronous, so hand its result to the next
  // middleware via res.locals once the callback fires (this assumes
  // getUserData takes an error-first callback, like the module below).
  proxy.getUserData(name, function(err, userdata) {
    if (err) return next(err);
    res.locals.userdata = userdata;
    next();
  });
}

function parseProxyApiRequest(req, res, next) {
  try {
    // remember that JSON.parse will throw if it fails!
    var data = JSON.parse(res.locals.userdata);
    if (data.error) {
      return next('An error has occurred. details: ' + JSON.stringify(data));
    }
    res.locals.proxyData = data;
    next();
  }
  catch (e) { next("could not parse user data JSON."); }
}
app.post('/api/getuserinfo/tom',
  makeProxyApiRequest,
  parseProxyApiRequest,
  function(req, res) {
    // res.write or res.json or res.render or
    // something, with this specific request's
    // data that we stored in res.locals.proxyData
  }
);
Even better would be to move those middleware functions into their own file now, so you can simply do:
var middleware = require("./lib/proxy_middleware");

app.post('/api/getuserinfo/tom',
  middleware.makeProxyApiRequest,
  middleware.parseProxyApiRequest,
  function(req, res) {
    // res.write or res.json or res.render or
    // something, with this specific request's
    // data that we stored in res.locals.proxyData
  }
);
And keep your app.js as small as possible. Note that the client's browser will simply wait for a response from Express, which happens once res.write, res.json, res.render, etc. is used. Until then the connection is simply kept open between the browser and the server, so if your middleware calls take a long time, that's fine: the browser will happily wait a long time for the response, and will be doing other things in the meantime.
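For completeness, the final handler in that chain can be as small as the following sketch; sendProxyApiResponse is just a name I'm assuming here, and it sends back whatever the earlier middleware stored:

function sendProxyApiResponse(req, res) {
  // the previous middleware already put the parsed result in res.locals
  res.json(res.locals.proxyData);
}

app.post('/api/getuserinfo/tom',
  makeProxyApiRequest,
  parseProxyApiRequest,
  sendProxyApiResponse
);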
Now, in order to get the name, we can use express's parameter construct:
app.param("name", function(req, res, next, value) {
req.params.name = value;
// do something if we need to here, like verify it's a legal name, etc.
// for instance:
var isvalidname = validator.checkValidName(name);
if(!isvalidname) { return next("Username not valid"); }
next();
});
...
app.post("/api/getuserinfo/:name", ..., ..., ...);
Using this system, the :name part of any route will be treated based on the name parameter we defined using app.param. Note that we don't need to define this more than once: we can do the following and it'll all just work:
app.post("/api/getuserinfo/:name", ..., ..., ...);
app.post("/register/:name", ..., ..., ... );
app.get("/api/account/:name", ..., ..., ... );
and for every route with :name, the code for the "name" parameter handler will kick in.
As for the proxy_query.js file, rewriting this to a proper module is probably safer than using individual exports:
// let's not do more work than we need: http://npmjs.org/package/request
// is way easier than rolling our own URL fetcher. In Node.js the idea is
// to write as little as possible, relying on npmjs.org to find you all
// the components that you need to glue together. If you're writing more
// than just the glue, you're *probably* doing more than you need to.
var request = require("request");

module.exports = {
  getURL: function(name, url, callback) {
    request.get(url, function(err, result) {
      if (err) return callback(err);
      // do whatever processing you need to do to result:
      var processedResult = ....
      callback(false, processedResult);
    });
  }
};
and then we can use that as proxy = require("./lib/proxy_query"); in the middleware that needs to do the actual URL data fetching.
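To tie the pieces together, here is a minimal sketch of what lib/proxy_middleware.js could look like once it delegates to that module; the targetsite.com URL comes from the question, and the exact getURL arguments are an assumption based on the module sketch above:

// lib/proxy_middleware.js -- a sketch, assuming the proxy module above
// exports getURL(name, url, callback) with an error-first callback
var proxy = require("./proxy_query");

function makeProxyApiRequest(req, res, next) {
  var name = req.params.name;
  var url = "https://targetsite.com/user?name=" + encodeURIComponent(name);
  proxy.getURL(name, url, function(err, userdata) {
    if (err) return next(err);
    // hand the raw data to the next middleware (parseProxyApiRequest)
    res.locals.userdata = userdata;
    next();
  });
}

module.exports = {
  makeProxyApiRequest: makeProxyApiRequest
};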

Related

Node.js, ajax - Frontend routing

I'm working on a SPA website with Node.js, jQuery, Mongoose and MongoDB for a shopping website.
The AJAX requests and responses work perfectly when starting from the index.html file. So, for example, beginning at http://localhost:3000, when someone clicks a link called 'products' I send an AJAX request to the server and the server sends the necessary product information back asynchronously, which leads to http://localhost:3000/products. But the problem is that if someone types http://localhost:3000/products directly into the address bar, it will show the JSON representation of the products.
This is my code:
script.js
function redirect(link) {
  $.ajax({
    type: 'GET',
    url: 'http://localhost:3000/' + link,
    contentType: 'application/json',
    data: {
      link
    },
    success: function (res) {
      let container = $('#contentToSwap');
      container.html('');
      res.products.forEach(function (products_) {
        ...
      });
    }
  });
}
app.js
var Product = require('./models/product');
var mongoose = require('mongoose');
var bodyParser = require('body-parser');
var path = require('path');
var express = require('express');
var app = express();

mongoose.connect('mongodb://localhost:27017/shopping');
var PORT = process.env.PORT || 3000;

app.use(express.static(path.join(__dirname, 'public')));
app.use(bodyParser.json());

app.get('*', function(req, res) {
  Product.find(function(err, docs) {
    let productChunks = [];
    let chunksize = 4;
    let display = [];
    for (var i = 0; i < docs.length; i++) {
      if (docs[i].productType == req.query.link) display.push(docs[i]);
    }
    for (var i = 0; i < display.length; i += chunksize) {
      productChunks.push(display.slice(i, i + chunksize));
    }
    res.send({ products: productChunks });
  });
});

app.listen(PORT, function () {
  console.log('Listening on port ' + PORT);
});
So I need some sort of frontend routing if the user doesn't start at the index.html file. I know that I could write my own router to route the URLs correctly, and that I could route all requests back to index.html like this:
app.get('*', function(req, res) {
  res.sendFile(__dirname + '/public/index.html');
});
But then I cannot load all the necessary product information from the server when someone clicks a link. So I'm a little bit confused about how to tackle this issue. Any help is appreciated.
This is usually achieved by separating API routes from normal ones by adding a specific URL prefix, such as /api, for all routes that return JSON data. What you can do is specify /api/whatever-you-want, make it the target of your AJAX call, and place it above app.get('*', ...).
Since routes and middleware functions are resolved top to bottom, it will be matched by your AJAX call only, leaving /products unaffected. A minimal sketch of that ordering follows below.
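Here's what that could look like, reusing Product and path from the question's app.js (the exact response shape and error handling are assumptions):

// the /api route answers the AJAX call with JSON, and the catch-all
// below it hands everything else to the SPA's index.html
app.get('/api/products', function(req, res) {
  Product.find(function(err, docs) {
    if (err) return res.status(500).send(err);
    res.send({ products: docs });
  });
});

app.get('*', function(req, res) {
  res.sendFile(path.join(__dirname, 'public', 'index.html'));
});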
Answer to a follow-up question: is it possible to redirect the user from /api/products to /products if the request wasn't generated by AJAX?
Yes, it is possible, by adding a query parameter to the AJAX call that will not be present on a normal call, and then checking for it on the server side to decide what to do when that specific query parameter is missing.
Let's assume some client-side JS that generates the AJAX call.
fetch('/api/products?api=true')
  .then((data) => data.json())
  .then((json) => console.log(json));
Notice the request url - /api/products?api=true
Now assume a normal call from an HTML file, i.e. a plain link pointing at the same route:
<a href="/api/products">products</a>
These two calls differ in the api query parameter (the AJAX call has it, the other one doesn't).
And for the server-side part of the task: the request query parameters object can be accessed via the query property on the request object (req.query).
app.get('/api/products', (req, res) => {
  if (!req.query.api) {
    // if the GET request doesn't contain the api param, then
    // handle it accordingly, e.g. do a redirect
    return res.redirect('/products');
  }
  // request coming from an ajax call, send JSON data back
  res.json({ randomStuff: 'abcd' });
});

Using more than one unirest request with Sails

I have the following code
index: function (req, res) {
  var Request = unirest.get("https://poker.p.mashape.com/index.php?players=4").headers({ "X-Mashape-Authorization": "xxxxxxxxxxxxxxxxx" }).end(function (response) {
    players = response.body;
    showdown_total = players.showdown.length;
    showdown = Array();
  });
  console.log(players);
  // Send a JSON response
  res.view({
    hello: 'world',
    //players: players
  });
},
It works great if I put the res.view inside the unirest get callback, but I want to send those variables to the view and be able to add another unirest request.
Thanks for your help.
That is how asynchronous code works in Node.js.
Basically, when an operation doesn't evaluate right away, Node doesn't wait for it. It just says, "fine, no worries, just tell me when you are done"... sort of.
The thing is, in your code, you don't tell Node when your get request is done. You just fire the view off to the client before the request function has even started thinking about fetching the data.
How do you make Node wait?
You have some options: either give it a callback function ("do this when you are done"), or nest your functions. Those two are kind of the same thing, really.
I'll show you one solution, nested functions:
var urlOne = "https://poker.p.mashape.com/index.php?players=4",
    urlTwo = "http://some.other.url",
    headers = { "X-Mashape-Authorization": "xxxxxxxxxxxxxxxxx" };

// Run first request
unirest.get(urlOne).headers(headers).end(function (response) {
  var players = response.body;
  var showdown_total = players.showdown.length;
  var showdown = Array();

  // Run second request
  unirest.get(urlTwo).headers(headers).end(function (response) {
    var someVar = response.body;

    // Show all my data to the client
    res.view({
      players: players,
      someOther: someVar
    });
  });
});
Other solutions:
If you don't want to nest the functions, give them a callback to run when they are done.
Use a module for handling asynchronous code, for example one of the more popular ones, Async (see the sketch below).
I would suggest reading more about callbacks, asynchronous code and Node.js before jumping straight to external libraries.
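For instance, here is a rough sketch of the same two requests using async.parallel, reusing urlOne, urlTwo and headers from the nested example above; the error handling and the res.serverError call are assumptions:

var async = require('async');

async.parallel({
  players: function (done) {
    unirest.get(urlOne).headers(headers).end(function (response) {
      // unirest exposes any transport error on response.error
      done(response.error || null, response.body);
    });
  },
  someOther: function (done) {
    unirest.get(urlTwo).headers(headers).end(function (response) {
      done(response.error || null, response.body);
    });
  }
}, function (err, results) {
  if (err) return res.serverError(err);
  // both requests are finished here, so the view gets real data
  res.view({
    players: results.players,
    someOther: results.someOther
  });
});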
There is another way....you could use fibers!
Read some docs here!
var sync = require('synchronize');

index: function (req, res) {
  sync.fiber(function () {
    var response = sync.await(
      unirest.get("https://poker.p.mashape.com/index.php?players=4").headers(
        { "X-Mashape-Authorization": "xxxxxxxxxxxxxxxxx" }
      ).end(sync.defer())
    );
    var players = response.body;
    console.log(players);
    // Send a JSON response
    res.view({
      hello: 'world',
      players: players
    });
  });
}

JSONP support with node.js and mongo

I'm attempting to create a simple web service using node.js, express, monk, and mongodb which returns results from mongodb based on the params in the URL. I want to add jsonp support to the calls. The service will be called as such:
localhost:3000/database/collection/get?param1=Steve&param2=Frank&callback=foo
app.js
var mongo_address = 'localhost:27017/database';
var db = monk(mongo_address);
app.get('/:coll/get', routes.handle(db));
routes/index.js
exports.handle = function(db) {
  return function(req, res) {
    // Send request
    db.get(req.params.coll).find(req.query, {fields:{_id:0}}, function(e, docs) {
      if (e) throw e;
      res.jsonp(docs);
    });
  };
}
When I use the built-in JSONP support with res.jsonp, it sends the callback param to Mongo and returns an empty list. I've tried stripping out the callback param during the query and then manually adding it back to the results, without much luck. I feel like I'm missing something simple. Any help would be appreciated.
After some messing around with JS, I found a workable solution with minimal additional code. After stripping the callback from the query and storing the function name, I had to explicitly build the return string for JSONP requests.
exports.handle = function(db) {
  return function(req, res) {
    // Determine if URL implements JSONP
    var foundCallback = req.query.callback;
    var callbackVar;

    // If asking for JSONP, determine the callback function name and delete it from the map
    if (foundCallback) {
      callbackVar = req.query.callback;
      delete req.query.callback;
    }

    // Send request
    db.get(req.params.coll).find(req.query, {fields:{_id:0}}, function(e, docs) {
      if (e) throw e;
      // If callback, send function name and query results back, else use express JSON built in
      if (foundCallback)
        res.send('typeof ' + callbackVar + ' === \'function\' && ' + callbackVar + '(' + JSON.stringify(docs) + ');');
      else
        res.json(docs);
    });
  };
}
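For reference, a slightly shorter sketch of the same idea: build a copy of the query without the callback key for the Mongo lookup, and let Express's res.jsonp (which reads the untouched req.query.callback by default) produce the padded response; whether this behaves identically depends on your Express version, so treat it as an assumption:

exports.handle = function(db) {
  return function(req, res) {
    // copy the query params, minus the JSONP callback, for the Mongo query
    var mongoQuery = {};
    Object.keys(req.query).forEach(function(key) {
      if (key !== 'callback') mongoQuery[key] = req.query[key];
    });
    db.get(req.params.coll).find(mongoQuery, {fields:{_id:0}}, function(e, docs) {
      if (e) throw e;
      res.jsonp(docs); // wraps in the callback only when one was supplied
    });
  };
}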
Try
app.set("jsonp callback", true);

Connect signed cookie parsing falsy

I'm having problems while trying to parse back signed cookies in an express/connect application.
io.set('authorization', function (handshakeData, callback) {
  if (handshakeData.headers.cookie) {
    var signedCookies = cookie.parse(decodeURIComponent(handshakeData.headers.cookie));
    handshakeData.cookie = connect.utils.parseSignedCookies(signedCookies, secret);
  } else {
    return accept('No cookie transmitted', false);
  }
  callback(null, true); // error first callback style
});
What happens is that the call to connect.utils.parseSignedCookies returns an empty object. I looked into the source of the parse function and found out that it calls an unsign method, which takes a substring of the encoded value, tries to sign it again with the same secret, and compares the results to verify that it is the same value that was encoded. For some reason this fails and the values do not match. I don't know what I'm doing wrong, why those values differ, and why I'm unable to get the correct session ID.
My app initialization code looks like this:
app.use(express.cookieParser(secret));
app.use(express.session({
  key: 'sessionID',
  secret: secret,
  maxAge: new Date(Date.now() + 3600000),
  store: new RedisStore({
    client: redisClient
  })
}));
Please help and point out what I'm doing wrong here. Thank you.
The cookie parser is a middleware, so we have to use it like one. It will actually populate the object that you pass to it. This is how you would want to be using the parser:
// we need to use the same secret for Socket.IO and Express
var parseCookie = express.cookieParser(secret);

io.set('authorization', function(handshake, callback) {
  if (handshake.headers.cookie) {
    // pass a req, res, and next as if it were middleware
    parseCookie(handshake, null, function(err) {
      // use handshake.signedCookies, since the
      // cookie parser has populated it
    });
  } else {
    return accept('No session.', false);
  }
  callback(null, true);
});
The cookie parser API changed and this is what it looks like now:
module.exports = function cookieParser(secret) {
  return function cookieParser(req, res, next) {
    if (req.cookies) return next();
    var cookies = req.headers.cookie;

    req.secret = secret;
    req.cookies = {};
    req.signedCookies = {};

    if (cookies) {
      try {
        req.cookies = cookie.parse(cookies);
        if (secret) {
          req.signedCookies = utils.parseSignedCookies(req.cookies, secret);
          req.signedCookies = utils.parseJSONCookies(req.signedCookies);
        }
        req.cookies = utils.parseJSONCookies(req.cookies);
      } catch (err) {
        err.status = 400;
        return next(err);
      }
    }
    next();
  };
};
So what we're doing is passing handshake as a request object, and the parser will read the headers.cookie property. Then, the cookies will be parsed, and put into req.signedCookies. Since we passed handshake as req, the cookies are now in handshake.signedCookies. Note that the cookies are only signed because you passed a secret to the parser.
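Building on that, here is a minimal sketch of what the callback passed to parseCookie might do next; the 'sessionID' key comes from the question's session config, while sessionStore (the RedisStore instance) and the exact error strings are assumptions:

parseCookie(handshake, null, function(err) {
  if (err) return callback(err);
  // the parser populated handshake.signedCookies; the session cookie
  // holds the session ID under the key configured in express.session
  var sid = handshake.signedCookies['sessionID'];
  sessionStore.get(sid, function(err, session) {
    if (err || !session) return callback('No session found.', false);
    handshake.session = session; // available later on socket.handshake
    callback(null, true);
  });
});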
I was having problems left and right with cookies/sessions/socket.io etc. It was finally #vytautas' comment that helped me. In case anyone sees this, please make sure you're connecting to the correct host, whether you have it set up as localhost, an IP address, or whatever. Otherwise you won't be able to parse your incoming cookies.
(Seems kind of obvious in hindsight.)

Creating a Mockup REST API

I'm currently trying to create a Node.js server or something similar to mock up a REST API that reads in a JSON file and responds to requests with that data. I really only need GET requests supported. What is the best way to go about this?
Here's what I have so far:
/**
 * Sample items REST API
 */
function ItemsRepository() {
  this.items = [];
}

ItemsRepository.prototype.find = function (id) {
  var item = this.items.filter(function(item) {
    return item.itemId == id;
  })[0];
  if (null == item) {
    throw new Error('item not found');
  }
  return item;
}

/**
 * Retrieve all items
 * items: array of items
 */
ItemsRepository.prototype.findAll = function () {
  return this.items;
}

/**
 * API
 */
var express = require('express');
var app = express();
var itemRepository = new ItemsRepository();

app.configure(function () {
  // used to parse JSON object given in the body request
  app.use(express.bodyParser());
});

/**
 * HTTP GET /items
 * items: the list of items in JSON format
 */
app.get('/items', function (request, response) {
  response.json({items: itemRepository.findAll()});
});

/**
 * HTTP GET /items/:id
 * Param: :id is the unique identifier of the item you want to retrieve
 * items: the item with the specified :id in a JSON format
 * Error: 404 HTTP code if the item doesn't exist
 */
app.get('/items/:id', function (request, response) {
  var itemId = request.params.id;
  try {
    response.json(itemRepository.find(itemId));
  } catch (exception) {
    response.send(404);
  }
});

app.listen(8080); // the port on which the express server listens
I know that I would use the following to include the file; I just don't know how to stuff the data into the items.
var responseItemsData = require('./items-list.json');
This is trivial in node. You can load the data by requiring the .json file directly
var responseData = require('./my-json-file'); //.json extension optional
//Do this during your startup code, not during the request handler
Then to send it:
res.write(JSON.stringify(responseData));
The rest of the code you need is easily available in almost every node.js tutorial on the web.
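To connect that back to the question's ItemsRepository, here is a minimal sketch of loading the file into the repository at startup; it assumes items-list.json contains either an array of item objects or an object with an items array (the file name comes from the question):

// load once at startup, not inside a request handler
var responseItemsData = require('./items-list.json');

var itemRepository = new ItemsRepository();
itemRepository.items = responseItemsData.items || responseItemsData;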
You can use jasmine+sinon:
var Episode = Backbone.Model.extend({
  url: function() {
    return "/episode/" + this.id;
  }
});

beforeEach(function() {
  this.server = sinon.fakeServer.create();
});

afterEach(function() {
  this.server.restore();
});

it("should fire the change event", function() {
  var callback = sinon.spy();
  this.server.respondWith("GET", "/episode/123",
    [200, {"Content-Type": "application/json"}, '{"id":123,"title":"Hollywood - Part 2"}']);
  var episode = new Episode({id: 123});
  // Bind to the change event on the model
  episode.bind('change', callback);
  // makes an ajax request to the server
  episode.fetch();
  // Fake server responds to the request
  this.server.respond();
  // Expect that the spy was called with the new model
  expect(callback.called).toBeTruthy();
  expect(callback.getCall(0).args[0].attributes)
    .toEqual({id: 123, title: "Hollywood - Part 2"});
});
more details in: https://github.com/cld-santos/simplologia/tree/master/javascript-lessons/src/test/javascript/Sinon
The easiest way is to simply use the static middleware.
var express = require('express');
var app = express();
app.use('/api', express.static(__dirname + '/data'));
app.use('.*', express.static(__dirname + '/assets'));
This assumes that you are eventually going to put the REST API at /api, but that while you're testing, your data is going to be in the data directory and your CSS/JS/HTML is in the assets folder. In reality you can put it wherever you want, but now you can keep all your dev JSON separate from your code.
I created a tool for this purpose
https://github.com/homerquan/kakuen
