app.get and app.post get value in node.js - javascript

I'm using the app.get and app.post functions in Express on Node.js.
I have examples like this:
app.post('/status', function(req, res) {
  if (~packages.STATUSES.indexOf(req.body['status'])) {
    res.status(req.body.status, req.body.message);
    res.jsonp(new packages.Success('status updated'));
  } else {
    res.jsonp(new packages.Error('invalid status'));
  }
});
This works if I POST the data to the server, and I noticed that it gets the value with
req.body['status'];
What if I use GET and pass the value that way? What should I do to get the 'status'?
app.get('/status', function(req, res) {
  // how can I get status... var status = ??
  if (~packages.STATUSES.indexOf(status)) { // got value
    res.status(req.body.status, req.body.message);
    res.jsonp(new packages.Success('status updated'));
  } else {
    res.jsonp(new packages.Error('invalid status'));
  }
});
Sorry if it sounds dumb but I did some research and couldn't find any examples online. Thanks for your help.

I would strongly suggest using
req.param('status')
It looks up req.body and req.query as well as req.params, so with this you can map all three routes below to the same handler:
var statusHandler = function(req, res) {
  var status = req.param('status');
};

app.get('/status', statusHandler);
app.post('/status', statusHandler);
app.get('/status/:status', statusHandler);

Use req.query.status or req.query['status'] for GET requests.
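For example, a minimal sketch of the GET route from the question using the query string (the packages module is assumed from the question; /status?status=... is the calling convention):
app.get('/status', function(req, res) {
  // GET /status?status=ok  ->  req.query.status === 'ok'
  var status = req.query.status; // or req.query['status']
  if (~packages.STATUSES.indexOf(status)) {
    res.jsonp(new packages.Success('status updated'));
  } else {
    res.jsonp(new packages.Error('invalid status'));
  }
});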

Related

How can I display the output (HTML) depending on its id in Node.js using SQLite

Here is my code in Node.js. Can you give an example of what I should add in the HTML code? Thank you.
exports.get = function(req, res){
  db.all("SELECT * FROM f11", function(err, rows){
    rows.forEach(function (row) {
      console.log('row: ' + row.GIVENNAME);
    });
    // console.log(rows);
    res.render('form11.ejs', {array: rows});
  });
};
I see you are using the sqlite3 npm package for SQLite.
After looking at your code, you can try this to achieve what you want:
Change the GET route to take a param id like this in your server.js:
app.get('/legone/survey/surveyform/form11/:id', form11.get);
And, write your get function like this:
exports.get = function(req, res){
  var id = req.params.id;
  db.all("SELECT * FROM f11 WHERE id = ?", id, function(err, rows){
    rows.forEach(function (row) {
      console.log('row: ' + row.GIVENNAME);
    });
    res.render('form11.ejs', {array: rows});
  });
};
And your route will be something like this:
http://localhost/legone/survey/surveyform/form11/23
where 23 is the id. Put whatever you want in its place.
If you are using AJAX calls (Angular's $http here), it will look like this:
$http.get('/legone/survey/surveyform/form11/23')
  .success(function(response){
    // handle response
  })
  .error(function(error){
    // handle error
  });
You can test the route via Postman or something like that.
For more information, you can read the sqlite3 wiki API documentation, in particular the sections about params and callbacks.
Also, after looking at your code, it needs a lot of restructuring. You should define all your routes in one file and then use it inside app.js.
Something like this:
// define all the routes that start from '/legone/survey/surveyform' in one file
// (let it be 'surveyRoutes.js')
// and then include it in your app.js.
var surveyRoutes = require('./routes/surveyRoutes');
app.use('/legone/survey/surveyform', surveyRoutes);
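A minimal sketch of what that routes file could look like, assuming an Express Router and a hypothetical controller path (names are illustrative, not from the original post):
// routes/surveyRoutes.js (hypothetical file)
var express = require('express');
var router = express.Router();
var form11 = require('../controllers/form11'); // assumed controller exporting the get function above

// mounted at '/legone/survey/surveyform' in app.js, so this handles
// GET /legone/survey/surveyform/form11/:id
router.get('/form11/:id', form11.get);

module.exports = router;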
If you want to get an id and query on that, then first you'll need to pass an id in the request query, like http://localhost/api/getUser?id=23
exports.getUser = function(req, res){
  var user_id = req.query.id;
  // use a parameterized query and db.get, since you only expect one row
  db.get("SELECT * FROM f11 WHERE id = ?", user_id, function(err, row){
    console.log(row); // you'll only get one row
    res.render('form11_detail.ejs', {data: row});
  });
};
If you add this code, then for displaying only one row you could use the 'getUser' function, and for displaying all data you could use your 'get' function.

NodeJS restify API caching best practice

I am very new to NodeJS and I am building my first API using restify.
I want to find out what the best practice is for caching the response data - each API call must have its own cache time.
I have looked at res.cache() but that seems to be only per user request and not a global application cache.
I then looked at restify-cache but the documentation did not clearly tell me how to use it.
My application works like this:
server.js code:
var restify = require('restify');
var mysqlDB = require('./config/connection');

// REST server declaration and configuration
var server = restify.createServer({
  name: 'test-api',
  version: '0.0.1'
});

server.pre(restify.pre.sanitizePath());
server.use(restify.queryParser());
server.use(restify.acceptParser(server.acceptable));
server.use(restify.queryParser());
server.use(restify.bodyParser());

server.listen(9007, function() {
  console.log('%s listening at %s', server.name, server.url);
  mysqlDB.handleDisconnect();
  console.log(new Date() + ': Started Cricket API on port 9007');
});

var routes = require('./routes')(server);
routes.js code:
module.exports = function(app) {
  app.get('/', function(req, res, next) {
    return res.send("You have reached the test API");
  });

  var fixtures = require('./controllers/fixtures');
  app.get('/getfixtures', fixtures.getFixtures); // Get All Fixtures
};
fixtures.js code snippet:
this.getFixtures = function (req, res, next) {
  res.header("Access-Control-Allow-Origin", "*");
  res.header("Access-Control-Allow-Headers", "X-Requested-With");
  console.log("Get All Fixtures");

  var mysql = mysqlDB.getConnection();
  var query = "SELECT * FROM fixtures WHERE fixture_date >= CURDATE() ORDER BY fixture_date, fixture_time";
  mysql.query(query, function(err, rows){
    if (err) {
      var status = mysqlDB.getErrorStatus(err.code);
      return res.status(status.code).send("Error : " + status.Message);
    } else {
      var data = [];
      for (var i in rows){
        var item = rows[i];
        var output = util.formatDate(item.fixture_date);
        item.fixture_date = output;
        data.push(item);
      }
      return res.send(data);
    }
  });
};
Can someone please point me in the right direction? I don't know where to add the caching part.
From the library's documentation:
server.use(cache.before); is a middleware that will be triggered before the request is handled, going to Redis and checking whether the header_{url} and payload_{url} keys exist, in which case the value is returned.
You could put it as mentioned in this gist:
https://gist.github.com/jeffstieler/3d84fa5468c7eadb7685
var server = restify.createServer({
  name: 'test-api',
  version: '0.0.1'
});

server.pre(restify.pre.sanitizePath());
server.use(cache.before);
server.use(restify.queryParser());
server.use(restify.acceptParser(server.acceptable));
server.use(restify.queryParser());
server.use(restify.bodyParser());
server.on('after', cache.after);
In your code I would add cache.before after you sanitize the path, as this is what gets saved in Redis. Also, a next() should be included in every cached route.
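For example, a minimal sketch of a cached restify route (the handler body and data are illustrative, assuming restify-cache is wired up as above):
server.get('/getfixtures', function (req, res, next) {
  res.send(fixtureData);
  return next(); // call next() so the 'after' event fires and cache.after can store the response
});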
I ended up using node-cache.
It was easy to use since I come from a Java/Play Framework background - hopefully it helps someone else in the future.
Example usage:
var nodeCache = require("node-cache");
var myCache = new nodeCache();

var cachedValue = myCache.get("alltests", true);
if (cachedValue != undefined) {
  return res.send(cachedValue);
} else {
  // Do work here and then:
  success = myCache.set("alltests", valueHere, cacheTime);
}
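Put together with the getFixtures handler above, a minimal sketch could look like this (the cache key and 60-second TTL are illustrative; mysqlDB and util are the helpers from the question):
var NodeCache = require("node-cache");
var fixtureCache = new NodeCache();

this.getFixtures = function (req, res, next) {
  var cached = fixtureCache.get("fixtures");
  if (cached !== undefined) {
    return res.send(cached); // serve from cache and skip the database
  }
  var mysql = mysqlDB.getConnection();
  var query = "SELECT * FROM fixtures WHERE fixture_date >= CURDATE() ORDER BY fixture_date, fixture_time";
  mysql.query(query, function(err, rows) {
    if (err) {
      var status = mysqlDB.getErrorStatus(err.code);
      return res.status(status.code).send("Error : " + status.Message);
    }
    var data = rows.map(function(item) {
      item.fixture_date = util.formatDate(item.fixture_date);
      return item;
    });
    fixtureCache.set("fixtures", data, 60); // cache for 60 seconds
    return res.send(data);
  });
};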

Different Get methods in Node-Express Rest API

I have been creating a website with the MEAN stack and I am stuck at some point. I have a MongoDB database and I am currently getting each document from the database (to show them on the main page) with my REST API, which is built with Express.
Server.js
var express = require('express');
var app = express();
var mongojs = require('mongojs');
var db = mongojs('mongodb://username...', ['myApp']);
var bodyParser = require('body-parser');

app.use(express.static(__dirname + '/public'));
app.use(bodyParser.json());

app.get('/myApp', function (req, res) {
  db.myApp.find(function (err, docs) {
    console.log(docs);
    res.json(docs);
  });
});

app.get('/myApp/:id', function (req, res) {
  var id = req.params.id;
  console.log(id);
  db.myApp.findOne({_id: mongojs.ObjectId(id)}, function (err, doc) {
    res.json(doc);
  });
});

app.listen(3001);
console.log('Server running on port 3001');
There are two GET methods, and I can understand that because they have different parameters. So when I call them from controllers there is no problem, because if I provide an id, it will call the second GET method. But, for example, I want to use something like this in my website:
app.get('/myApp', function (req, res) {
  db.myApp.find({}).limit(2).skip(0, function(err, docs) {
    console.log(docs);
    res.json(docs);
  });
});
This GET method has no parameter, just like the first GET method in server.js, but they do different jobs: this one limits my search to 2 documents. How can I use different GET methods like this in my MEAN stack application?
This is my code for calling the GET method from my main controller. How can I make sure to call a specific GET method? Thanks.
$http.get('/myApp').success(function(response) { .. });
What you want is not possible. Somehow you need to distinguish between your 2 intentions, either by giving the endpoints different names (like you already suggest in your comment) or by providing for example a query parameter so you could do a call like:
$http.get('/myApp?limit=2').success(function(response) { .. });
When limit is omitted, you could return all results.
Something like:
app.get('/myApp', function (req, res) {
  var limit = req.query.limit;
  if (limit === undefined) {
    // Return everything
  } else {
    // make sure limit is some valid number
    // ... and do a mongo query limited to this number
  }
});
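A minimal sketch of how those two branches could be filled in with mongojs, using the db handle from the question (the limit parsing and error handling are illustrative):
app.get('/myApp', function (req, res) {
  var limit = parseInt(req.query.limit, 10);
  if (isNaN(limit) || limit <= 0) {
    // no (valid) limit given: return everything
    db.myApp.find(function (err, docs) {
      res.json(docs);
    });
  } else {
    // limit given: return at most `limit` documents
    db.myApp.find({}).limit(limit, function (err, docs) {
      res.json(docs);
    });
  }
});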

Express.js res.render() and res.redirect()

I have a route in my express app, which is supposed to do the following:
Get some data from outside (OK)
Show an HTML page with socket.io listening for messages (OK)
Perform some calculations, which take a long time
Send a message through socket.io after each one is completed (OK)
When all calculations are completed, show a result page (problem)
So, a simplified version of my code is:
module.exports = function(io) { // This is so I can use socket.io inside the route
  var express = require('express');
  var router = express.Router();
  [... and other required]

  router.post('/', function(req, res, next) {
    res.render('loading'); // This renders the template which accepts and renders socket.io messages
    performCalculation();
    sendSocketIOMessage();
    session.data = resultData; // I get the result of all calculations and put that in the session. It's a quick fix, don't judge, I've got no persistence in my project as for now...
    res.redirect('results'); // And here I'd like to go to the other route, which will display the data, getting it from the session.
  });

  return router;
}
Since this doesn't work, I am probably trying to do something really stupid here. But what I actually want to do is:
Perform calculations
While performing the calculations, update progress using sockets
When calculation is done, render a template, showing all the results.
Well, my friend, as you know, one can't send two responses from within one request. So here is what you need to do.
module.exports = function(io) { // This is so I can use socket.io inside the route
  var express = require('express');
  var router = express.Router();
  [... and other required]

  router.post('/', function(req, res, next) {
    var taskId = (new Date()).getTime(); // timestamp to get a unique task id
    res.render('loading');
    startCalculations(function(progressIndicator){ // progress callback
      io.emit('progress', progressIndicator);
    }, function(result){ // finish callback
      session[taskId] = result;
      io.emit('finish', { taskId: taskId });
    });
  });

  router.get('/result/:taskId', function(req, res, next) {
    var result = session[req.params.taskId];
    if (!result) {
      // Result already consumed!
      res.render('expired');
      return;
    }
    delete session[req.params.taskId];
    res.render('result', {result: result});
  });

  // progress callback will be called when we want to report progress
  // done callback indicates our results are ready.
  function startCalculations(progress, done) {
    // This is just a stupid piece of code, just to emulate loading.
    // Your awesome async code will replace this.
    // In your case it will not be a loop and there will be a callback
    // to signal finish.
    var result = 0;
    for (var i = 0; i < 100; i++) {
      result = result + i;
      // Simulate progress
      progress(i);
    }
    // Simulate finish -- result has 0 to 99 all added up ;)
    done(result);
  }

  return router;
}
Now on the HTML front you can have ...
This is how your loading view would look:
<script src="/socket.io/socket.io.js"></script>
<script src="http://code.jquery.com/jquery-1.11.1.js"></script>
<script>
  var socket = io();
  // Init code.
  socket.on('progress', function(progressIndicator){
    // Your jQuery mojo to show progress
    displayProgress(progressIndicator);
  });
  socket.on('finish', function(task){
    // Redirect to result page from frontend (you were trying to do
    // this on the backend -- Node.js)
    window.location.href = "/result/" + task.taskId;
    // OR
    // window.location.replace("/result/" + task.taskId);
  });
</script>
Hope this makes sense and helps ...
Let me know if you need anything else.
Have fun!
Node is asynchronous. Use callbacks or promises to make sure that the result page is shown only when the calculations have been completed.
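For example, a minimal sketch assuming a hypothetical performCalculation() that returns a Promise (the names are illustrative, not from the original post):
router.post('/', function(req, res, next) {
  performCalculation()                 // hypothetical async work returning a Promise
    .then(function(resultData) {
      // respond exactly once, only after the work is done
      res.render('results', { result: resultData });
    })
    .catch(next);                      // hand errors to Express's error handling
});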

Node.js respond with asynchronous data

Recently I started learning a little bit about Node.js and its capabilities and tried to use it for some web services.
I wanted to create a web service which will serve as a proxy for web requests.
I wanted my service to work this way:
The user accesses my service -> http://myproxyservice.com/api/getuserinfo/tom
My service performs a request to -> http://targetsite.com/user?name=tom
The response data gets reflected back to the user.
To implement it I used the following code:
app.js:
var express = require('express');
var bodyParser = require('body-parser');
var app = express();

app.use(bodyParser.urlencoded({ extended: false }));
app.use(bodyParser.json());

var proxy = require('./proxy_query.js');

function makeProxyApiRequest(name) {
  return proxy.getUserData(name, parseProxyApiRequest);
}

function parseProxyApiRequest(data) {
  var returned_data = JSON.parse(data);
  if (returned_data.error) {
    console.log('An error has occurred. details: ' + JSON.stringify(returned_data));
    returned_data = '';
  }
  return JSON.stringify(returned_data);
}

app.post('/api/getuserinfo/tom', function(request, response) {
  makeProxyApiRequest('tom', response);
  //response.end(result);
});

var port = 7331;
proxy_query.js:
var https = require('https');

var callback = undefined;
var options = {
  host: 'targetsite.com',
  port: 443,
  method: 'GET',
};

function resultHandlerCallback(result) {
  var buffer = '';
  result.setEncoding('utf8');
  result.on('data', function(chunk){
    buffer += chunk;
  });
  result.on('end', function(){
    if (callback) {
      callback(buffer);
    }
  });
}

exports.getUserData = function(name, user_callback) {
  callback = user_callback;
  options['path'] = '/user?name=' + name;
  var request = https.get(options, resultHandlerCallback);
  request.on('error', function(e){
    console.log('error from proxy_query:getUserData: ' + e.message);
  });
  request.end();
}
app.listen(port);
I hope I didn't screw up this code, because I replaced some stuff to fit my example.
Anyway, the problem is that I want to send the response to the user when the HTTP request is done, and I can't find how to do so because I use Express, and Express uses asynchronous calls and so does the HTTP request.
I know that if I want to do so, I should pass makeProxyApiRequest the response object so it would be able to pass it to the callback, but that is not possible because of async problems.
Any suggestions? Help will be appreciated.
As you're using your functions to process requests inside your route handling, it's better to write them as express middleware functions, taking the specific request/response pair, and making use of express's next cascade model:
function makeProxyApiRequest(req, res, next) {
  var name = req.params.name;
  res.locals.userdata = proxy.getUserData(name);
  next();
}

function parseProxyApiRequest(req, res, next) {
  try {
    // remember that JSON.parse will throw if it fails!
    var data = JSON.parse(res.locals.userdata);
    if (data.error) {
      return next('An error has occurred. details: ' + JSON.stringify(data));
    }
    res.locals.proxyData = data;
    next();
  }
  catch (e) { next("could not parse user data JSON."); }
}

app.post('/api/getuserinfo/tom',
  makeProxyApiRequest,
  parseProxyApiRequest,
  function(req, res) {
    // res.write or res.json or res.render or
    // something, with this specific request's
    // data that we stored in res.locals.proxyData
  }
);
Even better would be to move those middleware functions into their own file now, so you can simply do:
var middleware = require("./lib/proxy_middleware");
app.post('/api/getuserinfo/tom',
  middleware.makeProxyApiRequest,
  middleware.parseProxyApiRequest,
  function(req, res) {
    // res.write or res.json or res.render or
    // something, with this specific request's
    // data that we stored in res.locals.proxyData
  }
);
And keep your app.js as small as possible. Note that the client's browser will simply wait for a response from Express, which happens once res.write, res.json, res.render etc. is used. Until then the connection is simply kept open between the browser and the server, so if your middleware calls take a long time, that's fine - the browser will happily wait a long time for a response to be sent back, and will be doing other things in the meantime.
Now, in order to get the name, we can use express's parameter construct:
app.param("name", function(req, res, next, value) {
  req.params.name = value;
  // do something if we need to here, like verify it's a legal name, etc.
  // for instance:
  var isvalidname = validator.checkValidName(value);
  if (!isvalidname) { return next("Username not valid"); }
  next();
});
...
app.post("/api/getuserinfo/:name", ..., ..., ...);
Using this system, the :name part of any route will be treated based on the name parameter we defined using app.param. Note that we don't need to define this more than once: we can do the following and it'll all just work:
app.post("/api/getuserinfo/:name", ..., ..., ...);
app.post("/register/:name", ..., ..., ... );
app.get("/api/account/:name", ..., ..., ... );
and for every route with :name, the code for the "name" parameter handler will kick in.
As for the proxy_query.js file, rewriting this to a proper module is probably safer than using individual exports:
// let's not do more work than we need: http://npmjs.org/package/request
// is way easier than rolling our own URL fetcher. In Node.js the idea is
// to write as little as possible, relying on npmjs.org to find you all
// the components that you need to glue together. If you're writing more
// than just the glue, you're *probably* doing more than you need to.
var request = require("request");

module.exports = {
  getURL: function(name, url, callback) {
    request.get(url, function(err, result) {
      if (err) return callback(err);
      // do whatever processing you need to do to result:
      var processedResult = ....
      callback(false, processedResult);
    });
  }
};
and then we can use that as proxy = require("./lib/proxy_query"); in the middleware where we need to actually do the URL data fetching.
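Since getURL is callback-based, the fetching middleware has to wait for the callback before calling next(). A minimal sketch under that assumption (the target URL is the one from the question; proxy_query exposes getURL as above):
var proxy = require("./lib/proxy_query");

function makeProxyApiRequest(req, res, next) {
  var url = "https://targetsite.com/user?name=" + encodeURIComponent(req.params.name);
  proxy.getURL(req.params.name, url, function(err, processedResult) {
    if (err) return next(err);              // bail out through Express's error handling
    res.locals.proxyData = processedResult; // stash the result for the next handler
    next();
  });
}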