var express = require('express')
var app = module.exports = express()

// stub standing in for an asynchronous image fetch
function getImages(callback) {
  callback()
}
app
  .set('views', __dirname + '/views')
  .set('view engine', 'jade')
  .get('/stream/images', function(req, res) {
    res.render('layout', function(err, body) {
      res.type('html')
      // flush the layout early, minus its closing tags,
      // so the browser can start rendering right away
      res.write(body.replace('</body></html>', ''))
      getImages(function(err) {
        res.render('streams/images', function(err, body) {
          var html = '<div class="layout-stream">' + body + '</div>'
          // swap the streamed pagelet into its placeholder client-side
          res.write(
            "<script>$('.pagelet-layout-stream').replaceWith(" +
            JSON.stringify(html) +
            "); emitter.emit('check:stream');</script>"
          )
          res.write("<script>emitter.emit('load');</script>")
          res.write('</body></html>')
          res.end()
        })
      })
    })
  })
  .listen(3013)
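(For context: the inline scripts assume the rendered layout already contains a .pagelet-layout-stream placeholder element, plus jQuery and a client-side emitter object.)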
This works exactly as intended.
However, when I .use(express.compress()) (before the .get, to be exact), the page no longer streams.
In other words, express.compress() seems to wait until the response is finished, then gzip the entire response and send it.
I would like each res.write to send a gzipped response (or specifically, every drain).
How can I compress the response correctly?
Edit 1 - I tried this:
var stream = zlib.createGzip()
stream.pipe(res)
stream.write(/* stuff */)
stream.end()
This behaves exactly like express.compress(). I'm not sure whether the browser just doesn't parse the body until it has the whole thing, or whether the body isn't sent until the response is complete.
I also tried forcing a sync flush:
var stream = zlib.createGzip()
stream._flush = zlib.Z_SYNC_FLUSH
stream.pipe(res)
stream.write(/* stuff */)
stream.end()
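For reference, a minimal sketch of the flush-option approach (this assumes a Node version whose zlib constructor accepts a flush option; in newer versions the constant lives at zlib.constants.Z_SYNC_FLUSH):
var zlib = require('zlib')

// construct the gzip stream so each write is sync-flushed through to the
// socket, instead of being buffered until the stream ends
var stream = zlib.createGzip({ flush: zlib.Z_SYNC_FLUSH })
stream.pipe(res)
stream.write(/* stuff */)
stream.end()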
I'm working on a SPA website with node.js, jQuery, mongoose and MongoDB for a shopping website.
The ajax requests and responses work perfectly when starting from the index.html file. So, for example, beginning on http://localhost:3000, when someone clicks a link called 'products' I send an ajax request to the server and the server sends the necessary product information back asynchronously, which leads to http://localhost:3000/products. But the problem is that if someone types http://localhost:3000/products directly into the address bar, it will show the JSON representation of the products.
This is my code:
script.js
function redirect (link) {
  $.ajax({
    type: 'GET',
    url: 'http://localhost:3000/' + link,
    contentType: 'application/json',
    data: {
      link
    },
    success: function (res) {
      let container = $('#contentToSwap');
      container.html('');
      res.products.forEach(function (products_) {
        ...
      });
    }
  });
}
app.js
var Product = require('./models/product');
var mongoose = require('mongoose');
var bodyParser = require('body-parser');
var path = require('path');
var express = require('express');
var app = express();
mongoose.connect('mongodb://localhost:27017/shopping');
var PORT = process.env.PORT || 3000;
app.use(express.static(path.join(__dirname, 'public')));
app.use(bodyParser.json());
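// NOTE: this catch-all matches every GET -- including someone typing
// /products straight into the address bar, which is the problem described above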
app.get('*', function(req, res) {
  Product.find(function(err, docs) {
    let productChunks = [];
    let chunksize = 4;
    let display = [];
    for (var i = 0; i < docs.length; i++) {
      if (docs[i].productType == req.query.link) display.push(docs[i]);
    }
    for (var i = 0; i < display.length; i += chunksize) {
      productChunks.push(display.slice(i, i + chunksize));
    }
    res.send({ products: productChunks });
  });
});

app.listen(PORT, function () {
  console.log('Listening on port ' + PORT);
});
So I need some sort of frontend routing if the user doesn't start at the index.html file. I know that I could write my own router to route the urls correctly, and that I could route all requests back to index.html like this:
app.get('*', function(req, res) {
  res.sendFile(__dirname + '/public/index.html');
});
But then I cannot load all the necessary product information from the server when someone clicks a link. So I'm a little bit confused about how to tackle this issue. Any help is appreciated.
This is usually achieved by separating API routes from normal ones with a specific url prefix, such as /api for all routes that return JSON data. What you can do is specify /api/whatever-you-want, make it the target of your ajax call, and place it above app.get('*', ...).
Since routes and middleware functions are resolved top to bottom, it will be matched by your ajax call only, leaving /products unaffected.
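A minimal sketch of that ordering, with handler bodies borrowed from the question:
app.get('/api/products', function(req, res) {
  Product.find(function(err, docs) {
    res.send({ products: docs }); // JSON goes to the ajax call only
  });
});

// the catch-all stays last, so direct navigation gets the SPA shell
app.get('*', function(req, res) {
  res.sendFile(__dirname + '/public/index.html');
});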
Answer to the follow-up question -- Is it possible to redirect the user from /api/products to /products if the request wasn't generated by ajax?
Yes, it is possible, by adding a request query parameter to the ajax call that will not be present on a normal call, and then checking for it on the server side to decide what to do when that specific query parameter is missing.
Let's assume some client-side JS that generates the ajax call.
fetch('/api/products?api=true')
  .then((data) => data.json())
  .then((json) => console.log(json));
Notice the request url - /api/products?api=true
Now assume a normal call from an html file:
<a href="/api/products">products</a>
These two calls differ only in the api query parameter (the ajax call has it, the normal one doesn't).
And for the server-side part of the task -- the request query parameters can be accessed via the query property on the request object (req.query).
app.get('/api/products', (req, res) => {
  if (!req.query.api) {
    // if the GET request doesn't contain the api param,
    // handle it accordingly, e.g. do a redirect
    return res.redirect('/products');
  }
  // request coming from the ajax call, send JSON data back
  res.json({ randomStuff: 'abcd' });
});
This is my node.js file. What's happening is: a user inputs an id and it gets sent to the server side, which takes it and searches for it through an API on a third party. The problem I am having is that the response to the initial id sent by the user gets res.end'd before I get the info back from the API, so I end up with a big fat undefined or an empty string on the client side. How can I make res.end wait until it receives the data from the API and then send it to the client? Or am I going at it entirely wrong and shouldn't be doing it this way?
var http = require('http');
var https = require('https');
var url = require('url');

http.createServer(function (req, res) {
  var id = url.parse(req.url, false).query;
  // BUG (the problem described above): this returns immediately,
  // before the API response has arrived
  var realId = getInfoFromApi(id);
  res.setHeader('Access-Control-Allow-Origin', '*');
  res.writeHead(200);
  res.end(JSON.stringify(realId));
}).listen(3000, '127.0.0.1');
function getInfoFromApi(id) {
  var url = "https://random.com/" + id + "?api_key=blabla";
  var test = https.get(url, function(res) {
    var body = '';
    res.on('data', function(chunk) {
      body += chunk;
    });
    res.on('end', function() {
      var theId = JSON.parse(body)
      console.log("Got response: ", theId);
    });
  }).on('error', function(e) {
    console.log("Got error: ", e);
  });
  return test; // this is the ClientRequest object, not the parsed body
}
Any info on what I should be looking at, or resources that would help me, would be greatly appreciated; thanks in advance for any help.
Also, as a bonus question: should I be using the POST method instead of GET from the server to the API server? Or does POST privacy only matter from client to server? I'm just curious whether clever clients would be able to access the server's http requests if I use GET instead of POST, or if it even matters at all since the request is made from the server and not the client.
Oh, and another thing: I was debating whether I should use https.createServer, since I already have to use https for https.get; that way I could remove the http module from require and use only https. Is there any big difference in performance or anything if I do that?
Thanks again in advance.
EDIT:
http.createServer(function (req, res) {
  var id = url.parse(req.url, false).query;
  var link = "https://random.com/" + id + "?api_key=blabla";
  var testo = https.get(link, function (rez) {
    var body = '';
    rez.on('data', function (chunk) {
      body += chunk;
    });
    rez.on('end', function () {
      var endRes = body
      res.setHeader('Access-Control-Allow-Origin', '*');
      res.writeHead(200);
      res.end(endRes);
    });
  }).on('error', function (e) {
    console.log("Got error: ", e);
  });
}).listen(3000, '127.0.0.1');
I managed to change it like this after I read up on callbacks. Thank you for that. It works fine now, so happy, thanks again. My question now, though, is: does this work flawlessly if dozens or hundreds of people are searching at the same time? Does each client connected to the server get their own separate function that runs at its own pace without interfering with the others? Thanks again for all your help.
I really hope to find some answers here, as I have tried everything by now.
Background:
Every time we deploy code to the web server, we need to do a cache warm-up, i.e. access the site and make sure it loads. The first load is always the slowest, since IIS needs to do some manipulation of the new code and cache it.
Task:
Create a page with a checkbox and a button. Once the button is pressed, an array of links is sent to the server. The server visits each link and gives the user feedback on the time it took to load each page.
Solution:
I am using Node.js and Express on the server side. So far I have managed to POST the array of links to the server, but since I have limited experience with Node.js, I cannot figure out the server-side code.
Here is the code I have so far (it is bits and pieces, but it gives an idea of my progress). Any help will be greatly appreciated!!!
var express = require("express");
var app = express();
var bodyParser = require('body-parser');
var parseUrlencoded = bodyParser.urlencoded({ extended: false});
var http = require("http");
function siteToPrime(url){
  http.get(url, function (http_res) {
    // initialize the container for our data
    var data = "";
    // this event fires many times, each time collecting another piece of the response
    http_res.on("data", function (chunk) {
      // append this chunk to our growing `data` var
      data += chunk;
    });
    // this event fires *one* time, after all the `data` events/chunks have been gathered
    http_res.on("end", function () {
      // you can use res.send instead of console.log to output via express
      console.log(data);
    });
  });
};
//Tells express where to look for static content
app.use(express.static('public'));
app.post('/', parseUrlencoded, function(request, response){
  var newBlock = request.body;
  console.log(Object.keys(newBlock).length);
  var key = Object.keys(newBlock)[0];
  console.log(newBlock[key]);
  siteToPrime("http://www.google.com"); // http.get needs the protocol prefix
  response.status(201);
});
app.listen(3000, function(){
  console.log("Server listening on port 3000...");
});
Assuming that you have access to the array in the post route:
var express = require("express"),
request = require("request"),
app = express();
var start = new Date();
app.use(express.static('public'));
app.use(bodyParser.urlencoded({extended: false}));
function siteToPrime(req, res, urls) {
urls.forEach(function(url)) {
request(url, function(error, res, body) {
if (!error && res.statusCode == 200) {
console.log(url +' : ' + body);
console.log('Request took: ', new Date() - start, 'ms');
}
});
}
res.redirect('/');
};
app.post('/', function(req, res){
  var urls = req.body.urls; // array of urls
  siteToPrime(req, res, urls);
});
app.listen(3000, function(){
  console.log("Server listening on port 3000...");
});
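For completeness, a hypothetical client-side call that would populate req.body.urls (the field name and urls are assumptions, and since this sketch posts JSON, bodyParser.json() would also need to be mounted):
fetch('/', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ urls: ['http://example.com/a', 'http://example.com/b'] })
});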
To learn Node.js I am doing some exercises, but I have run into a problem: the res variable works in index.js but doesn't work inside the fs.exists callback. Why? Thanks for your responses.
server.js
var http = require("http");
global.PATH = __dirname;
http.createServer(function(req, res) {
  var app = require("./index.js")(req, res);
  res.end();
}).listen(8080);
index.js
var url = require("url");
var fs = require("fs");
module.exports = function(req, res){
  if(req){
    var pathname = url.parse(req.url).pathname.split("/");
    pathname.splice(0,1);
    var action = pathname[1];
    fs.exists(PATH + "/" + pathname[0] + ".js", function(exist){
      var controller;
      res.write('doesn\'t work');
      if(exist)
      {
        if(!controller){
          controller = require(PATH + "/controllers/" + pathname[0] + ".js")();
          if(controller[action])
            controller[action]();
        }
      }
    });
  }
}
I don't know why some of you downvoted the question, because for a beginner some asynchronous patterns can be confusing...
First of all, you should cache your require call (as Seth mentioned).
global.PATH = __dirname;
var http = require("http");
var app = require("./index"); //omit .js, you don't need it
Then, in your index.js, you are using fs.exists, which is asynchronous. That means res.end() is called before the callback function inside fs.exists is reached, at which point the request/response lifecycle is over.
You could use fs.existsSync (not recommended!) or provide a callback which you call when done.
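(As an aside, in current Node versions fs.exists itself is deprecated; fs.access is the suggested replacement. A minimal sketch:)
fs.access(PATH + "/" + pathname[0] + ".js", fs.constants.F_OK, function(err) {
  var exist = !err; // no error means the file exists
  // ...same body as before
});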
Two more things:
the if(req) is unnecessary, there is always a request object!
you ALWAYS need to call the callback in your function to make the response be sent!
server.js:
http.createServer(function(req, res) {
  app(req, res, function () {
    res.end();
  });
}).listen(8080);
index.js:
var url = require("url");
var fs = require("fs");
module.exports = function(req, res, cb) { // see third cb-argument!
  var pathname = url.parse(req.url).pathname.split("/");
  pathname.splice(0,1);
  var action = pathname[1];
  fs.exists(PATH + "/" + pathname[0] + ".js", function(exist){
    var controller;
    res.write('doesn\'t work');
    if(exist)
    {
      if(!controller){
        controller = require(PATH + "/controllers/" + pathname[0] + ".js")();
        if(controller[action])
          controller[action]();
      }
    }
    cb(); // whenever you're done call the callback
  });
}
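For reference, a hypothetical controller in the shape this dispatcher expects: a factory function returning an object whose keys are actions. (The file name and action here are assumptions; note also that the question's exists-check looks in PATH while the require looks in PATH/controllers, which is kept as-is above.)
// controllers/home.js -- reached via a request to /home/index
module.exports = function() {
  return {
    index: function() {
      console.log("home#index called");
    }
  };
};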
Recently I started learning a little bit about Node.js and its capabilities and tried to use it for some web services.
I wanted to create a web service which will serve as a proxy for web requests.
I wanted my service to work that way:
A user will access my service -> http://myproxyservice.com/api/getuserinfo/tom
My service will perform a request to -> http://targetsite.com/user?name=tom
The response data would get reflected back to the user.
To implement it I used the following code:
app.js:
var express = require('express');
var bodyParser = require('body-parser');
var app = express();
app.use(bodyParser.urlencoded({ extended: false }));
app.use(bodyParser.json());
var proxy = require('./proxy_query.js')
function makeProxyApiRequest(name) {
  return proxy.getUserData(name, parseProxyApiRequest);
}

function parseProxyApiRequest(data) {
  var returned_data = JSON.parse(data);
  if (returned_data.error) {
    console.log('An error has occurred. details: ' + JSON.stringify(returned_data));
    returned_data = '';
  }
  return JSON.stringify(returned_data);
}

app.post('/api/getuserinfo/tom', function(request, response) {
  makeProxyApiRequest('tom', response);
  //response.end(result);
});
var port = 7331;
proxy_query.js:
var https = require('https');
// NOTE: a single module-level callback shared by all requests --
// concurrent calls to getUserData will overwrite each other
var callback = undefined;

var options = {
  host: 'targetsite.com',
  port: 443,
  method: 'GET',
};
function resultHandlerCallback(result) {
  var buffer = '';
  result.setEncoding('utf8');
  result.on('data', function(chunk){
    buffer += chunk;
  });
  result.on('end', function(){
    if (callback) {
      callback(buffer);
    }
  });
}
exports.getUserData = function(name, user_callback) {
  callback = user_callback;
  options['path'] = '/user?name=' + name;
  var request = https.get(options, resultHandlerCallback);
  request.on('error', function(e){
    console.log('error from proxy_query:getUserData: ' + e.message)
  });
  request.end();
}
app.listen(port);
I hope I didn't screw up this code, because I replaced some things to fit my example.
Anyway, the problem is that I want to send the response to the user when the HTTP request is done, and I can't figure out how to do so, because express uses asynchronous calls and so does the http request.
I know that if I want to do this I should pass makeProxyApiRequest the response object so it can pass it on to the callback, but that doesn't seem possible because of the async problems.
Any suggestions?
Help will be appreciated.
As you're using your functions to process requests inside your route handling, it's better to write them as express middleware functions, taking the specific request/response pair, and making use of express's next cascade model:
function makeProxyApiRequest(req, res, next) {
  var name = req.params.name;
  res.locals.userdata = proxy.getUserData(name);
  next();
}

function parseProxyApiRequest(req, res, next) {
  try {
    // remember that JSON.parse will throw if it fails!
    var data = JSON.parse(res.locals.userdata);
    if (data.error) {
      return next('An error has occurred. details: ' + JSON.stringify(data));
    }
    res.locals.proxyData = data;
    next();
  }
  catch (e) { next("could not parse user data JSON."); }
}
app.post('/api/getuserinfo/tom',
  makeProxyApiRequest,
  parseProxyApiRequest,
  function(req, res) {
    // res.write or res.json or res.render or
    // something, with this specific request's
    // data that we stored in res.locals.proxyData
  }
);
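where that final handler might simply be, for instance:
function(req, res) {
  res.json(res.locals.proxyData); // send this request's proxied data back as JSON
}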
Even better would be to move those middleware functions into their own file now, so you can simply do:
var middleware = require("./lib/proxy_middleware");
app.post('/api/getuserinfo/tom',
  middleware.makeProxyApiRequest,
  middleware.parseProxyApiRequest,
  function(req, res) {
    // res.write or res.json or res.render or
    // something, with this specific request's
    // data that we stored in res.locals.proxyData
  }
);
And keep your app.js as small as possible. Note that the client's browser will simply wait for a response from express, which happens once res.write, res.json or res.render etc. is used. Until then the connection is simply kept open between the browser and the server, so if your middleware calls take a long time, that's fine - the browser will happily wait a long time for a response to get sent back, and will be doing other things in the meantime.
Now, in order to get the name, we can use express's parameter construct:
app.param("name", function(req, res, next, value) {
req.params.name = value;
// do something if we need to here, like verify it's a legal name, etc.
// for instance:
var isvalidname = validator.checkValidName(name);
if(!isvalidname) { return next("Username not valid"); }
next();
});
...
app.post("/api/getuserinfo/:name", ..., ..., ...);
Using this system, the :name part of any route will be treated based on the name parameter we defined using app.param. Note that we don't need to define this more than once: we can do the following and it'll all just work:
app.post("/api/getuserinfo/:name", ..., ..., ...);
app.post("/register/:name", ..., ..., ... );
app.get("/api/account/:name", ..., ..., ... );
and for every route with :name, the code for the "name" parameter handler will kick in.
As for the proxy_query.js file, rewriting this to a proper module is probably safer than using individual exports:
// let's not do more work than we need: http://npmjs.org/package/request
// is way easier than rolling our own URL fetcher. In Node.js the idea is
// to write as little as possible, relying on npmjs.org to find you all
// the components that you need to glue together. If you're writing more
// than just the glue, you're *probably* doing more than you need to.
var request = require("request");
module.exports = {
  getURL: function(name, url, callback) {
    request.get(url, function(err, result) {
      if(err) return callback(err);
      // do whatever processing you need to do to result:
      var processedResult = ....
      callback(false, processedResult);
    });
  }
};
and then we can use that as proxy = require("./lib/proxy_query"); in the middleware where we actually need to do the URL data fetching.
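For completeness, a hypothetical wiring of that module into the middleware chain (the targetsite.com URL comes from the question; everything else here is an assumption):
var proxy = require("./lib/proxy_query");

function makeProxyApiRequest(req, res, next) {
  var url = "https://targetsite.com/user?name=" + req.params.name;
  proxy.getURL(req.params.name, url, function(err, processedResult) {
    if (err) return next(err);
    res.locals.userdata = processedResult; // picked up by parseProxyApiRequest
    next();
  });
}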