I have written a node.js server.
I am able to extract the HTTP POST and GET variables from the request.
I would like to pass those variables to the js script on the server to be executed.
In PHP to execute a script I would just point to it # www.example.com/path/file.php?variable=value
<?php echo "You have ".$_GET['variable'];?>
I want to achieve the same with node.js # www.example.com/path/file.njs?variable=value
My problem is that the file.njs is served as plain text instead of being executed.
I am not using express, a solution without it would be appreciated.
// Module dependencies: Node core modules plus two project-local helpers.
var sys=require("sys"),
http=require("http"),
path=require("path"),
url=require("url"),
filesys=require("fs"),
mime=require("./node_modules/mime"),//maps file extensions to MIME types
head=require("./node_modules/headers/initreq.njs");//this is used to extract the header
// Start the HTTP server on port 80; `handler` is a hoisted function declared below.
http.createServer(handler).listen(80);//handler function is below
sys.puts("Node Server Running on 80");
// Handle one HTTP request: resolve the URL to a file under the current
// working directory and serve it with the MIME type reported by `mime`.
// For html/njs requests, `head.init` first parses the GET/POST variables.
function handler(request,response){
var myPath=url.parse(request.url).pathname;//get the url
var ext=myPath.replace(/(.)*\./,'');//extension = text after the last dot
var fullPath=path.join(process.cwd(),myPath);//join with current working dir
var mimeResult=mime.lookup(fullPath),acceptExt=['html','njs'];
if(acceptExt.indexOf(ext)!=-1){//only search HTTP header for html|njs files
head.init(request,response,setContent);//head will correctly contain the $_GET AND $_POST variable
} else {setContent();}
function setContent(){
// path.exists() was removed from Node.js; fs.stat reports existence via err.
filesys.stat(fullPath,function(statErr){
if(statErr){
response.writeHeader(404, {"Content-Type":"text/plain"});
response.write("404 Not Found:: "+fullPath+"\n");
response.end();
return;
}
// Read as a Buffer; the legacy "binary" string encoding is deprecated.
filesys.readFile(fullPath,function(err,file){
if(err){
response.writeHeader(500,{"Content-Type":"text/plain"});
response.write(err+"::"+myPath+"\n");
response.end();
}else{
// Single writeHead call replaces the setHeader + writeHeader pair.
response.writeHead(200,{"Content-Type":mimeResult});
response.write(file);//this is the file that i want to execute and pass the $_POST & $_GET variable
response.end();
}
});
});
}
sys.puts("Requested:: "+myPath.replace(/(.)*\//,'')+" - "+mimeResult );
}
Lets say you have a URL parameter named variable. I think this would work:
// Passing `true` makes url.parse() also parse the query string…
var parameters = url.parse(request.url, true);
// …but the parsed values live under the `query` property, not on the
// result object itself, so the original `parameters.variable` was undefined.
var variable = parameters.query.variable;
I haven't used node.js in a while, but I'm pretty sure this works.
I don’t know what your code chunk is supposed to do, but here is a basic Hello World for you that prints out a get parameter:
var http = require('http');
var url = require('url');

// Minimal demo server: echoes back the "variable" query parameter.
var server = http.createServer(function (request, response) {
    // The second argument tells url.parse() to parse the query string too.
    var params = url.parse(request.url, true);
    response.writeHead(200, {"Content-Type": "text/plain"});
    response.end("You have " + params.query.variable);
});

server.listen(8000);
Now just visit 127.0.0.1:8000/?variable=foo
In summary what I wanted to achieve is use node.js like PHP.
to execute a PHP file like
www.example.com/path/file.php?variable=value
With node.js
www.example.com/path/file.js?variable=value
The solution I have come up with is to make the requested javascript into a module and include it with the function require.
e.g.
// Dispatch each request to a script on disk, loaded as a Node module.
// WARNING(review): `fullPath` is built from the raw request URL, so a crafted
// path (e.g. containing ../..) could require() and execute arbitrary files on
// the server — sanitize/whitelist the path before using this in production.
// NOTE(review): require() caches modules, so edits to a requested script are
// not picked up until the server restarts (or the cache entry is deleted).
http.createServer(function(req,res){
var myPath=url.parse(req.url).pathname;//get the url
var fullPath=path.join(process.cwd(),myPath);//get the working dir & join it with current working dir
require(fullPath).content(req,res);//this is where the requested script is executed as a module. make sure to end the response (response.end()) in the module
});
Though not thoroughly tested this solution works for me and I can use it even for dynamic pages.
Related
I don't want a solution using Node.js, FileReader, or whatever else except JavaScript!
Developing the html page, I encountered a problem as follows:
I get accurate results with this procedure, but unfortunately it remembers the result from the first page load. Even if the text file's content changes in the meantime, the procedure still returns the first result.
Can someone give advice!
// Path of the text file to poll (relative to the page).
var filePath = "../../dir/sub dir/text_file.txt";
// Fetch `filePath` with an async GET request and alert each line of the body.
// NOTE(review): browsers may cache GET responses, which is why repeated calls
// can keep returning the first result even after the file changes.
function getBackData(filePath){
var axd, i, artx, txli, tdr;// NOTE(review): `tdr` is declared but never used
if(window.XMLHttpRequest){
axd = new XMLHttpRequest();
}else{
// Fallback for old Internet Explorer.
axd = new ActiveXObject("Microsoft.XMLHTTP");
}
axd.open('GET', filePath, true);// third arg true = asynchronous
axd.onreadystatechange = function(){
if(axd.readyState == 4 && axd.status == 200){// request finished and succeeded
artx = axd.responseText;
txli = artx.split("\n");
for(i = 0; i < txli.length; i++){
alert(txli[i]);
}
}
}
axd.send(null);
}
You could try: axd.setRequestHeader('Cache-Control', 'no-cache');
Or try: axd.open('GET', filePath + '?_=' + new Date().getTime(), true); This will prevent the cached response from being used, because each request URL is different.
This is probably because the browser caches it. If you send a parameter which is almost always different, like a timestamp, you can disable the cache. Or you can try to use POST, since POST requests are never cached.
You are not allowed to request local files directly with ajax - you need a server to serve them. The browser is sandboxed and cannot generally open local files. This is a security measure - imagine what would happen if any website was allowed to open your files!
There are ways to set up a simple server for your images, like http-server. This allows you to serve files directly off a chosen directory, like so:
npm install -g http-server
http-server path-to-text-files/
Then you can request the files normally with ajax, at a path relative to the one your server is serving, like so:
// Illustrative fragment: request the file through the local static server;
// the path is relative to the directory http-server is serving.
url = "/dir/subdir/text-file.txt";
...
ajax.open('GET', url, true);
...
ajax.send(null);
I have a web app that injects a server-based myjavascriptfile.js file from my server, using a jQuery AJAX GET request. Currently, this GET request is made every time the client visits https://www.google.co.uk.
However I'd like to be able to send mysecondjavascriptfile.js file to the client, if the client has gone to https://www.google.co.uk more that 10 times.
Do you have any ways I can do this?
First thing to do, is to persist the hits the client do to the site. I think SessionStorage could help here:
// Count hits for this tab in sessionStorage; the very first hit stores 0
// (Number(undefined) + 1 is NaN, which falls back to 0), matching the original.
sessionStorage.counter = (Number(sessionStorage.counter) + 1) || 0;

// Script URLs keyed by how often the visitor has been here.
var sources = {
    lessThanTen : 'http://yourscript.com/lessthan10hits.js',
    moreThanTen : 'http://yourscript.com/morethan10hits.js'
}

// Inject whichever script matches the current hit count.
var script = document.createElement('script');
script.src = sessionStorage.counter >= 10 ? sources.moreThanTen : sources.lessThanTen;
document.getElementsByTagName('head')[0].appendChild(script);
This is of course a client-side verification of the hit. You could implement a server-side verification through AJAX or just serve a slightly different HTML markup after 10 requests. You'll need to use sessions (or just plain cookies) to persist them on the server-side.
AJAX verification:
var xhr = new XMLHttpRequest();
xhr.addEventListener('load', function(){
// The response body is the URL of the script to inject.
var script = document.createElement('script');
script.src = xhr.response;
document.getElementsByTagName('head')[0].appendChild(script);
});
xhr.open('POST', 'http://www.urltocheckhits.com/hits');
// Without this header the body is sent as text/plain, and body-parser's
// urlencoded middleware on the server leaves req.body empty.
xhr.setRequestHeader('Content-Type', 'application/x-www-form-urlencoded');
xhr.send('url=' + encodeURIComponent(window.location.hostname));
And then from Node.js (with body-parser and express-session):
// URLs of the two scripts, picked according to the session's hit count.
var sources = {
    lessThanTen : 'http://yourscript.com/lessthan10hits.js',
    moreThanTen : 'http://yourscript.com/morethan10hits.js'
}

// Record one hit for the posted url and reply with the matching script URL.
// If there is no parsed body, no response is sent (same as the original).
app.post('/hits', urlEncoded, function (req, res) {
    if (req.body) {
        var url = req.body.url;
        // Lazily create the per-session view map, then bump this url's count.
        req.session.views = req.session.views || {};
        req.session.views[url] = (req.session.views[url] || 0) + 1;
        var source = req.session.views[url] > 10
            ? sources.moreThanTen
            : sources.lessThanTen;
        res.send(source);
    }
});
I suggest you check the documentation of express-session and body-parser.
Note that you'll need to add CORS Headers for this (you could just as easily do it with JSONP too instead of using XHR).
Might be easier if you just serve the JS file instead of doing the AJAX call and then including the returned script. So then you could just:
<script src="http://onesingleurl.com/hits">
Caching will behave weird like this though, so that's why I favor the other approach.
I am trying use only Node (no additional npms or express). Here is my current route handler:
// Bare-Node route handler: serves post JSON via the template engine and
// static files from /views. See NOTE(review) comments for paths that hang.
function requestHandler(req, res) {
var localFolder = __dirname + '/views/',
page404 = localFolder + '404.html',// NOTE(review): built but never used
fileToServe = "";
if(/\/posts\/\d+/.test(req.url)){
// /posts/<id> -> render the matching JSON file through the template engine.
fileToServe = __dirname + req.url.match(/\/posts\/\d+/) +'.json';
fs.stat(fileToServe, function(err, contents){
if(!err && contents){
res.end(templateEngine('post',fileToServe));
} else {
res.end(templateEngine('error', err))
}
});
} else if (/\/posts\//.test(req.url)){
// NOTE(review): empty branch — these requests never receive a response,
// so the browser hangs waiting (this is the symptom described below).
} else if (/.+[^\W]$/.test(req.url)){
// Any other path ending in a word character -> serve the file from /views.
fileToServe = __dirname + '/views' + req.url.match(/\/.+[^\W]$/gi);
fs.stat(fileToServe, function(err, contents){
if(!err && contents){
res.end(fs.readFileSync(fileToServe));
} else {
res.end(templateEngine('error', err))
}
});
}
// NOTE(review): URLs that match none of the branches also fall through
// without any response being written or ended.
}
I have two questions:
In one of my views if have a <link> tag with a css file. When I go straight to the path, it is served (this is the regex that catches it: /.+[^\W]$/.test(req.url)). However, as mentioned, one of my views built from the template engine uses the css file.
How does the browser work when it sees the link tag? Does it send a GET request to my local server (node)? If it does, why doesn't my server send a response back? When I go directly to the link, it sends the response perfectly fine.
Furthermore, when I try going to the page that uses the css file in the link tag, it just hangs on an empty page. When I kill the server, it says it never received a response (once again, when I go to the link directly, I get the proper file).
Do I have to re-organize my routes? Serve static files separately?
How does the browser work when it sees the link tag? Does it send a GET request to my local server (node)?
Yes. Browser creates the full URL based on the current URL and makes an HTTP GET request like it does for any other resource.
If it does, why doesn't my server send a response back? When I go directly to the link, it sends the response perfectly fine.
All evidence suggests that your page which links to the css is not being captured in the handler if-blocks. Try putting a couple of console.logs, one right inside the requestHandler and the other inside in the block which is supposed to handle the page request. I think you will only see one log show up in the server console.
I have this code (https://gist.github.com/2402116) :
// Serve the landing page for the base url ('/').
server.get('/', function(request, response) {
// NOTE(review): readFileSync blocks the event loop; acceptable for a demo only.
var k = fs.readFileSync('./index.html','utf8');
response.send( k );
});
Tries to read this file:
https://gist.github.com/2402070
and the browser keeps loading and never end.
But if I remove all the js includes from the html file works fine.
what am I doing wrong?
Your current server implementation does not do anything but serve index.html to requests for the base url, i.e. '/'. You will need to write further code/routes to serve the requests for the js includes in your index.html, i.e. '/app.js' and the various js files in '/js/'.
Now, the routing implementation in the gist is quite crude and doesn't support many aspects of url matching. The original code is clearly just demonstrating a concept for a single page site with no resources. You will see it will quickly become burdensome to get your code working as you will effectively have to write a route for every resource request, e.g.
// One hand-written route per static resource referenced by index.html.
server.get('/app.js', function (request, response) {
    response.send(fs.readFileSync('./app.js', 'utf8'));
});
server.get('/js/jquery-1.7.2.js', function (request, response) {
    response.send(fs.readFileSync('./js/jquery-1.7.2.js', 'utf8'));
});
etc...
You are better off looking at a node.js url routing library already out there (e.g. director) or a web framework such as express which has inbuilt support for routing (and static file serving).
You need a response.end() once you are done sending data to your browser.
Actually, since you are sending all of your data at once, you can just replace response.send(k) with response.end(k). Although this method is not recommended. I highly recommend reading your file asynchronously and sending it to the client chunk-by-chunk.
See also: http://nodejs.org/api/http.html#http_response_end_data_encoding
Try calling .toString() on k, and use .end instead of .send:
response.end( k.toString() );
Maybe something weird is happening and it tries to eval the code.
I tried to play a bit with node.js and wrote following code (it doesn't make sense, but that does not matter):
var http = require("http"),
sys = require("sys");
sys.puts("Starting...");
// NOTE(review): both variables below are shared by ALL in-flight requests —
// a second request overwrites gRes and resets cnt, which is the root of the
// behaviour asked about here.
var gRes = null;
var cnt = 0;
var srv = http.createServer(function(req, res){
res.writeHeader(200, {"Content-Type": "text/plain"});
gRes = res;// clobbers the response object of any still-running request
setTimeout(output,1000);
cnt = 0;// restarts the countdown for every new request
}).listen(81);
// Writes "Hello World!" once a second, ten times, then ends the response.
function output(){
gRes.write("Hello World!");
cnt++;
if(cnt < 10)
setTimeout(output,1000);
else
gRes.end();
}
I know that there are some bad things in it (like using gRes globally), but my question is, why this code is blocking a second request until the first completed?
If I open the url it starts writing "Hello World" 10 times. But if I open it simultaneously in a second tab, the second tab waits to connect until the first tab has finished writing "Hello World" ten times.
I found nothing which could explain this behaviour.
Surely it's your overwriting of the gRes and cnt variables being used by the first request that's doing it?
[EDIT actually, Chrome won't send two at once, as Shadow Wizard said, but the code as is is seriously broken because each new request will reset the counter, and outstanding requests will never get closed].
Instead of using a global, wrap your output function as a closure within the createServer callback. Then it'll have access to the local res variable at all times.
This code works for me:
var http = require("http"),
    sys = require("sys");
sys.puts("Starting...");
var srv = http.createServer(function (req, res) {
    res.writeHeader(200, {"Content-Type": "text/plain"});
    // Closure over res/cnt: every request owns its own counter and stream,
    // so concurrent requests no longer interfere with each other.
    var cnt = 0;
    function output() {
        res.write("Hello World!\n");
        cnt += 1;
        if (cnt >= 10) {
            res.end();
        } else {
            setTimeout(output, 1000);
        }
    }
    output();
}).listen(81);
Note however that the browser won't render anything until the connection has closed because the relevant headers that tell it to display as it's downloading aren't there. I tested the above using telnet.
I'm not familiar with node.js, but I am familiar with server-side languages in general - when the browser sends a request to the server, the server creates a Session for that request, and any additional requests from the same browser (within the session lifetime) are treated as the same Session.
Probably by design, and for good reason, requests from the same session are handled sequentially, one after the other - only after the server finishes handling one request will it start handling the next.