node.js page refresh calling resources twice?

I have a basic node.js setup, but I think I'm missing something.
My main.js:
var a = require('./another.js');
var http = require('http');
http.createServer(function (request, response) {
    response.writeHead(200, {
        'Content-Type': 'text/html'
    });
    response.write(a.saysomething('Hi there!') + '<br />');
    response.end();
}).listen(27182);
console.log('Server running at http://127.0.0.1:27182/');
My another.js:
exports.saysomething = function (str) {
    console.log('in saysomething!');
    return str + ' hey there!';
};
The issue is that my console is outputting 'in saysomething!' twice for a single page load. Am I missing something? When I refresh the page, I expect only one 'in saysomething!'.

Your browser is most likely requesting favicon.ico in addition to the URL you are hitting. Try hitting the server from the command line instead: curl http://127.0.0.1:27182/
Also, you can log request.url inside the handler; that will show you exactly which URLs are being requested.
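For instance, here is a quick sketch of the same main.js with that logging added, plus an early return for the favicon request (responding with 204 is just one convenient way of saying "no icon here"):
var a = require('./another.js');
var http = require('http');
http.createServer(function (request, response) {
    // Log every URL the browser asks for; the second hit shows up as /favicon.ico
    console.log('Request for: ' + request.url);
    if (request.url === '/favicon.ico') {
        response.writeHead(204); // "No Content" -- nothing to send for the icon
        response.end();
        return;
    }
    response.writeHead(200, { 'Content-Type': 'text/html' });
    response.write(a.saysomething('Hi there!') + '<br />');
    response.end();
}).listen(27182);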

Here's another way to understand the problem. This node server will track all incoming requests and show them to you in the browser as JSON. Then you can hit F5 and watch exactly which requests you are making.
var http = require("http");
var messages = [];
http.createServer(function(request, response) {
console.log("We got a hit # " + new Date());
messages.push(request.url);
messages.push(request.headers);
  response.writeHead(200, {"Content-Type": "text/plain"});
for ( var i = 0; i < messages.length; ++i ) {
  response.write("\n\n" + JSON.stringify(messages[i]));
}
  response.end();
}).listen(8888);
PS: See also this duplicate for more info: "nodejs - http.createServer seems to call twice".

Making asynchronous requests within a Node.js server, error: "TypeError: Object #<Object> has no method '_implicitHeader'"

So what I was trying to do was basically make a server that would receive a request, make a new request, receive the new response, and then relay that to the original source.
Here was my code:
var http = require('http');
var request = require('request');
var server = http.createServer(function(request, response){
    response.writeHead(200, {"Content-Type": "text/plain"});
    response.write('on');
    getSummonerId(response);
});
server.listen(9000);
console.log("Server running!");

function getSummonerId(cb){
    request('https://na.api.pvp.net/api/lol/na/v1.4/summoner/by-name/' + 'someones-name' + '?api_key=mykeygoeshere', function(request, response, body){
        cb(JSON.parse(body));
    });
}
When I did that, I got TypeError: Object #<Object> has no method '_implicitHeader'.
Upon further searching through the tome of Google, I found these:
https://github.com/joyent/node/issues/5597
How can I properly implement the nodejs Stream API?
I think it might be the problem I'm having, but I'm having trouble translating the context of the original question into the context of my problem.
Well, I think there are multiple issues here, but the big one is that you're passing the response object into your getSummonerId() function as a callback; it's not a function, it's an object. How about trying something like this:
var http = require('http');
var request = require('request');
var server = http.createServer(function(request, response){
    response.writeHead(200, {"Content-Type": "text/plain"});
    // response.write('on'); // why do you have this?
    getSummonerId(function(data) { // notice that we pass in a function, but keep response handling up here
        response.end(JSON.stringify(data)); // HTTP responses must be strings
    });
});
server.listen(9000);
console.log("Server running!");

function getSummonerId(cb){
    request('https://na.api.pvp.net/api/lol/na/v1.4/summoner/by-name/' + 'someones-name' + '?api_key=mykeygoeshere', function(request, response, body){
        cb(JSON.parse(body));
    });
}
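Assuming the request module is installed (npm install request) and a real API key is substituted in, hitting http://localhost:9000/ in the browser or with curl should now return the parsed summoner data as JSON instead of blowing up with the _implicitHeader error.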

Static HTTP File Server in NodeJs: Sometimes external .js and .css files load properly, sometimes they don't?

Edit: I know using express or whatever would be easier, but this is all a learning exercise, so sorry if this is all massively convoluted, haha!
Edit 2: After adding a few console logs for debugging, it appears the problem has something to do with the fact that the browser sends a second request (e.g. for login-fail.js) before the response to the first request (say, for style.css) has completed. These overlapping requests seem to cause some sort of problem, with each subsequent request preventing the previous one from completing. Yikes, I need help.
Edit 3: After some debugging, it appears that the pathname variable does not change its value on each request. For some reason, pathname's value persists across requests, which makes every request's response the same. Stranger still, the value of uri changes with each request (and uri is what gives pathname its value...). I'm still trying to find out why this bizarre behaviour is happening.
So I've been having this problem where requests for the external .js and .css files linked from specific .html files get inconsistent responses. For instance, sometimes everything loads perfectly, other times the CSS will load and not the JS, sometimes both, sometimes neither. I am not able to determine whether this is because my code is synchronous, or for some other reason. Here is my code:
Server.js
//Module requires
var http = require("http"),
    fs = require("fs"),
    path = require('path'),
    url = require('url'),
    invoke = require("./invoke");

//Object "MIMETYPES"
//Maps relationships between file extensions and their respective MIME Content-Types
var MIMETYPES = {
    ".html": "text/html",
    ".jpeg": "image/jpeg",
    ".jpg": "image/jpeg",
    ".png": "image/png",
    ".js": "text/javascript",
    ".css": "text/css"
};

//Object "invokeOptions"
//Options passed to Invoke() function for POST requests
var invokeOptions = {
    postData : "",
    uri : ""
}

var PORT = 8888;

//HTTP Server Begin
http.createServer(function(req, res) {
    var uri = url.parse(req.url).pathname;
    pathname = path.resolve(__dirname, "..") + uri;
    console.log("Received " + req.method + " request for : " + uri);
    invokeOptions.uri = uri;

    //GET requests wrapper
    if (req.method == "GET"){
        //Invoke() function handler for GET requests
        if (path.extname(pathname) == ""){
            invoke.invoke(invokeOptions, req, res);
            return;
        }
        //Static file server for GET requests
        fs.exists(pathname, function(exists) {
            if(!exists) {
                console.log("Requested file \'" + pathname + "\' doesn't exist.");
                res.writeHead(404, {'Content-Type': 'text/plain'});
                res.write('404 Not Found\n');
                res.end();
                return;
            }
            var contentType = MIMETYPES[path.extname(pathname)];
            res.writeHead(200, {"Content-Type" : contentType});
            console.log("Current URI: " + uri + " has content type: " + contentType);
            fs.createReadStream(pathname).pipe(res);
            return;
        });
    }

    //POST requests wrapper
    if (req.method == "POST"){
        var postData = "";
        req.on("data", function(postPacket) {
            postData += postPacket;
        });
        req.on("end", function() {
            invokeOptions.postData = postData;
            invoke.invoke(invokeOptions, req, res);
            return;
        });
    }
}).listen(PORT);
console.log("Server listening on port: " + PORT);
Invoke.js - This handles requests for non-files, that is, requests for functions on the server
var fs = require("fs"),
querystring = require("querystring"),
path = require("path");
function invoke (options, req, res){
process.stdout.write("Invoking function --> ");
if (options.uri == "/"){
console.log("Index");
res.writeHead(200, {"Content-Type" : "text/html"});
fs.createReadStream("../index.html").pipe(res);
return;
}
if (options.uri == "/login"){
console.log("Login");
fs.readFile(path.resolve("../users.json"), "UTF-8", function(err, data){
if (err) throw err;
var json = JSON.parse(data);
var user = querystring.parse(options.postData).username,
password = querystring.parse(options.postData).password;
console.log("Submitted Username: " + user + "\nSubmitted Password: " + password);
if (json.users[0].username == user && json.users[0].password == password){
res.writeHead(200, {"Content-Type" : "text/html"});
fs.createReadStream("../app.html").pipe(res);
return;
}
else {
res.writeHead(300, {"Content-Type" : "text/html"});
fs.createReadStream("../login-fail.html").pipe(res);
return;
}
});
}
else {
console.log("Error! Bad request.");
res.writeHead(400, {"Content-Type" : "text/plain"});
res.end("Error 400: Bad Request. \nThere is no function corresponding to that request.");
}
}
exports.invoke = invoke;
Login-fail.js - This is the code that hardly ever loads
$(document).ready(function() {
    var current = 3;
    var countdown = $(".countdown");
    function down (){
        current--;
        if (current != 0){
            countdown.text(current);
        }
        else {
            clearInterval(interval);
            window.location.replace("./");
        }
    }
    var interval = setInterval(down, 1000);
});
Basically, index.html is a form which accepts a username and password; the server compares the submitted POST data to a JSON file, and if it matches the hashes in that file it serves app.html, otherwise it serves login-fail.html. When login-fail.html is served, the CSS and JS linked from it hardly ever load when requested!
Also, it is worth noting that when it doesn't work, the console.log of the content type for the CSS request shows 'text/javascript'. Any help would be massively appreciated!
Holy crap.
pathname wasn't being declared as a variable on each request, because I used a ; instead of a , in the var statement.
I'll go die now, ladies and gents.
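For anyone skimming: the fix is simply making pathname part of the same var statement (or giving it its own var), so each request gets its own local variable instead of sharing an accidental global:
var uri = url.parse(req.url).pathname,
    pathname = path.resolve(__dirname, "..") + uri;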
The relative paths you're using in your login-fail.html are probably not resolving correctly because the URL path doesn't change (/login), so the browser is looking for /login/css/style.css and /login/js/login-fail.js. Try modifying your login-fail.html to use absolute paths instead of relative paths.

Node.js Server: Image Upload / Corruption Issues

So I'm trying to write a basic file server in Node.js, and all the images I've tried uploading and storing on it are coming back as corrupted. The problem seems to have something to do with the way that Node Buffers handle being converted to UTF-8 and back again (which I have to do in order to get the POST body headers out and away from the binary data).
Here's a simple Node server that shows my current approach and the problems I've been having:
var http = require('http');

var server = http.createServer(function(request, response) {
    if (request.method === "GET") {
        // on GET request, output a simple web page with a file upload form
        var mypage = '<!doctype html><html><head><meta charset="utf-8">' +
            '<title>Submit POST Form</title></head>\r\n<body>' +
            '<form action="http://127.0.0.1:8008" method="POST" ' +
            'enctype="multipart/form-data"> <input name="upload" ' +
            'type="file"><p><button type="submit">Submit</button>' +
            '</p></form></body></html>\r\n';
        response.writeHead(200, {
            "Content-Type": "text/html",
            "Content-Length": mypage.length
        });
        response.end(mypage);
    } else if (request.method === "POST") {
        // if we have a return post request, let's capture it
        var upload = new Buffer([]);
        // get the data
        request.on('data', function(chunk) {
            // copy post data
            upload = Buffer.concat([upload, chunk]);
        });
        // when we have all the data
        request.on('end', function() {
            // convert to UTF8 so we can pull out the post headers
            var str = upload.toString('utf8');
            // get post headers with a regular expression
            var re = /(\S+)\r\nContent-Disposition:\s*form-data;\s*name="\w+";\s*filename="[^"]*"\r\nContent-Type: (\S+)\r\n\r\n/i,
                reMatch = str.match(re);
            var lengthOfHeaders = reMatch[0].length,
                boundary = reMatch[1],
                mimeType = reMatch[2];
            // slice headers off top of post body
            str = str.slice(lengthOfHeaders);
            // remove the end boundary
            str = str.replace("\r\n" + boundary + "--\r\n", '');
            // convert back to buffer
            var rawdata = new Buffer(str, 'utf8');
            // echo back to client
            response.writeHead(200, {
                "Content-Type": mimeType
            });
            response.end(rawdata);
        });
    }
});

server.listen(8008);
console.log("server running on port 8008");
To test it, run the script in node and go to 127.0.0.1:8008 in your browser. Try uploading an image and submitting the form. The image comes back as corrupt every time -- even though the script should just be directly echoing the image data back to the browser.
So does anyone know what I'm doing wrong here? Is there a better way to handle POST body headers in Node that I haven't figured out yet? (And before anyone says anything, no, I don't want to use Express. I want to figure out and understand this problem.)
The problem seems to have something to do with the way that Node Buffers handle being converted to UTF-8 and back again
I guess you are right about that: converting to UTF-8 is a bad idea. You can still do the conversion just to inspect the text and find the header and boundary positions, but keep the original buffer untouched; once you have the positions of the header and closing boundary, copy just the binary part into a new buffer, like this:
originalBuffer.copy(newBuffer, 0, positionHeader, positionEndBoundary)
var http = require('http');
var fs = require('fs');

var connections = 0;

var server = http.createServer(function (req, res) {
    connections++;
    console.log(req.url, "connections: " + connections);

    if(req.url == '/'){
        res.writeHead(200, { 'content-type': 'text/html' });
        res.end(
            '<form action="/upload" enctype="multipart/form-data" method="post">' +
            '<input type="file" name="upload" multiple="multiple"><br>' +
            '<input type="submit" value="Upload">' +
            '</form>'
        );
    }

    var body = new Buffer([]);
    if (req.url == '/upload') {
        req.on('data', function (foo) {
            //f.write(foo);
            body = Buffer.concat([body, foo]);
            if(isImage(body.toString())){
                console.log("It is an image of type " + isImage(body.toString()));
            }
            else{
                console.log("Not an image");
                res.end("Not an image");
            }
            console.log(body.length, body.toString().length);
        });
        req.on('end', function () {
            // console.log(req.headers);
            // I converted the buffer to UTF-8, but I kept the original buffer
            var str = body.toString();
            console.log(str.length);
            imageType = isImage(body.toString());
            // get the index of the last header character
            // I'm just using the string to find the positions to cut the headers and boundaries
            var index = str.indexOf(imageType) + (imageType + "\r\n\r\n").length;
            // var headers = str.slice(0, index).split(';');
            // console.log(headers);
            // Here comes the trick
            /*
             * I have to cut the closing boundary, so I use lastIndexOf to find it.
             * And maybe that is where the corruption issue comes from: I'm not sure, but I guess
             * UTF-8 does not map one-to-one onto the raw bytes in the buffer,
             * so I need to account for the difference here (body.length - str.length).
             */
            var indexBoundayToBuffer = str.lastIndexOf('------WebKitFormBoundary') + (body.length - str.length);
            console.log(index, indexBoundayToBuffer);
            // maybe you can change this to use less memory, whatever
            var newBuffer = Buffer.alloc(body.length);
            /*
             * And now use index and indexBoundayToBuffer and you will have only the binary data
             */
            body.copy(newBuffer, 0, index, indexBoundayToBuffer);
            // f.end();
            // file type
            var type = imageType.substr("image/".length);
            console.log("END");
            fs.writeFile("nameFile." + type, newBuffer, function (err, ok) {
                if(err){
                    console.log(err);
                    return false;
                }
                res.end();
            });
        });
    }
});

function isImage(str){
    if(str.indexOf('image/png') != -1) return 'image/png';
    else if(str.indexOf('image/jpeg') != -1) return 'image/jpeg';
    else if(str.indexOf('image/bmp') != -1) return 'image/bmp';
    else if(str.indexOf('image/gif') != -1) return 'image/gif';
    else return false;
}

var port = process.env.PORT || 8080;
server.listen(port, function () {
    console.log('Recording connections on port %s', port);
});
You really shouldn't use regular expressions like that to parse multipart payloads, as it can easily make parsing your image data very unreliable. There are modules on npm that parse forms for you, such as busboy, multiparty, or formidable. None of them use regular expressions, and none of them require Express.
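As a rough sketch of the busboy route (this assumes busboy's pre-1.0 constructor API; check the README of whichever version you install for the exact event signatures), echoing an uploaded image straight back could look like this:
var http = require('http');
var Busboy = require('busboy');

http.createServer(function(request, response) {
    if (request.method === 'POST') {
        var busboy = new Busboy({ headers: request.headers });
        busboy.on('file', function(fieldname, file, filename, encoding, mimetype) {
            // Collect the raw file stream into Buffers -- no UTF-8 round trip involved
            var chunks = [];
            file.on('data', function(chunk) { chunks.push(chunk); });
            file.on('end', function() {
                response.writeHead(200, { 'Content-Type': mimetype });
                response.end(Buffer.concat(chunks));
            });
        });
        request.pipe(busboy);
    }
}).listen(8008);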

Routing http requests through Node.js

I'm trying to make a cucumber test setup with Node.js that can test any website by using an iframe.
Normally an iframe is a no-go because of cross-origin security limitations.
However, it should be possible (I'm sure it is, and I trust you to come up with a solution)
to fetch the website targeted by the test via the requested URL when a specific URL name is requested, so that the iframe is loaded with a copy of the test target.
Basically just a standard node.js server that fetches specific pages based on req.url,
akin to an address request router.
Here is my blatant attempt to do exactly that.
Fetching the test page via the URL works,
but I'm having a problem switching from the http server to the connection object.
Is there a way to "feed" the connection with the http server's response?
PS. I also created a solution with two node.js servers:
Node 1 fetches the test target and mixes it into the cucumber test page.
Node 2 hosts the cucumber test.
This solution works, but it creates problems on websites where JavaScript naming conflicts occur, which is why the iframe solution, which solves this problem through encapsulation, is more appealing.
var http = require('http');
var connect = require('connect');

var port = process.env.PORT || 8788;

var server = http.createServer(function(req, webres)
{
    var url = req.url;
    console.log(url);
    if(url == '/myWebsiteToBeTestedWithCucumberJS')
    {
        // Load the web site to be tested "myWebsiteToBeTestedWithCucumberJS"
        // And update the references
        // Finally write the page with the webres
        // The page will appear to be hosted locally
        console.log('Loading myWebsiteToBeTestedWithCucumberJS');
        webres.writeHead(200, {'content-type': 'text/html, level=1'});
        var options =
        {
            host: 'www.myWebsiteToBeTestedWithCucumberJS.com',
            port: 80,
            path: '/'
        };
        var page = '';
        var req = http.get(options, function(res)
        {
            console.log("Got response: " + res.statusCode);
            res.on('data', function(chunk)
            {
                page = page + chunk;
            });
            res.on('end', function()
            {
                // Change relative paths to absolute (actual web location where images, javascript and stylesheets are placed)
                page = page.replace(/ href="\/\//g , ' href="/');
                page = page.replace(/ src="\//g , ' src="www.myWebsiteToBeTestedWithCucumberJS.com');
                page = page.replace(/ data-src="\//g , ' data-src="www.myWebsiteToBeTestedWithCucumberJS.com');
                page = page.replace(/ href="\//g , ' href="www.myWebsiteToBeTestedWithCucumberJS.com');
                webres.write(page);
                webres.end('');
            });
        });
    }
    else
    {
        // Load any file from localhost:8788
        // This is where the cucumber.js project files are hosted
        var dirserver = connect.createServer();
        var browserify = require('browserify');
        var cukeBundle = browserify({
            mount: '/cucumber.js',
            require: ['cucumber-html', './lib/cucumber', 'gherkin/lib/gherkin/lexer/en'],
            ignore: ['./cucumber/cli', 'connect']
        });
        dirserver.use(connect.static(__dirname));
        dirserver.use(cukeBundle);
        dirserver.listen(port);
    }
}).on('error', function(e)
{
    console.log("Got error: " + e.message);
});

server.listen(port);
console.log('Accepting connections on port ' + port + '...');
Well, it wasn't so difficult after all.
Being new to node.js, I had to realize the possibilities of using multiple listeners.
Reading up on nodejitsu's features helped me solve the problem.
The example below loads www.myWebsiteToBeTestedWithCucumberJS.com
when the URL is specified as follows: http://localhost:9788/myWebsiteToBeTestedWithCucumberJS,
while all other requests are handled as cucumber.js website requests.
Hope this makes sense to other node.js newcucumbers.
var http = require('http');
var connect = require('connect');

var port = process.env.PORT || 9788;

var server = http.createServer(function(req, webres)
{
    var url = req.url;
    console.log(url);
    if(url == '/myWebsiteToBeTestedWithCucumberJS')
    {
        loadMyWebsiteToBeTestedWithCucumberJS(req, webres);
    }
    else
    {
        loadLocal(req, webres, url);
    }
}).on('error', function(e)
{
    console.log("Got error: " + e.message);
});

server.listen(port);
console.log('Accepting connections on port ' + port + '...');

function loadMyWebsiteToBeTestedWithCucumberJS(req, webres)
{
    console.log('Loading myWebsiteToBeTestedWithCucumberJS');
    webres.writeHead(200, {'content-type': 'text/html, level=1'});
    var options =
    {
        host: 'www.myWebsiteToBeTestedWithCucumberJS.com',
        port: 80,
        path: '/'
    };
    var page = '';
    var req = http.get(options, function(res)
    {
        console.log("Got response: " + res.statusCode);
        res.on('data', function(chunk)
        {
            page = page + chunk;
        });
        res.on('end', function()
        {
            page = page.replace(/ href="\/\//g , ' href="/');
            page = page.replace(/ src="\//g , ' src="http://www.myWebsiteToBeTestedWithCucumberJS.com/');
            page = page.replace(/ data-src="\//g , ' data-src="http://www.myWebsiteToBeTestedWithCucumberJS.com/');
            page = page.replace(/ href="\//g , ' href="http://www.myWebsiteToBeTestedWithCucumberJS.com/');
            webres.write(page);
            webres.end('');
        });
    });
}

function loadLocal(req, webres, path)
{
    console.log('Loading localhost');
    webres.writeHead(200, {'content-type': 'text/html, level=1'});
    var options =
    {
        host: 'localhost',
        port: 9787,
        path: path
    };
    var page = '';
    var req = http.get(options, function(res)
    {
        console.log("Got response: " + res.statusCode);
        res.on('data', function(chunk)
        {
            page = page + chunk;
        });
        res.on('end', function()
        {
            webres.write(page);
            webres.end('');
        });
    });
}

// Cucumber site listening on port 9787
var dirserver = connect.createServer();
var browserify = require('browserify');
var cukeBundle = browserify(
{
    mount: '/cucumber.js',
    require: ['cucumber-html', './lib/cucumber', 'gherkin/lib/gherkin/lexer/en'],
    ignore: ['./cucumber/cli', 'connect']
});
dirserver.use(connect.static(__dirname));
dirserver.use(cukeBundle);
dirserver.listen(9787);
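For completeness, here is another, much smaller illustration of routing purely on req.url, without any proxying: every path is matched inside a single createServer callback.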
var http = require('http');

// Create a server object
http.createServer(function (req, res) {
    // http header
    res.writeHead(200, {'Content-Type': 'text/html'});
    var url = req.url;
    if(url === '/about') {
        res.write('Welcome to about us page');
        res.end();
    }
    else if(url === '/contact') {
        res.write('Welcome to contact us page');
        res.end();
    }
    else {
        res.write('Hello World!');
        res.end();
    }
}).listen(3000, function() {
    // The server object listens on port 3000
    console.log("server start at port 3000");
});

Problem receiving response from node server using ajax post request

I have written an HTTP server using Node.js:
var sys = require("sys"),
http = require("http"),
url = require("url"),
path = require("path"),
fs = require("fs");
http.createServer(function(request, res) {
var parsed_url = url.parse(request.url);
var uri = parsed_url.pathname;
if(uri === "/test"){
res.writeHead(200, {'Content-Type': 'text/javascript'});
request.addListener('data', function (chunk) {
var data = eval("(" + chunk + ")");
console.log(data[0].id);
})
request.addListener('end', function() {
console.log('end triggered');
res.write("Post data");
res.end();
});
}
}).listen(8080);
and I am trying to send back a response to an AJAX POST request, but I am unable to receive any response. Here is the code for the AJAX request:
var myhttp = new XMLHttpRequest();
var url = "http://localhost:8080/test";
var data = [{"a":"1"},{"b":"2"},{"c":"3"}];
var dataJson = JSON.stringify(data);
myhttp.open('POST', url, true);
myhttp.send(dataJson);
myhttp.onreadystatechange = function() {
    if ((myhttp.readyState == 4) && (myhttp.status == 200)){
        alert(myhttp.responseText);
    }
    else if ((myhttp.readyState == 4) && (myhttp.status != 200))
    {
        console.log("Error in Connection");
    }
};
Can anyone help me figure out what I am doing wrong?
Thanks,
Vinay
Your code is almost right, but in your code sample you have
console.log(data[0].id)
The data object has no property id, so if you instead log
console.log(data[0])
you get output like
{ a: '1' }
and therefore you can access property a by doing
console.log(data[0].a);
UPDATE: here is a full working example.
One more thing: you are using eval, but Node comes with JSON.parse bundled in, so the snippet below is how I made it work.
File: app.js
var sys = require("sys"),
http = require("http"),
url = require("url"),
path = require("path"),
fs = require("fs");
http.createServer(function(request, res) {
var parsed_url = url.parse(request.url);
var uri = parsed_url.pathname;
if(uri === "/test"){
res.writeHead(200, {'Content-Type': 'text/javascript'});
request.addListener('data', function (chunk) {
// removed this - eval("(" + chunk + ")");
var data = JSON.parse(chunk);
console.log(data[0].a);
})
request.addListener('end', function() {
console.log('end triggered');
res.write("Post data");
res.end();
});
} else if(uri === "/") {
fs.readFile("./index.html",function(err, data){
if(err) throw err;
res.writeHead(200, {'Content-Type': 'text/html'});
res.end(data);
});
}
}).listen(8080);
In the same directory, create a file index.html with the following:
<html>
<head>
    <script type="text/javascript" charset="utf-8">
        var myhttp = new XMLHttpRequest();
        var url = "http://localhost:8080/test";
        var data = [{"a":"1"},{"b":"2"},{"c":"3"}];
        var dataJson = JSON.stringify(data);
        myhttp.open('POST', url, true);
        myhttp.send(dataJson);
        myhttp.onreadystatechange = function() {
            if ((myhttp.readyState == 4) && (myhttp.status == 200)){
                alert(myhttp.responseText);
            }
            else if ((myhttp.readyState == 4) && (myhttp.status != 200))
            {
                console.log("Error in Connection");
            }
        }
    </script>
</head>
<body>
</body>
</html>
That is a complete working example of what you want.
With regards to the same-origin policy issues you were having: those are mainly due to the fact that you can't POST data between two different domains via AJAX unless you use some tricks with iframes, but that is another story.
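As an aside (this goes beyond the original answer): the non-iframe way to allow a cross-domain AJAX POST is CORS, i.e. having the Node server send an Access-Control-Allow-Origin header on its responses, roughly:
res.writeHead(200, {
    'Content-Type': 'text/javascript',
    'Access-Control-Allow-Origin': '*' // allow any origin to read the response; restrict this in real deployments
});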
Also, I think it is good for anyone to understand the backbone of a technology before moving on to frameworks, so fair play to you.
Good luck.
You have to read the data in a different way. Posted data arrives at a node server in chunks (the 'data' events) that have to be collected until the 'end' event fires. Inside that handler, you are able to access your payload.
var body = '';
request.addListener('data', function (chunk) {
    body += chunk;
});
request.addListener('end', function() {
    console.log(body);
    res.write('post data: ' + body);
    res.end(); // finish the response so the client's readyState reaches 4
});
Additionally, there seem to be some issues with your client-side code (especially concerning the status-code checks), but I can't really help you with those, as I always work with frameworks like jQuery to manage async requests.
If you want to build reliable node.js servers for web use, I highly recommend the high-performance HTTP framework Express. It takes away a lot of the pain of developing a web-based server application in Node and is actively maintained.
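As a rough sketch of how compact the same endpoint gets with Express (assuming Express 4.16+ where express.json() is built in, and assuming the client sets a Content-Type: application/json header on the request; the route path and port just mirror the example above):
var express = require('express');
var app = express();

// Parse JSON request bodies automatically (requires Content-Type: application/json)
app.use(express.json());

app.post('/test', function (req, res) {
    console.log(req.body[0].a); // the posted array is already parsed
    res.send('Post data: ' + JSON.stringify(req.body));
});

app.listen(8080, function () {
    console.log('Express server listening on port 8080');
});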
